Merge pull request #115 from logspace-ai/dev

Bug fixes
anovazzi1 2023-04-05 21:29:30 -03:00 committed by GitHub
commit d88c2ae92f
108 changed files with 7200 additions and 1112 deletions

@@ -6,7 +6,7 @@ on:
pull_request:
env:
POETRY_VERSION: "1.3.1"
POETRY_VERSION: "1.4.0"
jobs:
build:

@@ -10,7 +10,7 @@ on:
- "pyproject.toml"
env:
POETRY_VERSION: "1.3.1"
POETRY_VERSION: "1.4.0"
jobs:
if_release:

.github/workflows/test.yml (new file)

@@ -0,0 +1,33 @@
name: test
on:
push:
branches: [main]
pull_request:
branches: [dev]
env:
POETRY_VERSION: "1.4.0"
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.10"
- "3.11"
steps:
- uses: actions/checkout@v3
- name: Install poetry
run: pipx install poetry==$POETRY_VERSION
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
run: poetry install
- name: Run unit tests
run: |
make test

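The new workflow runs make test on Python 3.10 and 3.11; pytest and httpx are added as dev dependencies further down in this same commit. A minimal sketch of the kind of test it would collect, assuming langflow.main exposes the same ASGI app that the Dockerfile serves (the test file name, route, and expected status code are illustrative assumptions, not part of this commit):

# tests/test_app.py -- hypothetical smoke test, not part of this commit
from fastapi.testclient import TestClient

from langflow.main import app  # the ASGI app the Dockerfile runs with uvicorn

client = TestClient(app)


def test_app_responds():
    # "/" and the 200 status are assumptions about the app's routes
    response = client.get("/")
    assert response.status_code == 200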

@@ -42,14 +42,13 @@ build:
dev:
make install_frontend
ifeq ($(build),1)
@echo 'Running docker compose up with build'
docker compose up --build
@echo 'Running docker compose up with build'
docker compose $(if $(debug),-f docker-compose.debug.yml) up --build
else
@echo 'Running docker compose up without build'
docker compose up
@echo 'Running docker compose up without build'
docker compose $(if $(debug),-f docker-compose.debug.yml) up
endif
publish:
make build
poetry publish

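With this change, make dev behaves as before by default, while setting the new debug variable on the command line (for example make dev debug=1, optionally together with build=1) makes GNU make expand $(if $(debug),-f docker-compose.debug.yml) and bring the stack up with the debug compose file added below.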

@@ -15,4 +15,4 @@ COPY ./ ./
# Install dependencies
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi
CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload"]
CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", "log-level", "debug"]

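The updated CMD adds auto-reload and debug logging to the container's uvicorn invocation. For reference, a minimal sketch of the equivalent programmatic startup, assuming uvicorn's standard uvicorn.run keyword arguments (the helper file name is illustrative, not part of this commit):

# run_dev.py -- hypothetical equivalent of the CMD above
import uvicorn

if __name__ == "__main__":
    # Mirrors: uvicorn langflow.main:app --host 0.0.0.0 --port 5003 --reload --log-level debug
    uvicorn.run(
        "langflow.main:app",
        host="0.0.0.0",
        port=5003,
        reload=True,
        log_level="debug",
    )
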
docker-compose.debug.yml (new file)

@@ -0,0 +1,28 @@
version: '3.4'
services:
backend:
volumes:
- ./:/app
build:
context: ./
dockerfile: ./dev.Dockerfile
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"]
ports:
- 7860:7860
- 5678:5678
restart: on-failure
frontend:
build:
context: ./src/frontend
dockerfile: ./dev.Dockerfile
args:
- BACKEND_URL=http://backend:7860
ports:
- "3000:3000"
volumes:
- ./src/frontend/public:/home/node/app/public
- ./src/frontend/src:/home/node/app/src
- ./src/frontend/package.json:/home/node/app/package.json
restart: on-failure

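The backend service in this debug compose file installs debugpy into /tmp at start-up and blocks on --wait-for-client before launching uvicorn, so an IDE can attach on port 5678. A minimal sketch of the same behaviour done in-process, assuming debugpy's standard listen/wait_for_client API (the entrypoint file name is illustrative, not part of this commit):

# debug_entry.py -- hypothetical in-process equivalent of the compose command above
import debugpy
import uvicorn

# Accept a remote debugger on the port the compose file exposes (5678)
debugpy.listen(("0.0.0.0", 5678))
# Block until a client attaches, matching --wait-for-client
debugpy.wait_for_client()

# Then start the app the same way the compose command does
uvicorn.run("langflow.main:app", host="0.0.0.0", port=7860, reload=True)
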
poetry.lock (generated)

@@ -220,14 +220,14 @@ files = [
[[package]]
name = "beautifulsoup4"
version = "4.11.2"
version = "4.12.0"
description = "Screen-scraping library"
category = "main"
optional = false
python-versions = ">=3.6.0"
files = [
{file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"},
{file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"},
{file = "beautifulsoup4-4.12.0-py3-none-any.whl", hash = "sha256:2130a5ad7f513200fae61a17abb5e338ca980fa28c439c0571014bc0217e9591"},
{file = "beautifulsoup4-4.12.0.tar.gz", hash = "sha256:c5fceeaec29d09c84970e47c65f2f0efe57872f7cff494c9691a26ec0ff13234"},
]
[package.dependencies]
@@ -239,37 +239,37 @@ lxml = ["lxml"]
[[package]]
name = "black"
version = "23.1.0"
version = "23.3.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
{file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
{file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
{file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
{file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
{file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
{file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
{file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
{file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
{file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
{file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
{file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
{file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
{file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
]
[package.dependencies]
@@ -502,21 +502,23 @@ files = [
[[package]]
name = "comm"
version = "0.1.2"
version = "0.1.3"
description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc."
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"},
{file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"},
{file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"},
{file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"},
]
[package.dependencies]
traitlets = ">=5.3"
[package.extras]
lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"]
test = ["pytest"]
typing = ["mypy (>=0.990)"]
[[package]]
name = "dataclasses-json"
@@ -577,6 +579,36 @@ files = [
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "dill"
version = "0.3.6"
description = "serialize all of python"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
{file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
]
[package.extras]
graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "exceptiongroup"
version = "1.1.1"
description = "Backport of PEP 654 (exception groups)"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
{file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "executing"
version = "1.2.0"
@@ -723,14 +755,14 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"]
[[package]]
name = "google-api-python-client"
version = "2.81.0"
version = "2.83.0"
description = "Google API Client Library for Python"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-api-python-client-2.81.0.tar.gz", hash = "sha256:8faab0b9b19d3797b455d33320c643253b6761fd0d3f3adb54792ab155d0795a"},
{file = "google_api_python_client-2.81.0-py2.py3-none-any.whl", hash = "sha256:ad6700ae3a76ead8956d7f30935978cea308530e342ad8c1e26a4e40fc05c054"},
{file = "google-api-python-client-2.83.0.tar.gz", hash = "sha256:d07509f1b2d2b2427363b454db996f7a15e1751a48cfcaf28427050560dd51cf"},
{file = "google_api_python_client-2.83.0-py2.py3-none-any.whl", hash = "sha256:afa7fe2a5d77e8f136cdb8f40a120dd6660c2292f791c1b22734dfe786bd1dac"},
]
[package.dependencies]
@@ -742,14 +774,14 @@ uritemplate = ">=3.0.1,<5"
[[package]]
name = "google-auth"
version = "2.16.2"
version = "2.17.1"
description = "Google Authentication Library"
category = "main"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*"
files = [
{file = "google-auth-2.16.2.tar.gz", hash = "sha256:07e14f34ec288e3f33e00e2e3cc40c8942aa5d4ceac06256a28cd8e786591420"},
{file = "google_auth-2.16.2-py2.py3-none-any.whl", hash = "sha256:2fef3cf94876d1a0e204afece58bb4d83fb57228aaa366c64045039fda6770a2"},
{file = "google-auth-2.17.1.tar.gz", hash = "sha256:8f379b46bad381ad2a0b989dfb0c13ad28d3c2a79f27348213f8946a1d15d55a"},
{file = "google_auth-2.17.1-py2.py3-none-any.whl", hash = "sha256:357ff22a75b4c0f6093470f21816a825d2adee398177569824e37b6c10069e19"},
]
[package.dependencies]
@@ -798,14 +830,14 @@ requests = "*"
[[package]]
name = "googleapis-common-protos"
version = "1.58.0"
version = "1.59.0"
description = "Common protobufs used in Google APIs"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "googleapis-common-protos-1.58.0.tar.gz", hash = "sha256:c727251ec025947d545184ba17e3578840fc3a24a0516a020479edab660457df"},
{file = "googleapis_common_protos-1.58.0-py2.py3-none-any.whl", hash = "sha256:ca3befcd4580dab6ad49356b46bf165bb68ff4b32389f028f1abd7c10ab9519a"},
{file = "googleapis-common-protos-1.59.0.tar.gz", hash = "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44"},
{file = "googleapis_common_protos-1.59.0-py2.py3-none-any.whl", hash = "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f"},
]
[package.dependencies]
@@ -921,21 +953,67 @@ files = [
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
[[package]]
name = "httpcore"
version = "0.16.3"
description = "A minimal low-level HTTP client."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"},
{file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"},
]
[package.dependencies]
anyio = ">=3.0,<5.0"
certifi = "*"
h11 = ">=0.13,<0.15"
sniffio = ">=1.0.0,<2.0.0"
[package.extras]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "httplib2"
version = "0.21.0"
version = "0.22.0"
description = "A comprehensive HTTP client library."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "httplib2-0.21.0-py3-none-any.whl", hash = "sha256:987c8bb3eb82d3fa60c68699510a692aa2ad9c4bd4f123e51dfb1488c14cdd01"},
{file = "httplib2-0.21.0.tar.gz", hash = "sha256:fc144f091c7286b82bec71bdbd9b27323ba709cc612568d3000893bfd9cb4b34"},
{file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"},
{file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"},
]
[package.dependencies]
pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""}
[[package]]
name = "httpx"
version = "0.23.3"
description = "The next generation HTTP client."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"},
{file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"},
]
[package.dependencies]
certifi = "*"
httpcore = ">=0.15.0,<0.17.0"
rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
sniffio = "*"
[package.extras]
brotli = ["brotli", "brotlicffi"]
cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "idna"
version = "3.4"
@@ -968,16 +1046,28 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker
perf = ["ipython"]
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "ipykernel"
version = "6.21.3"
version = "6.22.0"
description = "IPython Kernel for Jupyter"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipykernel-6.21.3-py3-none-any.whl", hash = "sha256:24ebd9715e317c185e37156ab3a87382410185230dde7aeffce389d6c7d4428a"},
{file = "ipykernel-6.21.3.tar.gz", hash = "sha256:c8ff581905d70e7299bc1473a2f7c113bec1744fb3746d58e5b4b93bd8ee7001"},
{file = "ipykernel-6.22.0-py3-none-any.whl", hash = "sha256:1ae6047c1277508933078163721bbb479c3e7292778a04b4bacf0874550977d6"},
{file = "ipykernel-6.22.0.tar.gz", hash = "sha256:302558b81f1bc22dc259fb2a0c5c7cf2f4c0bdb21b50484348f7bafe7fb71421"},
]
[package.dependencies]
@@ -1004,14 +1094,14 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio"
[[package]]
name = "ipython"
version = "8.11.0"
version = "8.12.0"
description = "IPython: Productive Interactive Computing"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"},
{file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"},
{file = "ipython-8.12.0-py3-none-any.whl", hash = "sha256:1c183bf61b148b00bcebfa5d9b39312733ae97f6dad90d7e9b4d86c8647f498c"},
{file = "ipython-8.12.0.tar.gz", hash = "sha256:a950236df04ad75b5bc7f816f9af3d74dc118fd42f2ff7e80e8e60ca1f182e2d"},
]
[package.dependencies]
@@ -1027,6 +1117,7 @@ prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0"
pygments = ">=2.4.0"
stack-data = "*"
traitlets = ">=5"
typing-extensions = {version = "*", markers = "python_version < \"3.10\""}
[package.extras]
all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"]
@@ -1063,14 +1154,14 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
[[package]]
name = "jupyter-client"
version = "8.0.3"
version = "8.1.0"
description = "Jupyter protocol implementation and client libraries"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "jupyter_client-8.0.3-py3-none-any.whl", hash = "sha256:be48ac6bd659cbbddb7a674cf06b3b8afbf53f228253cf58bde604c03bd487b0"},
{file = "jupyter_client-8.0.3.tar.gz", hash = "sha256:ed65498bea6d876ef9d8da3e0db3dd33c5d129f5b2645f56ae03993782966bd0"},
{file = "jupyter_client-8.1.0-py3-none-any.whl", hash = "sha256:d5b8e739d7816944be50f81121a109788a3d92732ecf1ad1e4dadebc948818fe"},
{file = "jupyter_client-8.1.0.tar.gz", hash = "sha256:3fbab64100a0dcac7701b1e0f1a4412f1ccb45546ff2ad9bc4fcbe4e19804811"},
]
[package.dependencies]
@@ -1108,14 +1199,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "langchain"
version = "0.0.113"
version = "0.0.131"
description = "Building applications with LLMs through composability"
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain-0.0.113-py3-none-any.whl", hash = "sha256:9e146d116fd3b9b2210c8c447cabfa20ef27c26ea3f2bc986eab97d1dad0aab6"},
{file = "langchain-0.0.113.tar.gz", hash = "sha256:a494fe02bc63da4bcda7da8d7f4a346522fbc87f0a4955b72519ec2ed86bf906"},
{file = "langchain-0.0.131-py3-none-any.whl", hash = "sha256:3564a759e85095c9d71a78817da9cec1e2a8a0cda1bdd94ef8ac7008e432717a"},
{file = "langchain-0.0.131.tar.gz", hash = "sha256:61baf67fbec561ce38d187915a46e1c41139270826453600951760fde1a5d98a"},
]
[package.dependencies]
@@ -1123,14 +1214,42 @@ aiohttp = ">=3.8.3,<4.0.0"
dataclasses-json = ">=0.5.7,<0.6.0"
numpy = ">=1,<2"
pydantic = ">=1,<2"
PyYAML = ">=6,<7"
PyYAML = ">=5.4.1"
requests = ">=2,<3"
SQLAlchemy = ">=1,<2"
tenacity = ">=8.1.0,<9.0.0"
[package.extras]
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.2,<0.3.0)", "beautifulsoup4 (>=4,<5)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
llms = ["anthropic (>=0.2.2,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"]
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.4,<0.3.0)", "beautifulsoup4 (>=4,<5)", "boto3 (>=1.26.96,<2.0.0)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.1.1,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
cohere = ["cohere (>=3,<4)"]
llms = ["anthropic (>=0.2.4,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"]
openai = ["openai (>=0,<1)"]
qdrant = ["qdrant-client (>=1.1.1,<2.0.0)"]
[[package]]
name = "markdown-it-py"
version = "2.2.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
{file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
]
[package.dependencies]
mdurl = ">=0.1,<1.0"
[package.extras]
benchmarking = ["psutil", "pytest", "pytest-benchmark"]
code-style = ["pre-commit (>=3.0,<4.0)"]
compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "marshmallow"
@@ -1183,6 +1302,18 @@ files = [
[package.dependencies]
traitlets = "*"
[[package]]
name = "mdurl"
version = "0.1.2"
description = "Markdown URL utilities"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
[[package]]
name = "multidict"
version = "6.0.4"
@@ -1378,14 +1509,14 @@ files = [
[[package]]
name = "openai"
version = "0.27.2"
version = "0.27.4"
description = "Python client library for the OpenAI API"
category = "main"
optional = false
python-versions = ">=3.7.1"
files = [
{file = "openai-0.27.2-py3-none-any.whl", hash = "sha256:6df674cf257e9e0504f1fd191c333d3f6a2442b13218d0eccf06230eb24d320e"},
{file = "openai-0.27.2.tar.gz", hash = "sha256:5869fdfa34b0ec66c39afa22f4a0fb83a135dff81f6505f52834c6ab3113f762"},
{file = "openai-0.27.4-py3-none-any.whl", hash = "sha256:3b82c867d531e1fd2003d9de2131e1c4bfd4c70b1a3149e0543a555b30807b70"},
{file = "openai-0.27.4.tar.gz", hash = "sha256:9f9d27d26e62c6068f516c0729449954b5ef6994be1a6cbfe7dbefbc84423a04"},
]
[package.dependencies]
@@ -1411,6 +1542,55 @@ files = [
{file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
]
[[package]]
name = "pandas"
version = "1.5.3"
description = "Powerful data structures for data analysis, time series, and statistics"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"},
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"},
{file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"},
{file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"},
{file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"},
{file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"},
{file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"},
{file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"},
{file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"},
{file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"},
{file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"},
{file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"},
{file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"},
]
[package.dependencies]
numpy = [
{version = ">=1.20.3", markers = "python_version < \"3.10\""},
{version = ">=1.21.0", markers = "python_version >= \"3.10\""},
{version = ">=1.23.2", markers = "python_version >= \"3.11\""},
]
python-dateutil = ">=2.8.1"
pytz = ">=2020.1"
[package.extras]
test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"]
[[package]]
name = "parso"
version = "0.8.3"
@@ -1468,19 +1648,35 @@ files = [
[[package]]
name = "platformdirs"
version = "3.1.1"
version = "3.2.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"},
{file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"},
{file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"},
{file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"},
]
[package.extras]
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "pluggy"
version = "1.0.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "prompt-toolkit"
@@ -1615,48 +1811,48 @@ files = [
[[package]]
name = "pydantic"
version = "1.10.6"
version = "1.10.7"
description = "Data validation and settings management using python type hints"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-1.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31"},
{file = "pydantic-1.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160"},
{file = "pydantic-1.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083"},
{file = "pydantic-1.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4"},
{file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084"},
{file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb"},
{file = "pydantic-1.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7"},
{file = "pydantic-1.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b"},
{file = "pydantic-1.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d"},
{file = "pydantic-1.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7"},
{file = "pydantic-1.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d"},
{file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186"},
{file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70"},
{file = "pydantic-1.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4"},
{file = "pydantic-1.10.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65"},
{file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2"},
{file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2"},
{file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a"},
{file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd"},
{file = "pydantic-1.10.6-cp37-cp37m-win_amd64.whl", hash = "sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb"},
{file = "pydantic-1.10.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6"},
{file = "pydantic-1.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77"},
{file = "pydantic-1.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832"},
{file = "pydantic-1.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d"},
{file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c"},
{file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f"},
{file = "pydantic-1.10.6-cp38-cp38-win_amd64.whl", hash = "sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35"},
{file = "pydantic-1.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7"},
{file = "pydantic-1.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d"},
{file = "pydantic-1.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f"},
{file = "pydantic-1.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62"},
{file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc"},
{file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a"},
{file = "pydantic-1.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06"},
{file = "pydantic-1.10.6-py3-none-any.whl", hash = "sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0"},
{file = "pydantic-1.10.6.tar.gz", hash = "sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd"},
{file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"},
{file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"},
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"},
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"},
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"},
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"},
{file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"},
{file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"},
{file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"},
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"},
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"},
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"},
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"},
{file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"},
{file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"},
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"},
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"},
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"},
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"},
{file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"},
{file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"},
{file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"},
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"},
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"},
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"},
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"},
{file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"},
{file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"},
{file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"},
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"},
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"},
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"},
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"},
{file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"},
{file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"},
{file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"},
]
[package.dependencies]
@@ -1696,11 +1892,35 @@ files = [
[package.extras]
diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "7.2.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"},
{file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"},
]
[package.dependencies]
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "dev"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
@@ -1711,28 +1931,40 @@ files = [
[package.dependencies]
six = ">=1.5"
[[package]]
name = "pytz"
version = "2023.3"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
{file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
]
[[package]]
name = "pywin32"
version = "305"
version = "306"
description = "Python for Window Extensions"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"},
{file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"},
{file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"},
{file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"},
{file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"},
{file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"},
{file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"},
{file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"},
{file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"},
{file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"},
{file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"},
{file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"},
{file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"},
{file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"},
{file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
{file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
{file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
{file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
{file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
{file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
{file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
{file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
{file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
{file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
{file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
{file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
{file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
{file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
]
[[package]]
@@ -1787,89 +2019,89 @@ files = [
[[package]]
name = "pyzmq"
version = "25.0.1"
version = "25.0.2"
description = "Python bindings for 0MQ"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "pyzmq-25.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:94f65e13e6df035b0ae90d49adfe7891aa4e7bdeaa65265729fecc04ab3eb0fe"},
{file = "pyzmq-25.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0399450d970990705ce47ed65f5efed3e4627dfc80628c3798100e7b72e023b"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f29709b0431668a967d7ff0394b00a865e7b7dde827ee0a47938b705b7c4aec3"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fee9420b34c0ab426f105926a701a3d73f878fe77f07a1b92e0b78d1e2c795c"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57be375c6bc66b0f685cd298e5c1c3d7ee34a254145b8087aed6e25db372b0f3"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a3309b2c5a5be3d48c9ade77b340361764449aa22854ac65935b1e6c0cdabe2c"},
{file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7574d24579e83ee8c5d3b14769e7ba895161c43a601e911dd89d449e545e00ad"},
{file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:041d617091258133e602919b28fdce4d3e2f8aedcd1e8b34c599653bc288d59e"},
{file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7897ba8c3fedc6b3023bad676ceb69dbf90c077ff18ae3133ba43db47417cc72"},
{file = "pyzmq-25.0.1-cp310-cp310-win32.whl", hash = "sha256:c462f70dadbd4649e572ca7cd1e7cf3305a8c2afc53b84214c0a7c0c3af8a657"},
{file = "pyzmq-25.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e3a721710992cf0e213bbb7be48fb0f32202e8d01f556c196c870373bb9ad4f4"},
{file = "pyzmq-25.0.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:b0a0fcf56279b9f3acc9b36a83feb7640c51b0db444b6870e4406d002be1d514"},
{file = "pyzmq-25.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95aff52fc847ea5755d2370f86e379ba2ed6eb67a0a6f90f0e8e99c553693b81"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b55366e6c11e1ef7403d072b9867b62cf63eebd31dd038ef65bc8d65572854f6"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64a2bc72bcad705ee42a8fe877478ddadb7e260e806562833d3d814125e28a44"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca66aa24422d7f324acd5cb7fc7df616eb6f0205e059393fb108702e33e90c7"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:58d5dfec2e2befd09b04c4683b3c984d2203cf6e054d0f9786be3826737ad612"},
{file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3549292d65987e422e2c9f105b1485448381f489d8a6b6b040fc8b8f497bd578"},
{file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5b1ca8b0df50d1ac88857ffe9ebd1347e0a5bb5f6e1d99940fdd7df0ffdefb49"},
{file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1a107e89cdcf799060ba4fa85fd3c942e19df7b24eb2600618b2406cc73c18e"},
{file = "pyzmq-25.0.1-cp311-cp311-win32.whl", hash = "sha256:0f22ba4e9041549a5a3f5a545169dda52fa0aa7b5ef46b336cbe6679c4c3c134"},
{file = "pyzmq-25.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:0644c0d5c73e4bfeee8148f638ab16ad783df1c4d6c2f968552a26a43fb002a1"},
{file = "pyzmq-25.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c5eb4b17d73b1fc208a4faa6b5918983ccc961770aa37741891f61db302dae4e"},
{file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:649dd55948144a108041397f07c1299086ce1c85c2e166831db3a33dac1d0c7f"},
{file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c99fd8d3efc138d6a7fb1e822133f62bb18ffec66dc6d398dcb2ac2ab8eb2cb0"},
{file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d72d69d4bb37c05a446d10bc40b391cf8fb7572654fb73fa69e7d2a395197e65"},
{file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:036dbf8373aed4ccf56d58c561b23601b8f33919ec1093d8c77b37ac1259702d"},
{file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:861c37649c75a2ecfc2034a32b9d5ca744e1e0cddcbf65afbd8027cf7d9755be"},
{file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:92f04d63aecbb71d41f7db5f988167ef429f96d8197fd46684688cdb513e8a2e"},
{file = "pyzmq-25.0.1-cp36-cp36m-win32.whl", hash = "sha256:866a4e918f1f4b2f83e9982b817df257910e3e50e456ffa74f141a10adcd11d1"},
{file = "pyzmq-25.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:ec29c880b82cd38a63810a93b77e13f167e05732049101947772eed9ae805097"},
{file = "pyzmq-25.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0241a334e39aa74e4ba0ae5f9e29521f1b48b8d56bf707f25f322c04eb423e99"},
{file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b7032f55b1ed2cd8c349a89e467dca2338b7765fab82cb64c3504e49adaf51"},
{file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:960f98f562ee6a50ecf283bc62479d00f5ee10e9068a21683b9e961cd87c9261"},
{file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:835da498b71570d56e5526de4d5b36fa10dd9b8a82e2c405f963afeb51ff5bdc"},
{file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21de2ef6099fa8d6a3c2dc15aaca58e9f9ffdcc7b82a246590aa9564815699d9"},
{file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e448a5a294958e915a7e1b664e6fbfcd3814989d381fb068673317f6f3ea3f8"},
{file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40d909bdc8a2d64ad260925154712602ee6a0425ae0b08bce78a19adfdc2f05b"},
{file = "pyzmq-25.0.1-cp37-cp37m-win32.whl", hash = "sha256:6ff37f2b818df25c887fd40bb434569db7ff66b35f5dfff6f40cc476aee92e3f"},
{file = "pyzmq-25.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66ee27a0221771bbaa2cce456e8ca890569c3d18b08b955eb6420c12516537c"},
{file = "pyzmq-25.0.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1003bbae89435eadec03b4fa3bb6516dd1529fb09ae5704284f7400cc77009ba"},
{file = "pyzmq-25.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dde7a65a8bfa88aa1721add504320f8344272542291ce4e7c77993fa32901567"},
{file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:20b6155429d3b57e9e7bd11f1680985ef8b5b0868f1a64073fb8c01326c7c80c"},
{file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e37a764cbf91c1ed9a02e4fede79a414284aca2a0b7d92d82a3c7b82d678ec2d"},
{file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa56a362066b3a853a64d35693a08046f640961efcc0e7643768916403e72e70"},
{file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c4bdf1241886d39d816535d3ef9fc325bbf02470c9fd5f2cb62706eeb834f7f2"},
{file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:446acbac24427ef42bff61a807ddcad8d03df78fb976184a4d7d6f4b1e7d8a67"},
{file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b39847501d229e5fab155d88a565edfb182cdd3f7046f15a7f2df9c77cdc422d"},
{file = "pyzmq-25.0.1-cp38-cp38-win32.whl", hash = "sha256:cba6b81b653d789d76e438c2e77b49f610b23e84b3bb43b99100f08a0a5d637b"},
{file = "pyzmq-25.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:6eca6b90c4fb290efd27582780b5eaf048887a32b2c5fcd6330819192cb07b38"},
{file = "pyzmq-25.0.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:58207a6709e53b723105bac6bb3c6795ee134f7e71351f39c09d52ac235c6b0d"},
{file = "pyzmq-25.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c62084f37682e7ee4064e8310078be4f6f7687bf528ae5761e2ba7216c5b8949"},
{file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9c44e9f04f8ed99c6f2e9e49f29d400d7557dd9e9e3f64e1e8a595aedc4258a2"},
{file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c635d1c40d341835066931a018e378428dfbe0347ed4bb45a6b57f7d8c34196e"},
{file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef93b5574c9ff36b4be376555efd369bd55b99bcc7be72f23bd38102dd9392b"},
{file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44bc81099ab33388f6c061c1b194307d877428cb2b18282d0385584d5c73ed72"},
{file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6d988844ed6caa21b0076b64671e83a136d93c57f1ae5a72b915661af55d313b"},
{file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9d5eb6e88ae8a8734f239ffe1ed90559a426cf5b859b8ee66e0cd43fc5daf5c9"},
{file = "pyzmq-25.0.1-cp39-cp39-win32.whl", hash = "sha256:f6b45db9de4c8adbf5fda58e827a32315d282cfb01e54dc74e7c7ccc0988c010"},
{file = "pyzmq-25.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:47eeb94b78aa442568b85ad28f85bd37a9c3c34d052cbf8ebf8622c45f23a9cd"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ed7475f3adf0c7750d75740b3267947b501a33f4625ceae709fda2e75ec9ed7"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6d09c22ed4d0afcc662d17c2429a03fc1fae7fe7e3bc1f413e744bccfeaabdc3"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:703ec5f5a8369c09d8f3eb626358bdb590a2b1375bcce8b7da01b3a03f8b8668"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aea31cc0d1f6c3fb4685db08b4c771545cf3fed3c4b4c8942c0a4e97042ec8"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b1c03b942557bb366fd3dc377a15763d5d688de1328228136c75e50f968333cc"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e8a5ced9d92837f52ccdae6351c627b5012669727bc3eede2dc0f581eca1d0e"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d78f840d88244272fb7252e47522b1179833aff7ec64583bda3d21259c9c2c20"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c3f78fa80780e24d294f9421123cb3bd3b68677953c53da85273a22d1c983298"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f6de4305e02560111a5d4555758faa85d44a5bff70cccff58dbf30c81a079f0"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:34a1b1a8ce9b20e01aba71b9279d9b1d4e5980a6a4e42092180e16628a444ca1"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:625759a0112af7c3fb560de5724d749729f00b901f7625d1a3f3fb38897544b1"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cff159b21438c24476a49865f3d5700c9cc5833600661bc0e672decec2ff357"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc47652d990de9ef967c494c526d73920ef064fef0444355a7cebec6fc50542"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44db5162a6881f7d740dec65917f38f9bfbc5ad9a10e06d7d5deebb27eb63939"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f38bf2c60a3f7b87cf5177043eb7a331a4f53bc9305a2452decbd42ad0c98741"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1cf4becd15669bc62a41c1b1bb742e22ac25965134e4254cde82a4dc2554b1b"},
{file = "pyzmq-25.0.1.tar.gz", hash = "sha256:44a24f7ce44e70d20e2a4c9ba5af70b4611df7a4b920eed2c8e0bdd5a5af225f"},
{file = "pyzmq-25.0.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ac178e666c097c8d3deb5097b58cd1316092fc43e8ef5b5fdb259b51da7e7315"},
{file = "pyzmq-25.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:659e62e1cbb063151c52f5b01a38e1df6b54feccfa3e2509d44c35ca6d7962ee"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8280ada89010735a12b968ec3ea9a468ac2e04fddcc1cede59cb7f5178783b9c"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b5eeb5278a8a636bb0abdd9ff5076bcbb836cd2302565df53ff1fa7d106d54"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a2e5fe42dfe6b73ca120b97ac9f34bfa8414feb15e00e37415dbd51cf227ef6"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:827bf60e749e78acb408a6c5af6688efbc9993e44ecc792b036ec2f4b4acf485"},
{file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b504ae43d37e282301da586529e2ded8b36d4ee2cd5e6db4386724ddeaa6bbc"},
{file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb1f69a0a2a2b1aae8412979dd6293cc6bcddd4439bf07e4758d864ddb112354"},
{file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b9c9cc965cdf28381e36da525dcb89fc1571d9c54800fdcd73e3f73a2fc29bd"},
{file = "pyzmq-25.0.2-cp310-cp310-win32.whl", hash = "sha256:24abbfdbb75ac5039205e72d6c75f10fc39d925f2df8ff21ebc74179488ebfca"},
{file = "pyzmq-25.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a821a506822fac55d2df2085a52530f68ab15ceed12d63539adc32bd4410f6e"},
{file = "pyzmq-25.0.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:9af0bb0277e92f41af35e991c242c9c71920169d6aa53ade7e444f338f4c8128"},
{file = "pyzmq-25.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:54a96cf77684a3a537b76acfa7237b1e79a8f8d14e7f00e0171a94b346c5293e"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88649b19ede1cab03b96b66c364cbbf17c953615cdbc844f7f6e5f14c5e5261c"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:715cff7644a80a7795953c11b067a75f16eb9fc695a5a53316891ebee7f3c9d5"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b3f0f066b4f1d17383aae509bacf833ccaf591184a1f3c7a1661c085063ae"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d488c5c8630f7e782e800869f82744c3aca4aca62c63232e5d8c490d3d66956a"},
{file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:38d9f78d69bcdeec0c11e0feb3bc70f36f9b8c44fc06e5d06d91dc0a21b453c7"},
{file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3059a6a534c910e1d5d068df42f60d434f79e6cc6285aa469b384fa921f78cf8"},
{file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6526d097b75192f228c09d48420854d53dfbc7abbb41b0e26f363ccb26fbc177"},
{file = "pyzmq-25.0.2-cp311-cp311-win32.whl", hash = "sha256:5c5fbb229e40a89a2fe73d0c1181916f31e30f253cb2d6d91bea7927c2e18413"},
{file = "pyzmq-25.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed15e3a2c3c2398e6ae5ce86d6a31b452dfd6ad4cd5d312596b30929c4b6e182"},
{file = "pyzmq-25.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:032f5c8483c85bf9c9ca0593a11c7c749d734ce68d435e38c3f72e759b98b3c9"},
{file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:374b55516393bfd4d7a7daa6c3b36d6dd6a31ff9d2adad0838cd6a203125e714"},
{file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08bfcc21b5997a9be4fefa405341320d8e7f19b4d684fb9c0580255c5bd6d695"},
{file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1a843d26a8da1b752c74bc019c7b20e6791ee813cd6877449e6a1415589d22ff"},
{file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b48616a09d7df9dbae2f45a0256eee7b794b903ddc6d8657a9948669b345f220"},
{file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d4427b4a136e3b7f85516c76dd2e0756c22eec4026afb76ca1397152b0ca8145"},
{file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:26b0358e8933990502f4513c991c9935b6c06af01787a36d133b7c39b1df37fa"},
{file = "pyzmq-25.0.2-cp36-cp36m-win32.whl", hash = "sha256:c8fedc3ccd62c6b77dfe6f43802057a803a411ee96f14e946f4a76ec4ed0e117"},
{file = "pyzmq-25.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2da6813b7995b6b1d1307329c73d3e3be2fd2d78e19acfc4eff2e27262732388"},
{file = "pyzmq-25.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a35960c8b2f63e4ef67fd6731851030df68e4b617a6715dd11b4b10312d19fef"},
{file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2a0b880ab40aca5a878933376cb6c1ec483fba72f7f34e015c0f675c90b20"},
{file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:85762712b74c7bd18e340c3639d1bf2f23735a998d63f46bb6584d904b5e401d"},
{file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64812f29d6eee565e129ca14b0c785744bfff679a4727137484101b34602d1a7"},
{file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:510d8e55b3a7cd13f8d3e9121edf0a8730b87d925d25298bace29a7e7bc82810"},
{file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b164cc3c8acb3d102e311f2eb6f3c305865ecb377e56adc015cb51f721f1dda6"},
{file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28fdb9224a258134784a9cf009b59265a9dde79582fb750d4e88a6bcbc6fa3dc"},
{file = "pyzmq-25.0.2-cp37-cp37m-win32.whl", hash = "sha256:dd771a440effa1c36d3523bc6ba4e54ff5d2e54b4adcc1e060d8f3ca3721d228"},
{file = "pyzmq-25.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:9bdc40efb679b9dcc39c06d25629e55581e4c4f7870a5e88db4f1c51ce25e20d"},
{file = "pyzmq-25.0.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1f82906a2d8e4ee310f30487b165e7cc8ed09c009e4502da67178b03083c4ce0"},
{file = "pyzmq-25.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:21ec0bf4831988af43c8d66ba3ccd81af2c5e793e1bf6790eb2d50e27b3c570a"},
{file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbce982a17c88d2312ec2cf7673985d444f1beaac6e8189424e0a0e0448dbb3"},
{file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e1d2f2d86fc75ed7f8845a992c5f6f1ab5db99747fb0d78b5e4046d041164d2"},
{file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e92ff20ad5d13266bc999a29ed29a3b5b101c21fdf4b2cf420c09db9fb690e"},
{file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edbbf06cc2719889470a8d2bf5072bb00f423e12de0eb9ffec946c2c9748e149"},
{file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77942243ff4d14d90c11b2afd8ee6c039b45a0be4e53fb6fa7f5e4fd0b59da39"},
{file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab046e9cb902d1f62c9cc0eca055b1d11108bdc271caf7c2171487298f229b56"},
{file = "pyzmq-25.0.2-cp38-cp38-win32.whl", hash = "sha256:ad761cfbe477236802a7ab2c080d268c95e784fe30cafa7e055aacd1ca877eb0"},
{file = "pyzmq-25.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8560756318ec7c4c49d2c341012167e704b5a46d9034905853c3d1ade4f55bee"},
{file = "pyzmq-25.0.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:ab2c056ac503f25a63f6c8c6771373e2a711b98b304614151dfb552d3d6c81f6"},
{file = "pyzmq-25.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cca8524b61c0eaaa3505382dc9b9a3bc8165f1d6c010fdd1452c224225a26689"},
{file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb9f7eae02d3ac42fbedad30006b7407c984a0eb4189a1322241a20944d61e5"},
{file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5eaeae038c68748082137d6896d5c4db7927e9349237ded08ee1bbd94f7361c9"},
{file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a31992a8f8d51663ebf79df0df6a04ffb905063083d682d4380ab8d2c67257c"},
{file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6a979e59d2184a0c8f2ede4b0810cbdd86b64d99d9cc8a023929e40dce7c86cc"},
{file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f124cb73f1aa6654d31b183810febc8505fd0c597afa127c4f40076be4574e0"},
{file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:65c19a63b4a83ae45d62178b70223adeee5f12f3032726b897431b6553aa25af"},
{file = "pyzmq-25.0.2-cp39-cp39-win32.whl", hash = "sha256:83d822e8687621bed87404afc1c03d83fa2ce39733d54c2fd52d8829edb8a7ff"},
{file = "pyzmq-25.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:24683285cc6b7bf18ad37d75b9db0e0fefe58404e7001f1d82bf9e721806daa7"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a4b4261eb8f9ed71f63b9eb0198dd7c934aa3b3972dac586d0ef502ba9ab08b"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:62ec8d979f56c0053a92b2b6a10ff54b9ec8a4f187db2b6ec31ee3dd6d3ca6e2"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:affec1470351178e892121b3414c8ef7803269f207bf9bef85f9a6dd11cde264"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffc71111433bd6ec8607a37b9211f4ef42e3d3b271c6d76c813669834764b248"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6fadc60970714d86eff27821f8fb01f8328dd36bebd496b0564a500fe4a9e354"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:269968f2a76c0513490aeb3ba0dc3c77b7c7a11daa894f9d1da88d4a0db09835"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f7c8b8368e84381ae7c57f1f5283b029c888504aaf4949c32e6e6fb256ec9bf0"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25e6873a70ad5aa31e4a7c41e5e8c709296edef4a92313e1cd5fc87bbd1874e2"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b733076ff46e7db5504c5e7284f04a9852c63214c74688bdb6135808531755a3"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a6f6ae12478fdc26a6d5fdb21f806b08fa5403cd02fd312e4cb5f72df078f96f"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:67da1c213fbd208906ab3470cfff1ee0048838365135a9bddc7b40b11e6d6c89"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531e36d9fcd66f18de27434a25b51d137eb546931033f392e85674c7a7cea853"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34a6fddd159ff38aa9497b2e342a559f142ab365576284bc8f77cb3ead1f79c5"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b491998ef886662c1f3d49ea2198055a9a536ddf7430b051b21054f2a5831800"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5d496815074e3e3d183fe2c7fcea2109ad67b74084c254481f87b64e04e9a471"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:56a94ab1d12af982b55ca96c6853db6ac85505e820d9458ac76364c1998972f4"},
{file = "pyzmq-25.0.2.tar.gz", hash = "sha256:6b8c1bbb70e868dc88801aa532cae6bd4e3b5233784692b786f17ad2962e5149"},
]
[package.dependencies]
@ -1897,6 +2129,43 @@ urllib3 = ">=1.21.1,<1.27"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rfc3986"
version = "1.5.0"
description = "Validating URI References per RFC 3986"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
{file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
]
[package.dependencies]
idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
[package.extras]
idna2008 = ["idna"]
[[package]]
name = "rich"
version = "13.3.3"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
category = "dev"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "rich-13.3.3-py3-none-any.whl", hash = "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333"},
{file = "rich-13.3.3.tar.gz", hash = "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15"},
]
[package.dependencies]
markdown-it-py = ">=2.2.0,<3.0.0"
pygments = ">=2.13.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "rsa"
version = "4.9"
@ -1941,14 +2210,14 @@ files = [
[[package]]
name = "setuptools"
version = "67.6.0"
version = "67.6.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"},
{file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"},
{file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"},
{file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"},
]
[package.extras]
@ -2214,14 +2483,41 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.
[[package]]
name = "types-pyyaml"
version = "6.0.12.8"
version = "6.0.12.9"
description = "Typing stubs for PyYAML"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "types-PyYAML-6.0.12.8.tar.gz", hash = "sha256:19304869a89d49af00be681e7b267414df213f4eb89634c4495fa62e8f942b9f"},
{file = "types_PyYAML-6.0.12.8-py3-none-any.whl", hash = "sha256:5314a4b2580999b2ea06b2e5f9a7763d860d6e09cdf21c0e9561daa9cbd60178"},
{file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"},
{file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"},
]
[[package]]
name = "types-requests"
version = "2.28.11.17"
description = "Typing stubs for requests"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-requests-2.28.11.17.tar.gz", hash = "sha256:0d580652ce903f643f8c3b494dd01d29367ea57cea0c7ad7f65cf3169092edb0"},
{file = "types_requests-2.28.11.17-py3-none-any.whl", hash = "sha256:cc1aba862575019306b2ed134eb1ea994cab1c887a22e18d3383e6dd42e9789b"},
]
[package.dependencies]
types-urllib3 = "<1.27"
[[package]]
name = "types-urllib3"
version = "1.26.25.10"
description = "Typing stubs for urllib3"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-urllib3-1.26.25.10.tar.gz", hash = "sha256:c44881cde9fc8256d05ad6b21f50c4681eb20092552351570ab0a8a0653286d6"},
{file = "types_urllib3-1.26.25.10-py3-none-any.whl", hash = "sha256:12c744609d588340a07e45d333bf870069fc8793bcf96bae7a96d4712a42591d"},
]
[[package]]
@ -2419,4 +2715,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "9acd2b7396be651321ac517873a398d1631a76918fefdb003f7f587f031d9ba1"
content-hash = "91c68c5a5673f7b2bd0833af35da1262afd21d631cc62ec6ff9c65f69a96af0a"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.0.46"
version = "0.0.52"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -29,15 +29,25 @@ google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.7.0"
gunicorn = "^20.1.0"
langchain = "^0.0.113"
langchain = "^0.0.131"
openai = "^0.27.2"
types-pyyaml = "^6.0.12.8"
dill = "^0.3.6"
pandas = "^1.5.3"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"
ruff = "^0.0.254"
httpx = "^0.23.3"
rich = "^13.3.3"
pytest = "^7.2.2"
types-requests = "^2.28.11"
requests = "^2.28.0"
[tool.ruff]
line-length = 120
[build-system]
requires = ["poetry-core"]

View file

@ -1,4 +1,3 @@
import logging
import multiprocessing
import platform
from pathlib import Path
@ -7,8 +6,10 @@ import typer
from fastapi.staticfiles import StaticFiles
from langflow.main import create_app
from langflow.settings import settings
from langflow.utils.logger import configure
logger = logging.getLogger(__name__)
app = typer.Typer()
def get_number_of_workers(workers=None):
@ -17,9 +18,28 @@ def get_number_of_workers(workers=None):
return workers
def update_settings(config: str):
"""Update the settings from a config file."""
if config:
settings.update_from_yaml(config)
@app.command()
def serve(
host: str = "127.0.0.1", workers: int = 1, timeout: int = 60, port: int = 7860
host: str = typer.Option("127.0.0.1", help="Host to bind the server to."),
workers: int = typer.Option(1, help="Number of worker processes."),
timeout: int = typer.Option(60, help="Worker timeout in seconds."),
port: int = typer.Option(7860, help="Port to listen on."),
config: str = typer.Option("config.yaml", help="Path to the configuration file."),
log_level: str = typer.Option("info", help="Logging level."),
log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file."),
):
"""
Run the Langflow server.
"""
configure(log_level=log_level, log_file=log_file)
update_settings(config)
app = create_app()
# get the directory of the current file
path = Path(__file__).parent
@ -39,10 +59,10 @@ def serve(
if platform.system() in ["Darwin", "Windows"]:
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
# MacOS requires a env variable to be set to use gunicorn
# MacOS requires an env variable to be set to use gunicorn
import uvicorn
uvicorn.run(app, host=host, port=port, log_level="info")
uvicorn.run(app, host=host, port=port, log_level=log_level)
else:
from langflow.server import LangflowApplication
@ -50,7 +70,7 @@ def serve(
def main():
typer.run(serve)
app()
if __name__ == "__main__":
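A minimal sketch of exercising the reworked CLI without starting a real server, assuming the package is installed and the Typer app lives in langflow.__main__ as above; asking for --help just prints the usage generated for the new --config, --log-level and --log-file options.
# Illustrative only; prints the help text generated from the options above.
from typer.testing import CliRunner
from langflow.__main__ import app

runner = CliRunner()
result = runner.invoke(app, ["--help"])
print(result.output)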

View file

@ -0,0 +1,27 @@
from pydantic import BaseModel, validator
class Code(BaseModel):
code: str
class Prompt(BaseModel):
template: str
# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}
class CodeValidationResponse(BaseModel):
imports: dict
function: dict
@validator("imports")
def validate_imports(cls, v):
return v or {"errors": []}
@validator("function")
def validate_function(cls, v):
return v or {"errors": []}
class PromptValidationResponse(BaseModel):
input_variables: list
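A quick sketch of how these response models behave; the payload values are illustrative.
# Illustrative: empty dicts fall back to {"errors": []} via the validators above.
from langflow.api.base import CodeValidationResponse, PromptValidationResponse

resp = CodeValidationResponse(imports={}, function={})
assert resp.imports == {"errors": []}
assert resp.function == {"errors": []}

prompt_resp = PromptValidationResponse(input_variables=["text", "language"])
print(prompt_resp.dict())  # {'input_variables': ['text', 'language']}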

View file

@ -1,12 +1,14 @@
import logging
from typing import Any, Dict
from fastapi import APIRouter, HTTPException
from langflow.interface.run import process_data_graph
from langflow.interface.run import process_graph
from langflow.interface.types import build_langchain_types_dict
# build router
router = APIRouter()
logger = logging.getLogger(__name__)
@router.get("/all")
@ -17,6 +19,8 @@ def get_all():
@router.post("/predict")
def get_load(data: Dict[str, Any]):
try:
return process_data_graph(data)
return process_graph(data)
except Exception as e:
return HTTPException(status_code=500, detail=str(e))
# Log stack trace
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e

View file

@ -1,58 +0,0 @@
from fastapi import APIRouter
from langflow.interface.listing import list_type
# build router
router = APIRouter(
prefix="/list",
tags=["list"],
)
@router.get("/")
def read_items():
"""List all components"""
return [
"chains",
"agents",
"prompts",
"llms",
"tools",
]
@router.get("/chains")
def list_chains():
"""List all chain types"""
return list_type("chains")
@router.get("/agents")
def list_agents():
"""List all agent types"""
# return list(agents.loading.AGENT_TO_CLASS.keys())
return list_type("agents")
@router.get("/prompts")
def list_prompts():
"""List all prompt types"""
return list_type("prompts")
@router.get("/llms")
def list_llms():
"""List all llm types"""
return list_type("llms")
@router.get("/memories")
def list_memories():
"""List all memory types"""
return list_type("memories")
@router.get("/tools")
def list_tools():
"""List all load tools"""
return list_type("tools")

View file

@ -1,63 +0,0 @@
from fastapi import APIRouter, HTTPException
from langflow.interface.signature import get_signature
# build router
router = APIRouter(
prefix="/signatures",
tags=["signatures"],
)
@router.get("/chain")
def get_chain(name: str):
"""Get the signature of a chain."""
try:
return get_signature(name, "chains")
except ValueError as exc:
raise HTTPException(status_code=404, detail="Chain not found") from exc
@router.get("/agent")
def get_agent(name: str):
"""Get the signature of an agent."""
try:
return get_signature(name, "agents")
except ValueError as exc:
raise HTTPException(status_code=404, detail="Agent not found") from exc
@router.get("/prompt")
def get_prompt(name: str):
"""Get the signature of a prompt."""
try:
return get_signature(name, "prompts")
except ValueError as exc:
raise HTTPException(status_code=404, detail="Prompt not found") from exc
@router.get("/llm")
def get_llm(name: str):
"""Get the signature of an llm."""
try:
return get_signature(name, "llms")
except ValueError as exc:
raise HTTPException(status_code=404, detail="LLM not found") from exc
@router.get("/memory")
def get_memory(name: str):
"""Get the signature of a memory."""
try:
return get_signature(name, "memories")
except ValueError as exc:
raise HTTPException(status_code=404, detail="Memory not found") from exc
@router.get("/tool")
def get_tool(name: str):
"""Get the signature of a tool."""
try:
return get_signature(name, "tools")
except ValueError as exc:
raise HTTPException(status_code=404, detail="Tool not found") from exc

View file

@ -0,0 +1,36 @@
from fastapi import APIRouter, HTTPException
from langflow.api.base import (
Code,
CodeValidationResponse,
Prompt,
PromptValidationResponse,
)
from langflow.graph.utils import extract_input_variables_from_prompt
from langflow.utils.logger import logger
from langflow.utils.validate import validate_code
# build router
router = APIRouter(prefix="/validate", tags=["validate"])
@router.post("/code", status_code=200, response_model=CodeValidationResponse)
def post_validate_code(code: Code):
try:
errors = validate_code(code.code)
return CodeValidationResponse(
imports=errors.get("imports", {}),
function=errors.get("function", {}),
)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e
@router.post("/prompt", status_code=200, response_model=PromptValidationResponse)
def post_validate_prompt(prompt: Prompt):
try:
input_variables = extract_input_variables_from_prompt(prompt.template)
return PromptValidationResponse(input_variables=input_variables)
except Exception as e:
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
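A hedged sketch of calling the new endpoints with httpx (added to the dev dependencies above); the host, port and route prefix are assumptions for a locally running server.
# Illustrative requests; adjust host/port/prefix to match your deployment.
import httpx

code_payload = {"code": "def upper_case(text: str) -> str:\n    return text.upper()\n"}
print(httpx.post("http://127.0.0.1:7860/validate/code", json=code_payload).json())
# -> {"imports": {"errors": []}, "function": {"errors": []}} when the snippet is valid

prompt_payload = {"template": "Tell me a joke about {topic}."}
print(httpx.post("http://127.0.0.1:7860/validate/prompt", json=prompt_payload).json())
# -> {"input_variables": ["topic"]}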

View file

72
src/backend/langflow/cache/utils.py vendored Normal file
View file

@ -0,0 +1,72 @@
import contextlib
import hashlib
import json
import os
import tempfile
from pathlib import Path
import dill # type: ignore
PREFIX = "langflow_cache"
def clear_old_cache_files(max_cache_size: int = 3):
cache_dir = Path(tempfile.gettempdir())
cache_files = list(cache_dir.glob(f"{PREFIX}_*.dill"))
if len(cache_files) > max_cache_size:
cache_files_sorted_by_mtime = sorted(
cache_files, key=lambda x: x.stat().st_mtime, reverse=True
)
for cache_file in cache_files_sorted_by_mtime[max_cache_size:]:
with contextlib.suppress(OSError):
os.remove(cache_file)
def filter_json(json_data):
filtered_data = json_data.copy()
# Remove 'viewport' and 'chatHistory' keys
if "viewport" in filtered_data:
del filtered_data["viewport"]
if "chatHistory" in filtered_data:
del filtered_data["chatHistory"]
# Filter nodes
if "nodes" in filtered_data:
for node in filtered_data["nodes"]:
if "position" in node:
del node["position"]
if "positionAbsolute" in node:
del node["positionAbsolute"]
if "selected" in node:
del node["selected"]
if "dragging" in node:
del node["dragging"]
return filtered_data
def compute_hash(graph_data):
graph_data = filter_json(graph_data)
cleaned_graph_json = json.dumps(graph_data, sort_keys=True)
return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest()
def save_cache(hash_val: str, chat_data, clean_old_cache_files: bool):
cache_path = Path(tempfile.gettempdir()) / f"{PREFIX}_{hash_val}.dill"
with cache_path.open("wb") as cache_file:
dill.dump(chat_data, cache_file)
if clean_old_cache_files:
clear_old_cache_files()
def load_cache(hash_val):
cache_path = Path(tempfile.gettempdir()) / f"{PREFIX}_{hash_val}.dill"
if cache_path.exists():
with cache_path.open("rb") as cache_file:
return dill.load(cache_file)
return None
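A minimal round-trip sketch for the cache helpers above; the graph dict stands in for real flow data exported by the frontend.
# Illustrative cache round-trip.
from langflow.cache.utils import compute_hash, load_cache, save_cache

graph_data = {"nodes": [], "edges": [], "viewport": {"x": 0, "y": 0}}
key = compute_hash(graph_data)  # viewport/position noise is stripped before hashing
save_cache(key, {"answer": "hello"}, clean_old_cache_files=True)
assert load_cache(key) == {"answer": "hello"}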

View file

@ -1,27 +1,57 @@
chains:
- LLMChain
- LLMMathChain
- LLMChecker
# - ConversationChain
- LLMCheckerChain
- ConversationChain
- SeriesCharacterChain
- MidJourneyPromptChain
- TimeTravelGuideChain
agents:
- ZeroShotAgent
- JsonAgent
- CSVAgent
- initialize_agent
prompts:
- PromptTemplate
- FewShotPromptTemplate
- ZeroShotPrompt
# Awaiting more tests before enabling these
# - ChatPromptTemplate
# - SystemMessagePromptTemplate
# - HumanMessagePromptTemplate
llms:
- OpenAI
- OpenAIChat
- AzureOpenAI
- ChatOpenAI
tools:
- Search
- PAL-MATH
- Calculator
- Serper Search
- Tool
- PythonFunction
- JsonSpec
wrappers:
- RequestsWrapper
toolkits:
- OpenAPIToolkit
- JsonToolkit
memories:
# - ConversationBufferMemory
- ConversationBufferMemory
- ConversationSummaryMemory
- ConversationKGMemory
embeddings: []
vectorstores: []
documentloaders: []
dev: false

View file

@ -1,42 +1,17 @@
from langchain.agents.mrkl import prompt
from langflow.template import nodes
# These should always be instantiated
CUSTOM_NODES = {
"prompts": {"ZeroShotPrompt": nodes.ZeroShotPromptNode()},
"tools": {"PythonFunction": nodes.PythonFunctionNode(), "Tool": nodes.ToolNode()},
"agents": {
"JsonAgent": nodes.JsonAgentNode(),
"CSVAgent": nodes.CSVAgentNode(),
"initialize_agent": nodes.InitializeAgentNode(),
},
}
def get_custom_prompts():
"""Get custom prompts."""
return {
"ZeroShotPrompt": {
"template": {
"_type": "zero_shot",
"prefix": {
"type": "str",
"required": False,
"placeholder": "",
"list": False,
"show": True,
"multiline": True,
"value": prompt.PREFIX,
},
"suffix": {
"type": "str",
"required": True,
"placeholder": "",
"list": False,
"show": True,
"multiline": True,
"value": prompt.SUFFIX,
},
"format_instructions": {
"type": "str",
"required": False,
"placeholder": "",
"list": False,
"show": True,
"multiline": True,
"value": prompt.FORMAT_INSTRUCTIONS,
},
},
"description": "Prompt template for Zero Shot Agent.",
"base_classes": ["BasePromptTemplate"],
}
}
def get_custom_nodes(node_type: str):
"""Get custom nodes."""
return CUSTOM_NODES.get(node_type, {})

View file

@ -0,0 +1,4 @@
from langflow.graph.base import Edge, Node
from langflow.graph.graph import Graph
__all__ = ["Graph", "Node", "Edge"]

View file

@ -0,0 +1,244 @@
# Description: Graph class for building a graph of nodes and edges
# Insights:
# - Defer prompts building to the last moment or when they have all the tools
# - Build each inner agent first, then build the outer agent
import types
from copy import deepcopy
from typing import Any, Dict, List, Optional
from langflow.graph.constants import DIRECT_TYPES
from langflow.graph.utils import load_file
from langflow.interface import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.utils.logger import logger
class Node:
def __init__(self, data: Dict, base_type: Optional[str] = None) -> None:
self.id: str = data["id"]
self._data = data
self.edges: List[Edge] = []
self.base_type: Optional[str] = base_type
self._parse_data()
self._built_object = None
self._built = False
def _parse_data(self) -> None:
self.data = self._data["data"]
self.output = self.data["node"]["base_classes"]
template_dicts = {
key: value
for key, value in self.data["node"]["template"].items()
if isinstance(value, dict)
}
self.required_inputs = [
template_dicts[key]["type"]
for key, value in template_dicts.items()
if value["required"]
]
self.optional_inputs = [
template_dicts[key]["type"]
for key, value in template_dicts.items()
if not value["required"]
]
template_dict = self.data["node"]["template"]
self.node_type = (
self.data["type"] if "Tool" not in self.output else template_dict["_type"]
)
if self.base_type is None:
for base_type, value in ALL_TYPES_DICT.items():
if self.node_type in value:
self.base_type = base_type
break
def _build_params(self):
# Some params are required, some are optional
# but most importantly, some params are python base classes
# like str and others are LangChain objects like LLMChain, BasePromptTemplate
# so we need to be able to distinguish between the two
# The dicts with "type" == "str" are the ones that are python base classes
# and most likely have a "value" key
# So for each key besides "_type" in the template dict, we have a dict
# with a "type" key. If the type is not "str", then we need to get the
# edge that connects to that node and get the Node with the required data
# and use that as the value for the param
# If the type is "str", then we need to get the value of the "value" key
# and use that as the value for the param
template_dict = {
key: value
for key, value in self.data["node"]["template"].items()
if isinstance(value, dict)
}
params = {}
for key, value in template_dict.items():
if key == "_type":
continue
# If the type is not transformable to a python base class
# then we need to get the edge that connects to this node
if value.get("type") == "file":
# Load the type in value.get('suffixes') using
# what is inside value.get('content')
# value.get('value') is the file name
file_name = value.get("value")
content = value.get("content")
type_to_load = value.get("suffixes")
loaded_dict = load_file(file_name, content, type_to_load)
params[key] = loaded_dict
# NOTE: ideally we would check that the type *is* in a known set of
# node-backed types, rather than checking that it is not a direct type
elif value.get("type") not in DIRECT_TYPES:
# Get the edge that connects to this node
edges = [
edge
for edge in self.edges
if edge.target == self and edge.matched_type in value["type"]
]
# Get the output of the node that the edge connects to
# If value['list'] is True, params[key] may be fed by more than one edge,
# so all matching edge sources are collected into a list;
# otherwise a single source (or the raw value) is used
if value["required"] and not edges:
# If a required parameter is not found, raise an error
raise ValueError(
f"Required input {key} for module {self.node_type} not found"
)
elif value["list"]:
# If this is a list parameter, append all sources to a list
params[key] = [edge.source for edge in edges]
elif edges:
# If a single parameter is found, use its source
params[key] = edges[0].source
elif value["required"] or value.get("value"):
params[key] = value["value"]
# Add _type to params
self.params = params
def _build(self):
# The params dict is used to build the module
# it contains values and keys that point to nodes which
# have their own params dict
# When build is called, we iterate through the params dict
# and if the value is a node, we call build on that node
# and use the output of that build as the value for the param
# if the value is not a node, then we use the value as the param
# and continue
# Another aspect is that the node_type is the class that we need to import
# and instantiate with these built params
logger.debug(f"Building {self.node_type}")
# Build each node in the params dict
for key, value in self.params.copy().items():
# Check if Node or list of Nodes and not self
# to avoid recursion
if isinstance(value, Node):
if value == self:
del self.params[key]
continue
result = value.build()
# If the key is "func", then we need to use the run method
if key == "func" and not isinstance(result, types.FunctionType):
# func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
# so we need to check if there is an attribute called run
if hasattr(result, "run"):
result = result.run # type: ignore
elif hasattr(result, "get_function"):
result = result.get_function() # type: ignore
self.params[key] = result
elif isinstance(value, list) and all(
isinstance(node, Node) for node in value
):
self.params[key] = [node.build() for node in value] # type: ignore
# Get the class from LANGCHAIN_TYPES_DICT
# and instantiate it with the params
# and return the instance
try:
self._built_object = loading.instantiate_class(
node_type=self.node_type,
base_type=self.base_type,
params=self.params,
)
except Exception as exc:
raise ValueError(
f"Error building node {self.node_type}: {str(exc)}"
) from exc
if self._built_object is None:
raise ValueError(f"Node type {self.node_type} not found")
self._built = True
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)
def add_edge(self, edge: "Edge") -> None:
self.edges.append(edge)
def __repr__(self) -> str:
return f"Node(id={self.id}, data={self.data})"
def __eq__(self, __o: object) -> bool:
return self.id == __o.id if isinstance(__o, Node) else False
def __hash__(self) -> int:
return id(self)
class Edge:
def __init__(self, source: "Node", target: "Node"):
self.source: "Node" = source
self.target: "Node" = target
self.validate_edge()
def validate_edge(self) -> None:
# Validate that the outputs of the source node are valid inputs
# for the target node
self.source_types = self.source.output
self.target_reqs = self.target.required_inputs + self.target.optional_inputs
# Both lists contain strings and sometimes a string contains the value we are
# looking for, e.g. source_types=["Chain"] and target_reqs=["LLMChain"],
# so we need to check if any of the strings in source_types is in target_reqs
self.valid = any(
output in target_req
for output in self.source_types
for target_req in self.target_reqs
)
# Get what type of input the target node is expecting
self.matched_type = next(
(
output
for output in self.source_types
for target_req in self.target_reqs
if output in target_req
),
None,
)
if self.matched_type is None:
logger.debug(self.source_types)
logger.debug(self.target_reqs)
raise ValueError(
f"Edge between {self.source.node_type} and {self.target.node_type} "
f"has no matched type"
)
def __repr__(self) -> str:
return (
f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
f", matched_type={self.matched_type})"
)

View file

@ -0,0 +1 @@
DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any"]

View file

@ -0,0 +1,159 @@
from typing import Dict, List, Union
from langflow.graph.base import Edge, Node
from langflow.graph.nodes import (
AgentNode,
ChainNode,
FileToolNode,
LLMNode,
MemoryNode,
PromptNode,
ToolkitNode,
ToolNode,
WrapperNode,
)
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.tools.constants import FILE_TOOLS
from langflow.interface.tools.util import get_tools_dict
from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.memories.base import memory_creator
from langflow.utils import payload
class Graph:
def __init__(
self,
nodes: List[Dict[str, Union[str, Dict[str, Union[str, List[str]]]]]],
edges: List[Dict[str, str]],
) -> None:
self._nodes = nodes
self._edges = edges
self._build_graph()
def _build_graph(self) -> None:
self.nodes = self._build_nodes()
self.edges = self._build_edges()
for edge in self.edges:
edge.source.add_edge(edge)
edge.target.add_edge(edge)
# This is a hack to make sure that the LLM node is sent to
# the toolkit node
llm_node = None
for node in self.nodes:
node._build_params()
if isinstance(node, LLMNode):
llm_node = node
for node in self.nodes:
if isinstance(node, ToolkitNode):
node.params["llm"] = llm_node
# remove invalid nodes
self.nodes = [node for node in self.nodes if self._validate_node(node)]
def _validate_node(self, node: Node) -> bool:
# All nodes that do not have edges are invalid
return len(node.edges) > 0
def get_node(self, node_id: str) -> Union[None, Node]:
return next((node for node in self.nodes if node.id == node_id), None)
def get_nodes_with_target(self, node: Node) -> List[Node]:
connected_nodes: List[Node] = [
edge.source for edge in self.edges if edge.target == node
]
return connected_nodes
def build(self) -> List[Node]:
# Get root node
root_node = payload.get_root_node(self)
if root_node is None:
raise ValueError("No root node found")
return root_node.build()
def get_node_neighbors(self, node: Node) -> Dict[Node, int]:
neighbors: Dict[Node, int] = {}
for edge in self.edges:
if edge.source == node:
neighbor = edge.target
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
elif edge.target == node:
neighbor = edge.source
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
return neighbors
def _build_edges(self) -> List[Edge]:
# Edge takes two nodes as arguments, so we need to build the nodes first
# and then build the edges
# if we can't find a node, we raise an error
edges: List[Edge] = []
for edge in self._edges:
source = self.get_node(edge["source"])
target = self.get_node(edge["target"])
if source is None:
raise ValueError(f"Source node {edge['source']} not found")
if target is None:
raise ValueError(f"Target node {edge['target']} not found")
edges.append(Edge(source, target))
return edges
def _build_nodes(self) -> List[Node]:
nodes: List[Node] = []
for node in self._nodes:
node_data = node["data"]
node_type: str = node_data["type"] # type: ignore
node_lc_type: str = node_data["node"]["template"]["_type"] # type: ignore
if node_type in prompt_creator.to_list():
nodes.append(PromptNode(node))
elif (
node_type in agent_creator.to_list()
or node_lc_type in agent_creator.to_list()
):
nodes.append(AgentNode(node))
elif node_type in chain_creator.to_list():
nodes.append(ChainNode(node))
elif (
node_type in tool_creator.to_list()
or node_lc_type in get_tools_dict().keys()
):
if node_type in FILE_TOOLS:
nodes.append(FileToolNode(node))
nodes.append(ToolNode(node))
elif node_type in toolkits_creator.to_list():
nodes.append(ToolkitNode(node))
elif node_type in wrapper_creator.to_list():
nodes.append(WrapperNode(node))
elif (
node_type in llm_creator.to_list()
or node_lc_type in llm_creator.to_list()
):
nodes.append(LLMNode(node))
elif (
node_type in memory_creator.to_list()
or node_lc_type in memory_creator.to_list()
):
nodes.append(MemoryNode(node))
else:
nodes.append(Node(node))
return nodes
def get_children_by_node_type(self, node: Node, node_type: str) -> List[Node]:
children = []
node_types = [node.data["type"]]
if "node" in node.data:
node_types += node.data["node"]["base_classes"]
if node_type in node_types:
children.append(node)
return children
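A hedged sketch of driving the Graph class from an exported flow; the file name and the exact JSON shape are assumptions about what the frontend exports.
# Illustrative: build a LangChain object from an exported flow file.
import json
from langflow.graph import Graph

with open("flow.json") as f:  # assumption: a flow exported from the UI
    flow = json.load(f)

data = flow["data"]  # assumption: payload shaped as {"nodes": [...], "edges": [...]}
graph = Graph(nodes=data["nodes"], edges=data["edges"])
built = graph.build()  # builds from the root node down, e.g. a chain or agent executor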

View file

@ -0,0 +1,156 @@
from copy import deepcopy
from typing import Any, Dict, List, Optional, Union
from langflow.graph.base import Node
from langflow.graph.utils import extract_input_variables_from_prompt
class AgentNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="agents")
self.tools: List[ToolNode] = []
self.chains: List[ChainNode] = []
def _set_tools_and_chains(self) -> None:
for edge in self.edges:
source_node = edge.source
if isinstance(source_node, ToolNode):
self.tools.append(source_node)
elif isinstance(source_node, ChainNode):
self.chains.append(source_node)
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._set_tools_and_chains()
# First, build the tools
for tool_node in self.tools:
tool_node.build()
# Next, build the chains and the rest
for chain_node in self.chains:
chain_node.build(tools=self.tools)
self._build()
return deepcopy(self._built_object)
class ToolNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="tools")
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)
class PromptNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="prompts")
def build(
self,
force: bool = False,
tools: Optional[Union[List[Node], List[ToolNode]]] = None,
) -> Any:
if not self._built or force:
if "input_variables" not in self.params:
self.params["input_variables"] = []
# Check if it is a ZeroShotPrompt and needs a tool
if "ShotPrompt" in self.node_type:
tools = (
[tool_node.build() for tool_node in tools]
if tools is not None
else []
)
self.params["tools"] = tools
prompt_params = [
key
for key, value in self.params.items()
if isinstance(value, str) and key != "format_instructions"
]
else:
prompt_params = ["template"]
for param in prompt_params:
prompt_text = self.params[param]
variables = extract_input_variables_from_prompt(prompt_text)
self.params["input_variables"].extend(variables)
self.params["input_variables"] = list(set(self.params["input_variables"]))
self._build()
return deepcopy(self._built_object)
class ChainNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="chains")
def build(
self,
force: bool = False,
tools: Optional[Union[List[Node], List[ToolNode]]] = None,
) -> Any:
if not self._built or force:
# Check if the chain requires a PromptNode
for key, value in self.params.items():
if isinstance(value, PromptNode):
# Build the PromptNode, passing the tools if available
self.params[key] = value.build(tools=tools, force=force)
self._build()
return deepcopy(self._built_object)
class LLMNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="llms")
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)
class ToolkitNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="toolkits")
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)
class FileToolNode(ToolNode):
def __init__(self, data: Dict):
super().__init__(data)
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)
class WrapperNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="wrappers")
def build(self, force: bool = False) -> Any:
if not self._built or force:
if "headers" in self.params:
self.params["headers"] = eval(self.params["headers"])
self._build()
return deepcopy(self._built_object)
class MemoryNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="memory")
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)

View file

@ -0,0 +1,60 @@
import base64
import csv
import io
import json
import re
from typing import Any
import yaml
def load_file(file_name, file_content, accepted_types) -> Any:
"""Load a file from a string."""
# Check if the file is accepted
if not any(file_name.endswith(suffix) for suffix in accepted_types):
raise ValueError(f"File {file_name} is not accepted")
# Get the suffix
suffix = file_name.split(".")[-1]
# file_content == 'data:application/x-yaml;base64,b3BlbmFwaTogIjMuMC4wIg...'
data = file_content.split(",")[1]
decoded_bytes = base64.b64decode(data)
# Convert the bytes object to a string
decoded_string = decoded_bytes.decode("utf-8")
if suffix == "json":
# Return the json content
return json.loads(decoded_string)
elif suffix in ["yaml", "yml"]:
# Return the yaml content
loaded_yaml = yaml.load(decoded_string, Loader=yaml.FullLoader)
try:
from langchain.agents.agent_toolkits.openapi.spec import reduce_openapi_spec # type: ignore
return reduce_openapi_spec(loaded_yaml)
except ImportError:
return loaded_yaml
elif suffix == "csv":
# Load the csv content
csv_reader = csv.DictReader(io.StringIO(decoded_string))
return list(csv_reader)
else:
raise ValueError(f"File {file_name} is not accepted")
def validate_prompt(prompt: str):
"""Validate prompt."""
if extract_input_variables_from_prompt(prompt):
return prompt
return fix_prompt(prompt)
def fix_prompt(prompt: str):
"""Fix prompt."""
return prompt + " {input}"
def extract_input_variables_from_prompt(prompt: str) -> list[str]:
"""Extract input variables from prompt."""
return re.findall(r"{(.*?)}", prompt)
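Two quick illustrations of the prompt helpers above.
from langflow.graph.utils import extract_input_variables_from_prompt, fix_prompt

# Variables are whatever appears between braces, in order of appearance.
assert extract_input_variables_from_prompt("Translate {text} to {language}") == ["text", "language"]
# Prompts with no variables get a trailing "{input}" appended.
assert fix_prompt("Summarize the conversation") == "Summarize the conversation {input}"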

View file

@ -0,0 +1,3 @@
from langflow.interface.agents.base import AgentCreator
__all__ = ["AgentCreator"]

View file

@ -0,0 +1,49 @@
from typing import Dict, List, Optional
from langchain.agents import loading
from langflow.custom.customs import get_custom_nodes
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class AgentCreator(LangChainTypeCreator):
type_name: str = "agents"
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = loading.AGENT_TO_CLASS
# Add the custom agents (JsonAgent, CSVAgent, initialize_agent) to the mapping
for name, agent in CUSTOM_AGENTS.items():
# TODO: validate AgentType
self.type_dict[name] = agent # type: ignore
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
try:
if name in get_custom_nodes(self.type_name).keys():
return get_custom_nodes(self.type_name)[name]
return build_template_from_class(
name, self.type_to_loader_dict, add_function=True
)
except ValueError as exc:
raise ValueError("Agent not found") from exc
# Build the list of agent names enabled in settings (or everything in dev mode)
def to_list(self) -> List[str]:
names = []
for _, agent in self.type_to_loader_dict.items():
agent_name = (
agent.function_name()
if hasattr(agent, "function_name")
else agent.__name__
)
if agent_name in settings.agents or settings.dev:
names.append(agent_name)
return names
agent_creator = AgentCreator()
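A short sketch of how a creator instance is consumed; the agent name passed to get_signature is illustrative.
from langflow.interface.agents.base import agent_creator

print(agent_creator.to_list())  # agent names enabled by settings (everything when dev is true)
signature = agent_creator.get_signature("JsonAgent")  # illustrative name; yields the template the frontend renders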

View file

@ -0,0 +1,131 @@
from typing import Any, List, Optional
from langchain import LLMChain
from langchain.agents import AgentExecutor, Tool, ZeroShotAgent, initialize_agent
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
from langchain.agents.agent_toolkits.pandas.prompt import SUFFIX as PANDAS_SUFFIX
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
from langchain.llms.base import BaseLLM
from langchain.memory.chat_memory import BaseChatMemory
from langchain.schema import BaseLanguageModel
from langchain.tools.python.tool import PythonAstREPLTool
class JsonAgent(AgentExecutor):
"""Json agent"""
@staticmethod
def function_name():
return "JsonAgent"
@classmethod
def initialize(cls, *args, **kwargs):
return cls.from_toolkit_and_llm(*args, **kwargs)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@classmethod
def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
tools = toolkit.get_tools()
tool_names = [tool.name for tool in tools]
prompt = ZeroShotAgent.create_prompt(
tools,
prefix=JSON_PREFIX,
suffix=JSON_SUFFIX,
format_instructions=FORMAT_INSTRUCTIONS,
input_variables=None,
)
llm_chain = LLMChain(
llm=llm,
prompt=prompt,
)
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
def run(self, *args, **kwargs):
return super().run(*args, **kwargs)
class CSVAgent(AgentExecutor):
"""CSV agent"""
@staticmethod
def function_name():
return "CSVAgent"
@classmethod
def initialize(cls, *args, **kwargs):
return cls.from_toolkit_and_llm(*args, **kwargs)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@classmethod
def from_toolkit_and_llm(
cls,
path: dict,
llm: BaseLanguageModel,
pandas_kwargs: Optional[dict] = None,
**kwargs: Any
):
import pandas as pd # type: ignore
_kwargs = pandas_kwargs or {}
df = pd.DataFrame.from_dict(path, **_kwargs)
tools = [PythonAstREPLTool(locals={"df": df})] # type: ignore
prompt = ZeroShotAgent.create_prompt(
tools,
prefix=PANDAS_PREFIX,
suffix=PANDAS_SUFFIX,
input_variables=["df", "input", "agent_scratchpad"],
)
partial_prompt = prompt.partial(df=str(df.head()))
llm_chain = LLMChain(
llm=llm,
prompt=partial_prompt,
)
tool_names = [tool.name for tool in tools]
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
def run(self, *args, **kwargs):
return super().run(*args, **kwargs)
class InitializeAgent(AgentExecutor):
"""Implementation of initialize_agent function"""
@staticmethod
def function_name():
return "initialize_agent"
@classmethod
def initialize(
cls, llm: BaseLLM, tools: List[Tool], agent: str, memory: BaseChatMemory
):
return initialize_agent(
tools=tools,
llm=llm,
# LangChain now uses Enum for agent, but we still support string
agent=agent, # type: ignore
memory=memory,
return_intermediate_steps=True,
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def run(self, *args, **kwargs):
return super().run(*args, **kwargs)
CUSTOM_AGENTS = {
"JsonAgent": JsonAgent,
"CSVAgent": CSVAgent,
"initialize_agent": InitializeAgent,
}
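A hedged sketch of wiring up one of these custom agents directly; the model, the data and the presence of an OpenAI key are assumptions.
# Illustrative only; requires OPENAI_API_KEY in the environment.
from langchain.llms import OpenAI
from langflow.interface.agents.custom import CSVAgent

llm = OpenAI(temperature=0)
records = {"name": ["Ada", "Linus"], "age": [36, 53]}  # stands in for parsed CSV content
agent = CSVAgent.from_toolkit_and_llm(path=records, llm=llm)
print(agent.run("What is the average age?"))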

View file

@ -0,0 +1,45 @@
from langchain import LLMChain
from langchain.agents import AgentExecutor, ZeroShotAgent
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
from langchain.schema import BaseLanguageModel
class MalfoyAgent(AgentExecutor):
"""Json agent"""
prefix = "Malfoy: "
@classmethod
def initialize(cls, *args, **kwargs):
return cls.from_toolkit_and_llm(*args, **kwargs)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@classmethod
def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
tools = toolkit.get_tools()
tool_names = [tool.name for tool in tools]
prompt = ZeroShotAgent.create_prompt(
tools,
prefix=JSON_PREFIX,
suffix=JSON_SUFFIX,
format_instructions=FORMAT_INSTRUCTIONS,
input_variables=None,
)
llm_chain = LLMChain(
llm=llm,
prompt=prompt,
)
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
def run(self, *args, **kwargs):
return super().run(*args, **kwargs)
PREBUILT_AGENTS = {
"MalfoyAgent": MalfoyAgent,
}

View file

@ -0,0 +1,75 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type, Union
from pydantic import BaseModel
from langflow.template.base import FrontendNode, Template, TemplateField
# Assuming necessary imports for Field, Template, and FrontendNode classes
class LangChainTypeCreator(BaseModel, ABC):
type_name: str
type_dict: Optional[Dict] = None
@property
def frontend_node_class(self) -> Type[FrontendNode]:
"""The class type of the FrontendNode created in frontend_node."""
return FrontendNode
@property
@abstractmethod
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
raise NotImplementedError
return self.type_dict
@abstractmethod
def get_signature(self, name: str) -> Union[Optional[Dict[Any, Any]], FrontendNode]:
pass
@abstractmethod
def to_list(self) -> List[str]:
pass
def to_dict(self) -> Dict:
result: Dict = {self.type_name: {}}
for name in self.to_list():
# frontend_node.to_dict() returns a dict with the following structure:
# {name: {template: {fields}, description: str}}
# so we should update the result dict
result[self.type_name].update(self.frontend_node(name).to_dict())
return result
def frontend_node(self, name) -> FrontendNode:
signature = self.get_signature(name)
if signature is None:
raise ValueError(f"{name} not found")
if isinstance(signature, FrontendNode):
return signature
fields = [
TemplateField(
name=key,
field_type=value["type"],
required=value.get("required", False),
placeholder=value.get("placeholder", ""),
is_list=value.get("list", False),
show=value.get("show", True),
multiline=value.get("multiline", False),
value=value.get("value", None),
suffixes=value.get("suffixes", []),
file_types=value.get("fileTypes", []),
content=value.get("content", None),
)
for key, value in signature["template"].items()
if key != "_type"
]
template = Template(type_name=name, fields=fields)
return self.frontend_node_class(
template=template,
description=signature.get("description", ""),
base_classes=signature["base_classes"],
name=name,
)
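To make the contract concrete, here is a minimal, hypothetical subclass (the "fakes" type and its single node are invented for illustration and assume langflow is importable); it shows which members a concrete creator must supply for to_dict() to work:

# Sketch: a toy creator exercising the LangChainTypeCreator base class above.
from typing import Dict, List, Optional
from langflow.interface.base import LangChainTypeCreator

class FakeCreator(LangChainTypeCreator):
    type_name: str = "fakes"

    @property
    def type_to_loader_dict(self) -> Dict:
        if self.type_dict is None:
            self.type_dict = {"FakeNode": object}
        return self.type_dict

    def get_signature(self, name: str) -> Optional[Dict]:
        # Hand-written signature in the same shape build_template_from_class returns
        return {
            "template": {"_type": name, "text": {"type": "str", "required": True}},
            "description": "Illustrative node, not a real LangChain type.",
            "base_classes": ["object"],
        }

    def to_list(self) -> List[str]:
        return list(self.type_to_loader_dict.keys())

# FakeCreator().to_dict() -> {"fakes": {"FakeNode": {"template": ..., "description": ..., "base_classes": [...]}}}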

View file

@@ -0,0 +1,3 @@
from langflow.interface.chains.base import ChainCreator
__all__ = ["ChainCreator"]

View file

@@ -0,0 +1,45 @@
from typing import Dict, List, Optional
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import chain_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
# Assuming necessary imports for Field, Template, and FrontendNode classes
class ChainCreator(LangChainTypeCreator):
type_name: str = "chains"
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = chain_type_to_cls_dict
from langflow.interface.chains.custom import CUSTOM_CHAINS
self.type_dict.update(CUSTOM_CHAINS)
# Filter according to settings.chains
self.type_dict = {
name: chain
for name, chain in self.type_dict.items()
if name in settings.chains or settings.dev
}
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
try:
if name in get_custom_nodes(self.type_name).keys():
return get_custom_nodes(self.type_name)[name]
return build_template_from_class(name, self.type_to_loader_dict)
except ValueError as exc:
raise ValueError("Chain not found") from exc
def to_list(self) -> List[str]:
custom_chains = list(get_custom_nodes("chains").keys())
default_chains = list(self.type_to_loader_dict.keys())
return default_chains + custom_chains
chain_creator = ChainCreator()
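A quick usage sketch (assuming a config.yaml whose chains list, or dev mode, allows some chains):

# Sketch: querying the singleton defined above.
from langflow.interface.chains.base import chain_creator

print(chain_creator.to_list())                  # chain names permitted by settings.chains (all of them in dev mode)
print(list(chain_creator.to_dict()["chains"]))  # the same names, keyed under the creator's type_name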

View file

@@ -0,0 +1,101 @@
from typing import Dict, Optional, Type
from langchain.chains import ConversationChain
from langchain.memory.buffer import ConversationBufferMemory
from langchain.schema import BaseMemory
from pydantic import Field, root_validator
from langflow.graph.utils import extract_input_variables_from_prompt
DEFAULT_SUFFIX = """"
Current conversation:
{history}
Human: {input}
{ai_prefix}"""
class BaseCustomChain(ConversationChain):
"""BaseCustomChain is a chain you can use to have a conversation with a custom character."""
template: Optional[str]
ai_prefix_key: Optional[str]
"""Field to use as the ai_prefix. It needs to be set and has to be in the template"""
@root_validator(pre=False)
def build_template(cls, values):
format_dict = {}
input_variables = extract_input_variables_from_prompt(values["template"])
if values.get("ai_prefix_key", None) is None:
values["ai_prefix_key"] = values["memory"].ai_prefix
for key in input_variables:
new_value = values.get(key, f"{{{key}}}")
format_dict[key] = new_value
if key == values.get("ai_prefix_key", None):
values["memory"].ai_prefix = new_value
values["template"] = values["template"].format(**format_dict)
values["template"] = values["template"]
values["input_variables"] = extract_input_variables_from_prompt(
values["template"]
)
values["prompt"].template = values["template"]
values["prompt"].input_variables = values["input_variables"]
return values
class SeriesCharacterChain(BaseCustomChain):
"""SeriesCharacterChain is a chain you can use to have a conversation with a character from a series."""
character: str
series: str
template: Optional[
str
] = """I want you to act like {character} from {series}.
I want you to respond and answer like {character}. do not write any explanations. only answer like {character}.
You must know all of the knowledge of {character}.
Current conversation:
{history}
Human: {input}
{character}:"""
memory: BaseMemory = Field(default_factory=ConversationBufferMemory)
ai_prefix_key: Optional[str] = "character"
"""Default memory store."""
class MidJourneyPromptChain(BaseCustomChain):
"""MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts."""
template: Optional[
str
] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program.
Your job is to provide detailed and creative descriptions that will inspire unique and interesting images from the AI.
Keep in mind that the AI is capable of understanding a wide range of language and can interpret abstract concepts, so feel free to be as imaginative and descriptive as possible.
For example, you could describe a scene from a futuristic city, or a surreal landscape filled with strange creatures.
The more detailed and imaginative your description, the more interesting the resulting image will be. Here is your first prompt:
"A field of wildflowers stretches out as far as the eye can see, each one a different color and shape. In the distance, a massive tree towers over the landscape, its branches reaching up to the sky like tentacles.\"
Current conversation:
{history}
Human: {input}
AI:""" # noqa: E501
class TimeTravelGuideChain(BaseCustomChain):
template: Optional[
str
] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information.
Current conversation:
{history}
Human: {input}
AI:""" # noqa: E501
CUSTOM_CHAINS: Dict[str, Type[ConversationChain]] = {
"SeriesCharacterChain": SeriesCharacterChain,
"MidJourneyPromptChain": MidJourneyPromptChain,
"TimeTravelGuideChain": TimeTravelGuideChain,
}
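As a usage sketch (the OpenAI LLM is an assumption and needs OPENAI_API_KEY; any LangChain LLM would do), construction fills the {character}/{series} slots and rewires the memory prefix via the build_template validator above:

# Sketch: driving one of the custom chains registered above.
from langchain.llms import OpenAI
from langflow.interface.chains.custom import SeriesCharacterChain

llm = OpenAI(temperature=0.7)  # assumes OPENAI_API_KEY is set
chain = SeriesCharacterChain(character="Sherlock Holmes", series="Sherlock", llm=llm)
print(chain.prompt.input_variables)   # only {history} and {input} survive the validator
print(chain.run("Where is Watson?"))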

View file

@@ -0,0 +1,37 @@
from typing import Callable, Optional
from pydantic import BaseModel, validator
from langflow.utils import validate
class Function(BaseModel):
code: str
function: Optional[Callable] = None
imports: Optional[str] = None
# Eval code and store the function
def __init__(self, **data):
super().__init__(**data)
# Validate the function
@validator("code")
def validate_func(cls, v):
try:
validate.eval_function(v)
except Exception as e:
raise e
return v
def get_function(self):
"""Get the function"""
function_name = validate.extract_function_name(self.code)
return validate.create_function(self.code, function_name)
class PythonFunction(Function):
"""Python function"""
code: str
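A short sketch of the intended round trip (it relies on the validate helpers referenced above behaving as their names suggest):

# Sketch: turning a code string into a callable via the Function model above.
from langflow.interface.custom.types import PythonFunction

src = "def greet(name: str) -> str:\n    return f'Hello, {name}!'"
fn_model = PythonFunction(code=src)   # the validator eval-checks the source first
greet = fn_model.get_function()
print(greet("Langflow"))              # Hello, Langflow!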

View file

@@ -1,43 +1,71 @@
## LLM
from typing import Any
from langchain import llms
from langchain.llms.openai import OpenAIChat
## LLM
from langchain import (
chains,
document_loaders,
embeddings,
llms,
memory,
requests,
vectorstores,
)
from langchain.agents import agent_toolkits
from langchain.chat_models import ChatOpenAI
from langflow.interface.importing.utils import import_class
## LLM
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["openai-chat"] = OpenAIChat
## Memory
# from langchain.memory.buffer_window import ConversationBufferWindowMemory
# from langchain.memory.chat_memory import ChatMessageHistory
# from langchain.memory.combined import CombinedMemory
# from langchain.memory.entity import ConversationEntityMemory
# from langchain.memory.kg import ConversationKGMemory
# from langchain.memory.readonly import ReadOnlySharedMemory
# from langchain.memory.simple import SimpleMemory
# from langchain.memory.summary import ConversationSummaryMemory
# from langchain.memory.summary_buffer import ConversationSummaryBufferMemory
memory_type_to_cls_dict: dict[str, Any] = {
# "CombinedMemory": CombinedMemory,
# "ConversationBufferWindowMemory": ConversationBufferWindowMemory,
# "ConversationBufferMemory": ConversationBufferMemory,
# "SimpleMemory": SimpleMemory,
# "ConversationSummaryBufferMemory": ConversationSummaryBufferMemory,
# "ConversationKGMemory": ConversationKGMemory,
# "ConversationEntityMemory": ConversationEntityMemory,
# "ConversationSummaryMemory": ConversationSummaryMemory,
# "ChatMessageHistory": ChatMessageHistory,
# "ConversationStringBufferMemory": ConversationStringBufferMemory,
# "ReadOnlySharedMemory": ReadOnlySharedMemory,
}
llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore
## Chain
# from langchain.chains.loading import type_to_loader_dict
# from langchain.chains.conversation.base import ConversationChain
chain_type_to_cls_dict: dict[str, Any] = {
chain_name: import_class(f"langchain.chains.{chain_name}")
for chain_name in chains.__all__
}
# chain_type_to_cls_dict = type_to_loader_dict
# chain_type_to_cls_dict["conversation_chain"] = ConversationChain
toolkit_type_to_loader_dict: dict[str, Any] = {
toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
# if toolkit_name is lower case it is a loader
for toolkit_name in agent_toolkits.__all__
if toolkit_name.islower()
}
toolkit_type_to_cls_dict: dict[str, Any] = {
toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
# if toolkit_name is not lower case it is a class
for toolkit_name in agent_toolkits.__all__
if not toolkit_name.islower()
}
## Memories
memory_type_to_cls_dict: dict[str, Any] = {
memory_name: import_class(f"langchain.memory.{memory_name}")
for memory_name in memory.__all__
}
## Wrappers
wrapper_type_to_cls_dict: dict[str, Any] = {
wrapper.__name__: wrapper for wrapper in [requests.RequestsWrapper]
}
## Embeddings
embedding_type_to_cls_dict: dict[str, Any] = {
embedding_name: import_class(f"langchain.embeddings.{embedding_name}")
for embedding_name in embeddings.__all__
}
## Vector Stores
vectorstores_type_to_cls_dict: dict[str, Any] = {
vectorstore_name: import_class(f"langchain.vectorstores.{vectorstore_name}")
for vectorstore_name in vectorstores.__all__
}
## Document Loaders
documentloaders_type_to_cls_dict: dict[str, Any] = {
documentloader_name: import_class(
f"langchain.document_loaders.{documentloader_name}"
)
for documentloader_name in document_loaders.__all__
}
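These mappings are plain name-to-class lookups; for example (a sketch, not part of the commit, and the exact contents depend on the installed langchain version):

# Sketch: reading the registries built above.
from langflow.interface.custom_lists import llm_type_to_cls_dict, memory_type_to_cls_dict

print(llm_type_to_cls_dict["openai-chat"])   # ChatOpenAI, per the override above
print(sorted(memory_type_to_cls_dict)[:5])   # first few names exported by langchain.memory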

View file

@@ -0,0 +1,31 @@
from typing import Dict, List, Optional
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class DocumentLoaderCreator(LangChainTypeCreator):
type_name: str = "documentloader"
@property
def type_to_loader_dict(self) -> Dict:
return documentloaders_type_to_cls_dict
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of a document loader."""
try:
return build_template_from_class(name, documentloaders_type_to_cls_dict)
except ValueError as exc:
raise ValueError(f"Documment Loader {name} not found") from exc
def to_list(self) -> List[str]:
return [
documentloader.__name__
for documentloader in self.type_to_loader_dict.values()
if documentloader.__name__ in settings.documentloaders or settings.dev
]
documentloader_creator = DocumentLoaderCreator()

View file

@@ -0,0 +1,31 @@
from typing import Dict, List, Optional
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import embedding_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class EmbeddingCreator(LangChainTypeCreator):
type_name: str = "embeddings"
@property
def type_to_loader_dict(self) -> Dict:
return embedding_type_to_cls_dict
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of an embedding."""
try:
return build_template_from_class(name, embedding_type_to_cls_dict)
except ValueError as exc:
raise ValueError(f"Embedding {name} not found") from exc
def to_list(self) -> List[str]:
return [
embedding.__name__
for embedding in self.type_to_loader_dict.values()
if embedding.__name__ in settings.embeddings or settings.dev
]
embedding_creator = EmbeddingCreator()

View file

@@ -0,0 +1,7 @@
from langflow.interface.importing.utils import import_by_type # noqa: F401
# This module is used to import any langchain class by name.
ALL = [
"import_by_type",
]

View file

@@ -0,0 +1,115 @@
# This module is used to import any langchain class by name.
import importlib
from typing import Any, Type
from langchain import PromptTemplate
from langchain.agents import Agent
from langchain.chains.base import Chain
from langchain.chat_models.base import BaseChatModel
from langchain.llms.base import BaseLLM
from langchain.tools import BaseTool
from langflow.interface.tools.util import get_tool_by_name
def import_module(module_path: str) -> Any:
"""Import module from module path"""
if "from" not in module_path:
# Import the module using the module path
return importlib.import_module(module_path)
# Split the module path into its components
_, module_path, _, object_name = module_path.split()
# Import the module using the module path
module = importlib.import_module(module_path)
return getattr(module, object_name)
def import_by_type(_type: str, name: str) -> Any:
"""Import class by type and name"""
if _type is None:
raise ValueError(f"Type cannot be None. Check if {name} is in the config file.")
func_dict = {
"agents": import_agent,
"prompts": import_prompt,
"llms": {"llm": import_llm, "chat": import_chat_llm},
"tools": import_tool,
"chains": import_chain,
"toolkits": import_toolkit,
"wrappers": import_wrapper,
"memory": import_memory,
}
if _type == "llms":
key = "chat" if "chat" in name.lower() else "llm"
loaded_func = func_dict[_type][key] # type: ignore
else:
loaded_func = func_dict[_type]
return loaded_func(name)
def import_chat_llm(llm: str) -> BaseChatModel:
"""Import chat llm from llm name"""
return import_class(f"langchain.chat_models.{llm}")
def import_memory(memory: str) -> Any:
"""Import memory from memory name"""
return import_module(f"from langchain.memory import {memory}")
def import_class(class_path: str) -> Any:
"""Import class from class path"""
module_path, class_name = class_path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, class_name)
def import_prompt(prompt: str) -> Type[PromptTemplate]:
"""Import prompt from prompt name"""
from langflow.interface.prompts.custom import CUSTOM_PROMPTS
if prompt == "ZeroShotPrompt":
return import_class("langchain.prompts.PromptTemplate")
elif prompt in CUSTOM_PROMPTS:
return CUSTOM_PROMPTS[prompt]
return import_class(f"langchain.prompts.{prompt}")
def import_wrapper(wrapper: str) -> Any:
"""Import wrapper from wrapper name"""
return import_module(f"from langchain.requests import {wrapper}")
def import_toolkit(toolkit: str) -> Any:
"""Import toolkit from toolkit name"""
return import_module(f"from langchain.agents.agent_toolkits import {toolkit}")
def import_agent(agent: str) -> Agent:
"""Import agent from agent name"""
# check for custom agent
return import_class(f"langchain.agents.{agent}")
def import_llm(llm: str) -> BaseLLM:
"""Import llm from llm name"""
return import_class(f"langchain.llms.{llm}")
def import_tool(tool: str) -> BaseTool:
"""Import tool from tool name"""
return get_tool_by_name(tool)
def import_chain(chain: str) -> Type[Chain]:
"""Import chain from chain name"""
from langflow.interface.chains.custom import CUSTOM_CHAINS
if chain in CUSTOM_CHAINS:
return CUSTOM_CHAINS[chain]
return import_class(f"langchain.chains.{chain}")

View file

@@ -1,82 +1,31 @@
from langchain import agents, chains, prompts
from langchain.agents.load_tools import get_all_tool_names
from langflow.custom import customs
from langflow.interface.custom_lists import (
llm_type_to_cls_dict,
memory_type_to_cls_dict,
)
from langflow.settings import settings
from langflow.utils import util
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.memories.base import memory_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.wrappers.base import wrapper_creator
def list_type(object_type: str):
"""List all components"""
def get_type_dict():
return {
"chains": list_chain_types,
"agents": list_agents,
"prompts": list_prompts,
"llms": list_llms,
"memories": list_memories,
"tools": list_tools,
}.get(object_type, lambda: "Invalid type")()
"agents": agent_creator.to_list(),
"prompts": prompt_creator.to_list(),
"llms": llm_creator.to_list(),
"tools": tool_creator.to_list(),
"chains": chain_creator.to_list(),
"memory": memory_creator.to_list(),
"toolkits": toolkits_creator.to_list(),
"wrappers": wrapper_creator.to_list(),
}
def list_agents():
"""List all agent types"""
return [
agent.__name__
for agent in agents.loading.AGENT_TO_CLASS.values()
if agent.__name__ in settings.agents or settings.dev
]
LANGCHAIN_TYPES_DICT = get_type_dict()
# Now we'll build a dict with Langchain types and ours
def list_prompts():
"""List all prompt types"""
custom_prompts = customs.get_custom_prompts()
library_prompts = [
prompt.__annotations__["return"].__name__
for prompt in prompts.loading.type_to_loader_dict.values()
if prompt.__annotations__["return"].__name__ in settings.prompts or settings.dev
]
return library_prompts + list(custom_prompts.keys())
def list_tools():
"""List all load tools"""
tools = []
for tool in get_all_tool_names():
tool_params = util.get_tool_params(util.get_tools_dict(tool))
if tool_params and tool_params["name"] in settings.tools or settings.dev:
tools.append(tool_params["name"])
return tools
def list_llms():
"""List all llm types"""
return [
llm.__name__
for llm in llm_type_to_cls_dict.values()
if llm.__name__ in settings.llms or settings.dev
]
def list_chain_types():
"""List all chain types"""
return [
chain.__annotations__["return"].__name__
for chain in chains.loading.type_to_loader_dict.values()
if chain.__annotations__["return"].__name__ in settings.chains or settings.dev
]
def list_memories():
"""List all memory types"""
return [
memory.__name__
for memory in memory_type_to_cls_dict.values()
if memory.__name__ in settings.memories or settings.dev
]
ALL_TYPES_DICT = {
**LANGCHAIN_TYPES_DICT,
"Custom": ["Custom Tool", "Python Function"],
}

View file

@@ -0,0 +1,3 @@
from langflow.interface.llms.base import LLMCreator
__all__ = ["LLMCreator"]

View file

@@ -0,0 +1,33 @@
from typing import Dict, List, Optional
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class LLMCreator(LangChainTypeCreator):
type_name: str = "llms"
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = llm_type_to_cls_dict
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of an llm."""
try:
return build_template_from_class(name, llm_type_to_cls_dict)
except ValueError as exc:
raise ValueError("LLM not found") from exc
def to_list(self) -> List[str]:
return [
llm.__name__
for llm in self.type_to_loader_dict.values()
if llm.__name__ in settings.llms or settings.dev
]
llm_creator = LLMCreator()

View file

@@ -1,7 +1,10 @@
import json
from typing import Any, Dict, Optional
from typing import Any, Callable, Dict, Optional
from langchain.agents import ZeroShotAgent
from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
@@ -15,29 +18,67 @@ from langchain.chains.loading import load_chain_from_config
from langchain.llms.base import BaseLLM
from langchain.llms.loading import load_llm_from_config
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.importing.utils import import_by_type
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.types import get_type_list
from langflow.utils import payload, util
from langflow.utils import util, validate
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
"""Instantiate class from module type and key, and params"""
if node_type in CUSTOM_AGENTS:
if custom_agent := CUSTOM_AGENTS.get(node_type):
return custom_agent.initialize(**params) # type: ignore
class_object = import_by_type(_type=base_type, name=node_type)
if base_type == "agents":
# We need to initialize it differently
return load_agent_executor(class_object, params)
elif node_type == "ZeroShotPrompt":
if "tools" not in params:
params["tools"] = []
return ZeroShotAgent.create_prompt(**params)
elif node_type == "PythonFunction":
# If the node_type is "PythonFunction"
# we need to get the function from the params
# which will be a str containing a python function
# and then we need to compile it and return the function
# as the instance
function_string = params["code"]
if isinstance(function_string, str):
return validate.eval_function(function_string)
raise ValueError("Function should be a string")
elif base_type == "toolkits":
loaded_toolkit = class_object(**params)
# Check if node_type has a loader
if toolkits_creator.has_create_function(node_type):
return load_toolkits_executor(node_type, loaded_toolkit, params)
return loaded_toolkit
else:
return class_object(**params)
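For a node with no special casing, the function boils down to class_object(**params); a sketch (the langflow.interface.loading module path is an assumption inferred from how the loader is used elsewhere in this commit):

# Sketch: instantiating a prompt node the way the loader above does.
from langflow.interface.loading import instantiate_class

prompt = instantiate_class(
    node_type="PromptTemplate",
    base_type="prompts",
    params={"template": "Tell me about {topic}", "input_variables": ["topic"]},
)
print(prompt.format(topic="LangChain"))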
def load_flow_from_json(path: str):
"""Load flow from json file"""
# This is done to avoid circular imports
from langflow.graph import Graph
with open(path, "r") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
extracted_json = extract_json(data_graph)
return load_langchain_type_from_config(config=extracted_json)
def extract_json(data_graph):
nodes = data_graph["nodes"]
# Substitute ZeroShotPrompt with PromptTemplate
nodes = replace_zero_shot_prompt_with_prompt_template(nodes)
# nodes = replace_zero_shot_prompt_with_prompt_template(nodes)
# Add input variables
nodes = payload.extract_input_variables(nodes)
# nodes = payload.extract_input_variables(nodes)
# Nodes, edges and root node
edges = data_graph["edges"]
root = payload.get_root_node(nodes, edges)
return payload.build_json(root, nodes, edges)
graph = Graph(nodes, edges)
return graph.build()
def replace_zero_shot_prompt_with_prompt_template(nodes):
@@ -92,6 +133,25 @@ def load_agent_executor_from_config(
)
def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs):
"""Load agent executor from agent class, tools and chain"""
allowed_tools = params["allowed_tools"]
llm_chain = params["llm_chain"]
tool_names = [tool.name for tool in allowed_tools]
agent = agent_class(allowed_tools=tool_names, llm_chain=llm_chain)
return AgentExecutor.from_agent_and_tools(
agent=agent,
tools=allowed_tools,
**kwargs,
)
def load_toolkits_executor(node_type: str, toolkit: BaseToolkit, params: dict):
create_function: Callable = toolkits_creator.get_create_function(node_type)
if llm := params.get("llm"):
return create_function(llm=llm, toolkit=toolkit)
def load_tools_from_config(tool_list: list[dict]) -> list:
"""Load tools based on a config list.

View file

@@ -0,0 +1,3 @@
from langflow.interface.memories.base import MemoryCreator
__all__ = ["MemoryCreator"]

View file

@@ -0,0 +1,40 @@
from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import memory_type_to_cls_dict
from langflow.settings import settings
from langflow.template.base import FrontendNode
from langflow.template.nodes import MemoryFrontendNode
from langflow.utils.util import build_template_from_class
class MemoryCreator(LangChainTypeCreator):
type_name: str = "memories"
@property
def frontend_node_class(self) -> Type[FrontendNode]:
"""The class type of the FrontendNode created in frontend_node."""
return MemoryFrontendNode
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = memory_type_to_cls_dict
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of a memory."""
try:
return build_template_from_class(name, memory_type_to_cls_dict)
except ValueError as exc:
raise ValueError("Memory not found") from exc
def to_list(self) -> List[str]:
return [
memory.__name__
for memory in self.type_to_loader_dict.values()
if memory.__name__ in settings.memories or settings.dev
]
memory_creator = MemoryCreator()

View file

@@ -0,0 +1,3 @@
from langflow.interface.prompts.base import PromptCreator
__all__ = ["PromptCreator"]

View file

@@ -0,0 +1,59 @@
from typing import Dict, List, Optional, Type
from langchain import prompts
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.template.nodes import PromptFrontendNode
from langflow.utils.util import build_template_from_class
class PromptCreator(LangChainTypeCreator):
type_name: str = "prompts"
@property
def frontend_node_class(self) -> Type[PromptFrontendNode]:
return PromptFrontendNode
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = {
prompt_name: import_class(f"langchain.prompts.{prompt_name}")
# if prompt_name is not lower case it is a class
for prompt_name in prompts.__all__
}
# Merge CUSTOM_PROMPTS into self.type_dict
from langflow.interface.prompts.custom import CUSTOM_PROMPTS
self.type_dict.update(CUSTOM_PROMPTS)
# Now filter according to settings.prompts
self.type_dict = {
name: prompt
for name, prompt in self.type_dict.items()
if name in settings.prompts or settings.dev
}
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
try:
if name in get_custom_nodes(self.type_name).keys():
return get_custom_nodes(self.type_name)[name]
return build_template_from_class(name, self.type_to_loader_dict)
except ValueError as exc:
raise ValueError("Prompt not found") from exc
def to_list(self) -> List[str]:
custom_prompts = get_custom_nodes("prompts")
# library_prompts = [
# prompt.__annotations__["return"].__name__
# for prompt in self.type_to_loader_dict.values()
# if prompt.__annotations__["return"].__name__ in settings.prompts
# or settings.dev
# ]
return list(self.type_to_loader_dict.keys()) + list(custom_prompts.keys())
prompt_creator = PromptCreator()

View file

@@ -0,0 +1,77 @@
from typing import Dict, List, Optional, Type
from langchain.prompts import PromptTemplate
from pydantic import root_validator
from langflow.graph.utils import extract_input_variables_from_prompt
# Steps to create a BaseCustomPrompt:
# 1. Create a prompt template that ends with:
# Current conversation:
# {history}
# Human: {input}
# {ai_prefix}:
# 2. Create a class that inherits from BaseCustomPrompt
# 3. Add the following class attributes:
# template: str = ""
# description: Optional[str]
# ai_prefix: Optional[str] = "{ai_prefix}"
# 3.1. The ai_prefix should be a value in input_variables
# SeriesCharacterPrompt is a working example
# If used in a LLMChain, with a Memory module, it will work as expected
# We should consider creating ConversationalChains that expose custom parameters
# That way it will be easier to create custom prompts
class BaseCustomPrompt(PromptTemplate):
template: str = ""
description: Optional[str]
ai_prefix: Optional[str]
@root_validator(pre=False)
def build_template(cls, values):
format_dict = {}
ai_prefix_format_dict = {}
for key in values.get("input_variables", []):
new_value = values.get(key, f"{{{key}}}")
format_dict[key] = new_value
if key in values["ai_prefix"]:
ai_prefix_format_dict[key] = new_value
values["ai_prefix"] = values["ai_prefix"].format(**ai_prefix_format_dict)
values["template"] = values["template"].format(**format_dict)
values["template"] = values["template"]
values["input_variables"] = extract_input_variables_from_prompt(
values["template"]
)
return values
class SeriesCharacterPrompt(BaseCustomPrompt):
# Add a very descriptive description for the prompt generator
description: Optional[
str
] = "A prompt that asks the AI to act like a character from a series."
character: str
series: str
template: str = """I want you to act like {character} from {series}.
I want you to respond and answer like {character}. do not write any explanations. only answer like {character}.
You must know all of the knowledge of {character}.
Current conversation:
{history}
Human: {input}
{character}:"""
ai_prefix: str = "{character}"
input_variables: List[str] = ["character", "series"]
CUSTOM_PROMPTS: Dict[str, Type[BaseCustomPrompt]] = {
"SeriesCharacterPrompt": SeriesCharacterPrompt
}
if __name__ == "__main__":
prompt = SeriesCharacterPrompt(character="Harry Potter", series="Harry Potter")
print(prompt.template)

View file

@@ -1,47 +1,226 @@
import contextlib
import io
import re
from typing import Any, Dict
from langflow.cache.utils import compute_hash, load_cache, save_cache
from langflow.graph.graph import Graph
from langflow.interface import loading
from langflow.utils.logger import logger
def process_data_graph(data_graph: Dict[str, Any]):
def load_langchain_object(data_graph, is_first_message=False):
"""
Process data graph by extracting input variables and replacing ZeroShotPrompt
Load langchain object from cache if it exists, otherwise build it.
"""
computed_hash = compute_hash(data_graph)
if is_first_message:
langchain_object = build_langchain_object(data_graph)
else:
logger.debug("Loading langchain object from cache")
langchain_object = load_cache(computed_hash)
return computed_hash, langchain_object
def build_langchain_object(data_graph):
"""
Build langchain object from data_graph.
"""
logger.debug("Building langchain object")
nodes = data_graph["nodes"]
# Add input variables
# nodes = payload.extract_input_variables(nodes)
# Nodes, edges and root node
edges = data_graph["edges"]
graph = Graph(nodes, edges)
return graph.build()
def process_graph(data_graph: Dict[str, Any]):
"""
Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate, then run the graph and return the result and thought.
"""
# Load langchain object
logger.debug("Loading langchain object")
message = data_graph.pop("message", "")
is_first_message = len(data_graph.get("chatHistory", [])) == 0
computed_hash, langchain_object = load_langchain_object(
data_graph, is_first_message
)
logger.debug("Loaded langchain object")
extracted_json = loading.extract_json(data_graph)
if langchain_object is None:
# Raise user facing error
raise ValueError(
"There was an error loading the langchain_object. Please, check all the nodes and try again."
)
message = data_graph["message"]
# Generate result and thought
logger.debug("Generating result and thought")
result, thought = get_result_and_thought_using_graph(langchain_object, message)
logger.debug("Generated result and thought")
# Process json
result, thought = get_result_and_thought(extracted_json, message)
# Save langchain_object to cache
# We have to save it here because if the
# memory is updated we need to keep the new values
logger.debug("Saving langchain object to cache")
save_cache(computed_hash, langchain_object, is_first_message)
logger.debug("Saved langchain object to cache")
return {"result": str(result), "thought": thought.strip()}
return {
"result": result,
"thought": re.sub(
r"\x1b\[([0-9,A-Z]{1,2}(;[0-9,A-Z]{1,2})?)?[m|K]", "", thought
).strip(),
def get_memory_key(langchain_object):
"""
Given a LangChain object, this function retrieves the current memory key from the object's memory attribute.
It then checks if the key exists in a dictionary of known memory keys and returns the corresponding key,
or None if the current key is not recognized.
"""
mem_key_dict = {
"chat_history": "history",
"history": "chat_history",
}
memory_key = langchain_object.memory.memory_key
return mem_key_dict.get(memory_key)
def update_memory_keys(langchain_object, possible_new_mem_key):
"""
Given a LangChain object and a possible new memory key, this function updates the input and output keys in the
object's memory attribute to exclude the current memory key and the possible new key. It then sets the memory key
to the possible new key.
"""
input_key = [
key
for key in langchain_object.input_keys
if key not in [langchain_object.memory.memory_key, possible_new_mem_key]
][0]
output_key = [
key
for key in langchain_object.output_keys
if key not in [langchain_object.memory.memory_key, possible_new_mem_key]
][0]
langchain_object.memory.input_key = input_key
langchain_object.memory.output_key = output_key
langchain_object.memory.memory_key = possible_new_mem_key
def fix_memory_inputs(langchain_object):
"""
Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the
object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the
get_memory_key function and updates the memory keys using the update_memory_keys function.
"""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
try:
if langchain_object.memory.memory_key in langchain_object.input_variables:
return
except AttributeError:
if (
langchain_object.memory.memory_key
in langchain_object.prompt.input_variables
):
return
possible_new_mem_key = get_memory_key(langchain_object)
if possible_new_mem_key is not None:
update_memory_keys(langchain_object, possible_new_mem_key)
def get_result_and_thought_using_graph(langchain_object, message: str):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
chat_input = None
memory_key = ""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
memory_key = langchain_object.memory.memory_key
for key in langchain_object.input_keys:
if key not in [memory_key, "chat_history"]:
chat_input = {key: message}
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = False
fix_memory_inputs(langchain_object)
with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
try:
output = langchain_object(chat_input)
except ValueError as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
output = langchain_object.run(chat_input)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
result = (
output.get(langchain_object.output_keys[0])
if isinstance(output, dict)
else output
)
if intermediate_steps:
thought = format_intermediate_steps(intermediate_steps)
else:
thought = output_buffer.getvalue()
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought
def get_result_and_thought(extracted_json: Dict[str, Any], message: str):
"""Get result and thought from extracted json"""
try:
loaded_langchain = loading.load_langchain_type_from_config(
langchain_object = loading.load_langchain_type_from_config(
config=extracted_json
)
with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
result = loaded_langchain(message)
result = (
result.get(loaded_langchain.output_keys[0])
if isinstance(result, dict)
else result
output = langchain_object(message)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
thought = output_buffer.getvalue()
result = (
output.get(langchain_object.output_keys[0])
if isinstance(output, dict)
else output
)
if intermediate_steps:
thought = format_intermediate_steps(intermediate_steps)
else:
thought = output_buffer.getvalue()
except Exception as e:
result = f"Error: {str(e)}"
thought = ""
return result, thought
def format_intermediate_steps(intermediate_steps):
formatted_chain = "> Entering new AgentExecutor chain...\n"
for step in intermediate_steps:
action = step[0]
observation = step[1]
formatted_chain += (
f" {action.log}\nAction: {action.tool}\nAction Input: {action.tool_input}\n"
)
formatted_chain += f"Observation: {observation}\n"
final_answer = f"Final Answer: {observation}\n"
formatted_chain += f"Thought: I now know the final answer\n{final_answer}\n"
formatted_chain += "> Finished chain.\n"
return formatted_chain
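To see what the formatter produces, a sketch with a single (action, observation) pair (the langflow.interface.run module path is an assumption):

# Sketch: formatting one intermediate step with the helper above.
from langchain.schema import AgentAction
from langflow.interface.run import format_intermediate_steps

steps = [
    (AgentAction(tool="Search", tool_input="langflow", log="I should look this up."),
     "Langflow is a UI for LangChain."),
]
print(format_intermediate_steps(steps))
# > Entering new AgentExecutor chain...
#  I should look this up.
# Action: Search
# Action Input: langflow
# Observation: Langflow is a UI for LangChain.
# Thought: I now know the final answer
# Final Answer: Langflow is a UI for LangChain.
# > Finished chain.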

View file

@@ -1,133 +0,0 @@
from typing import Any, Dict # noqa: F401
from langchain import agents, chains, prompts
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
get_all_tool_names,
)
from langflow.custom import customs
from langflow.interface.custom_lists import (
llm_type_to_cls_dict,
memory_type_to_cls_dict,
)
from langflow.utils import util
def get_signature(name: str, object_type: str):
"""Get the signature of an object."""
return {
"chains": get_chain_signature,
"agents": get_agent_signature,
"prompts": get_prompt_signature,
"llms": get_llm_signature,
"memories": get_memory_signature,
"tools": get_tool_signature,
}.get(object_type, lambda name: f"Invalid type: {name}")(name)
def get_chain_signature(name: str):
"""Get the chain type by signature."""
try:
return util.build_template_from_function(
name, chains.loading.type_to_loader_dict
)
except ValueError as exc:
raise ValueError("Chain not found") from exc
def get_agent_signature(name: str):
"""Get the signature of an agent."""
try:
return util.build_template_from_class(name, agents.loading.AGENT_TO_CLASS)
except ValueError as exc:
raise ValueError("Agent not found") from exc
def get_prompt_signature(name: str):
"""Get the signature of a prompt."""
try:
if name in customs.get_custom_prompts().keys():
return customs.get_custom_prompts()[name]
return util.build_template_from_function(
name, prompts.loading.type_to_loader_dict
)
except ValueError as exc:
raise ValueError("Prompt not found") from exc
def get_llm_signature(name: str):
"""Get the signature of an llm."""
try:
return util.build_template_from_class(name, llm_type_to_cls_dict)
except ValueError as exc:
raise ValueError("LLM not found") from exc
def get_memory_signature(name: str):
"""Get the signature of a memory."""
try:
return util.build_template_from_class(name, memory_type_to_cls_dict)
except ValueError as exc:
raise ValueError("Memory not found") from exc
def get_tool_signature(name: str):
"""Get the signature of a tool."""
all_tools = {}
for tool in get_all_tool_names():
if tool_params := util.get_tool_params(util.get_tools_dict(tool)):
all_tools[tool_params["name"]] = tool
# Raise error if name is not in tools
if name not in all_tools.keys():
raise ValueError("Tool not found")
type_dict = {
"str": {
"type": "str",
"required": True,
"list": False,
"show": True,
"placeholder": "",
"value": "",
},
"llm": {"type": "BaseLLM", "required": True, "list": False, "show": True},
}
tool_type = all_tools[name]
if tool_type in _BASE_TOOLS:
params = []
elif tool_type in _LLM_TOOLS:
params = ["llm"]
elif tool_type in _EXTRA_LLM_TOOLS:
_, extra_keys = _EXTRA_LLM_TOOLS[tool_type]
params = ["llm"] + extra_keys
elif tool_type in _EXTRA_OPTIONAL_TOOLS:
_, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
params = extra_keys
else:
params = []
template = {
param: (type_dict[param].copy() if param == "llm" else type_dict["str"].copy())
for param in params
}
# Remove required from aiosession
if "aiosession" in template.keys():
template["aiosession"]["required"] = False
template["aiosession"]["show"] = False
template["_type"] = tool_type # type: ignore
return {
"template": template,
**util.get_tool_params(util.get_tools_dict(tool_type)),
"base_classes": ["Tool"],
}

View file

@@ -0,0 +1,65 @@
from typing import Callable, Dict, List, Optional
from langchain.agents import agent_toolkits
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class, import_module
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class ToolkitCreator(LangChainTypeCreator):
type_name: str = "toolkits"
all_types: List[str] = agent_toolkits.__all__
create_functions: Dict = {
"JsonToolkit": [],
"SQLDatabaseToolkit": [],
"OpenAPIToolkit": ["create_openapi_agent"],
"VectorStoreToolkit": [
"create_vectorstore_agent",
"create_vectorstore_router_agent",
"VectorStoreInfo",
],
"ZapierToolkit": [],
"PandasToolkit": ["create_pandas_dataframe_agent"],
"CSVToolkit": ["create_csv_agent"],
}
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = {
toolkit_name: import_class(
f"langchain.agents.agent_toolkits.{toolkit_name}"
)
# if toolkit_name is not lower case it is a class
for toolkit_name in agent_toolkits.__all__
if not toolkit_name.islower() and toolkit_name in settings.toolkits
}
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
try:
return build_template_from_class(name, self.type_to_loader_dict)
except ValueError as exc:
raise ValueError("Prompt not found") from exc
def to_list(self) -> List[str]:
return list(self.type_to_loader_dict.keys())
def get_create_function(self, name: str) -> Callable:
if loader_name := self.create_functions.get(name, None):
# import loader
return import_module(
f"from langchain.agents.agent_toolkits import {loader_name[0]}"
)
else:
raise ValueError("Loader not found")
def has_create_function(self, name: str) -> bool:
# check if the function list is not empty
return bool(self.create_functions.get(name, None))
toolkits_creator = ToolkitCreator()

View file

@@ -0,0 +1,3 @@
from langflow.interface.tools.base import ToolCreator
__all__ = ["ToolCreator"]

View file

@@ -0,0 +1,150 @@
from typing import Dict, List, Optional
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langflow.custom import customs
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.tools.constants import (
CUSTOM_TOOLS,
FILE_TOOLS,
)
from langflow.interface.tools.util import (
get_tool_by_name,
get_tool_params,
get_tools_dict,
)
from langflow.settings import settings
from langflow.template.base import Template, TemplateField
from langflow.utils import util
TOOL_INPUTS = {
"str": TemplateField(
field_type="str",
required=True,
is_list=False,
show=True,
placeholder="",
value="",
),
"llm": TemplateField(field_type="BaseLLM", required=True, is_list=False, show=True),
"func": TemplateField(
field_type="function",
required=True,
is_list=False,
show=True,
multiline=True,
),
"code": TemplateField(
field_type="str",
required=True,
is_list=False,
show=True,
value="",
multiline=True,
),
"dict_": TemplateField(
field_type="file",
required=True,
is_list=False,
show=True,
value="",
),
}
class ToolCreator(LangChainTypeCreator):
type_name: str = "tools"
tools_dict: Optional[Dict] = None
@property
def type_to_loader_dict(self) -> Dict:
if self.tools_dict is None:
self.tools_dict = get_tools_dict()
return self.tools_dict
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of a tool."""
base_classes = ["Tool"]
all_tools = {}
for tool in self.type_to_loader_dict.keys():
if tool_params := get_tool_params(get_tool_by_name(tool)):
tool_name = tool_params.get("name") or str(tool)
all_tools[tool_name] = {"type": tool, "params": tool_params}
# Raise error if name is not in tools
if name not in all_tools.keys():
raise ValueError("Tool not found")
tool_type: str = all_tools[name]["type"] # type: ignore
if tool_type in _BASE_TOOLS:
params = []
elif tool_type in _LLM_TOOLS:
params = ["llm"]
elif tool_type in _EXTRA_LLM_TOOLS:
_, extra_keys = _EXTRA_LLM_TOOLS[tool_type]
params = ["llm"] + extra_keys
elif tool_type in _EXTRA_OPTIONAL_TOOLS:
_, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
params = extra_keys
elif tool_type == "Tool":
params = ["name", "description", "func"]
elif tool_type in CUSTOM_TOOLS:
# Get custom tool params
params = all_tools[name]["params"] # type: ignore
base_classes = ["function"]
if node := customs.get_custom_nodes("tools").get(tool_type):
return node
elif tool_type in FILE_TOOLS:
params = all_tools[name]["params"] # type: ignore
base_classes += [name]
else:
params = []
# Copy the field and add the name
fields = []
for param in params:
field = TOOL_INPUTS.get(param, TOOL_INPUTS["str"]).copy()
field.name = param
if param == "aiosession":
field.show = False
field.required = False
fields.append(field)
template = Template(fields=fields, type_name=tool_type)
tool_params = all_tools[name]["params"]
return {
"template": util.format_dict(template.to_dict()),
**tool_params,
"base_classes": base_classes,
}
def to_list(self) -> List[str]:
"""List all load tools"""
tools = []
for tool, fcn in get_tools_dict().items():
tool_params = get_tool_params(fcn)
if tool_params and not tool_params.get("name"):
tool_params["name"] = tool
if tool_params and (
tool_params.get("name") in settings.tools
or (tool_params.get("name") and settings.dev)
):
tools.append(tool_params["name"])
return tools
tool_creator = ToolCreator()

View file

@@ -0,0 +1,21 @@
from langchain.agents import Tool
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.tools.json.tool import JsonSpec
from langflow.interface.custom.types import PythonFunction
FILE_TOOLS = {"JsonSpec": JsonSpec}
CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
ALL_TOOLS_NAMES = {
**_BASE_TOOLS,
**_LLM_TOOLS, # type: ignore
**{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()}, # type: ignore
**{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
**CUSTOM_TOOLS,
**FILE_TOOLS, # type: ignore
}

View file

@@ -0,0 +1,118 @@
import ast
import inspect
from typing import Dict, Union
from langchain.agents.tools import Tool
from langflow.interface.tools.constants import ALL_TOOLS_NAMES
def get_tools_dict():
"""Get the tools dictionary."""
all_tools = {}
for tool, fcn in ALL_TOOLS_NAMES.items():
if tool_params := get_tool_params(fcn):
tool_name = tool_params.get("name") or str(tool)
all_tools[tool_name] = fcn
return all_tools
def get_tool_by_name(name: str):
"""Get a tool from the tools dictionary."""
tools = get_tools_dict()
if name not in tools:
raise ValueError(f"{name} not found.")
return tools[name]
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
tree = ast.parse(inspect.getsource(func))
# Iterate over the statements in the abstract syntax tree
for node in ast.walk(tree):
# Find the first return statement
if isinstance(node, ast.Return):
tool = node.value
if isinstance(tool, ast.Call):
if isinstance(tool.func, ast.Name) and tool.func.id == "Tool":
if tool.keywords:
tool_params = {}
for keyword in tool.keywords:
if keyword.arg == "name":
tool_params["name"] = ast.literal_eval(keyword.value)
elif keyword.arg == "description":
tool_params["description"] = ast.literal_eval(
keyword.value
)
return tool_params
return {
"name": ast.literal_eval(tool.args[0]),
"description": ast.literal_eval(tool.args[2]),
}
#
else:
# get the class object from the return statement
try:
class_obj = eval(
compile(ast.Expression(tool), "<string>", "eval")
)
except Exception:
return None
return {
"name": getattr(class_obj, "name"),
"description": getattr(class_obj, "description"),
}
# Return None if no return statement was found
return None
def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
tree = ast.parse(inspect.getsource(cls))
tool_params = {}
# Iterate over the statements in the abstract syntax tree
for node in ast.walk(tree):
if isinstance(node, ast.ClassDef):
# Find the class definition and look for methods
for stmt in node.body:
if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__":
# There are no assignment statements in the __init__ method
# So we need to get the params from the function definition
for arg in stmt.args.args:
if arg.arg == "name":
# It should be the name of the class
tool_params[arg.arg] = cls.__name__
elif arg.arg == "self":
continue
# If there is no default value, set it to an empty string
else:
try:
annotation = ast.literal_eval(arg.annotation) # type: ignore
tool_params[arg.arg] = annotation
except ValueError:
tool_params[arg.arg] = ""
# Get the attribute name and the annotation
elif cls != Tool and isinstance(stmt, ast.AnnAssign):
# Get the attribute name and the annotation
tool_params[stmt.target.id] = "" # type: ignore
return tool_params
def get_tool_params(tool, **kwargs) -> Dict:
# Parse the function code into an abstract syntax tree
# Define if it is a function or a class
if inspect.isfunction(tool):
return get_func_tool_params(tool, **kwargs) or {}
elif inspect.isclass(tool):
# Get the parameters necessary to
# instantiate the class
return get_class_tool_params(tool, **kwargs) or {}
else:
raise ValueError("Tool must be a function or class.")

View file

@@ -1,12 +1,21 @@
from langflow.interface.listing import list_type
from langflow.interface.signature import get_signature
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.documentLoaders.base import documentloader_creator
from langflow.interface.embeddings.base import embedding_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.memories.base import memory_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.vectorStore.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
def get_type_list():
"""Get a list of all langchain types"""
all_types = build_langchain_types_dict()
all_types.pop("tools")
# all_types.pop("tools")
for key, value in all_types.items():
all_types[key] = [item["template"]["_type"] for item in value.values()]
@@ -14,23 +23,28 @@ def get_type_list():
return all_types
def build_langchain_types_dict():
def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
"""Build a dictionary of all langchain types"""
return {
"chains": {
chain: get_signature(chain, "chains") for chain in list_type("chains")
},
"agents": {
agent: get_signature(agent, "agents") for agent in list_type("agents")
},
"prompts": {
prompt: get_signature(prompt, "prompts") for prompt in list_type("prompts")
},
"llms": {llm: get_signature(llm, "llms") for llm in list_type("llms")},
"memories": {
memory: get_signature(memory, "memories")
for memory in list_type("memories")
},
"tools": {tool: get_signature(tool, "tools") for tool in list_type("tools")},
}
all_types = {}
creators = [
chain_creator,
agent_creator,
prompt_creator,
llm_creator,
memory_creator,
tool_creator,
toolkits_creator,
wrapper_creator,
embedding_creator,
vectorstore_creator,
documentloader_creator,
]
all_types = {}
for creator in creators:
created_types = creator.to_dict()
if created_types[creator.type_name].values():
all_types.update(created_types)
return all_types
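The result is one nested dict per creator, keyed by its type_name; a sketch (which keys appear depends on config.yaml and dev mode, and the snippet assumes it is run where config.yaml resolves, e.g. the repository root):

# Sketch: inspecting the aggregated registry built above.
from langflow.interface.types import build_langchain_types_dict

types_dict = build_langchain_types_dict()
print(sorted(types_dict))                  # e.g. ['agents', 'chains', 'llms', ...]
print(sorted(types_dict.get("llms", {})))  # node names, each mapped to its frontend template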

View file

@@ -0,0 +1,31 @@
from typing import Dict, List, Optional
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import vectorstores_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class VectorstoreCreator(LangChainTypeCreator):
type_name: str = "vectorstore"
@property
def type_to_loader_dict(self) -> Dict:
return vectorstores_type_to_cls_dict
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of an embedding."""
try:
return build_template_from_class(name, vectorstores_type_to_cls_dict)
except ValueError as exc:
raise ValueError(f"Vector Store {name} not found") from exc
def to_list(self) -> List[str]:
return [
vectorstore
for vectorstore in self.type_to_loader_dict.keys()
if vectorstore in settings.vectorstores or settings.dev
]
vectorstore_creator = VectorstoreCreator()

View file

@@ -0,0 +1,30 @@
from typing import Dict, List, Optional
from langchain import requests
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
class WrapperCreator(LangChainTypeCreator):
type_name: str = "wrappers"
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = {
wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper]
}
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
try:
return build_template_from_class(name, self.type_to_loader_dict)
except ValueError as exc:
raise ValueError("Wrapper not found") from exc
def to_list(self) -> List[str]:
return list(self.type_to_loader_dict.keys())
wrapper_creator = WrapperCreator()

View file

@@ -2,8 +2,7 @@ from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langflow.api.endpoints import router as endpoints_router
from langflow.api.list_endpoints import router as list_router
from langflow.api.signature import router as signatures_router
from langflow.api.validate import router as validate_router
def create_app():
@@ -23,8 +22,7 @@ def create_app():
)
app.include_router(endpoints_router)
app.include_router(list_router)
app.include_router(signatures_router)
app.include_router(validate_router)
return app

View file

@@ -1,29 +1,47 @@
import os
from typing import List, Optional
from typing import List
import yaml
from pydantic import BaseSettings, Field, root_validator
from pydantic import BaseSettings, root_validator
class Settings(BaseSettings):
chains: Optional[List[str]] = Field(...)
agents: Optional[List[str]] = Field(...)
prompts: Optional[List[str]] = Field(...)
llms: Optional[List[str]] = Field(...)
tools: Optional[List[str]] = Field(...)
memories: Optional[List[str]] = Field(...)
dev: bool = Field(...)
chains: List[str] = []
agents: List[str] = []
prompts: List[str] = []
llms: List[str] = []
tools: List[str] = []
memories: List[str] = []
embeddings: List[str] = []
vectorstores: List[str] = []
documentloaders: List[str] = []
wrappers: List[str] = []
toolkits: List[str] = []
dev: bool = False
class Config:
validate_assignment = True
extra = "ignore"
@root_validator
@root_validator(allow_reuse=True)
def validate_lists(cls, values):
for key, value in values.items():
if key != "dev" and not value:
values[key] = []
return values
def update_from_yaml(self, file_path: str):
new_settings = load_settings_from_yaml(file_path)
self.chains = new_settings.chains or []
self.agents = new_settings.agents or []
self.prompts = new_settings.prompts or []
self.llms = new_settings.llms or []
self.tools = new_settings.tools or []
self.memories = new_settings.memories or []
self.wrappers = new_settings.wrappers or []
self.toolkits = new_settings.toolkits or []
self.dev = new_settings.dev or False
def save_settings_to_yaml(settings: Settings, file_path: str):
with open(file_path, "w") as f:
@@ -41,9 +59,8 @@ def load_settings_from_yaml(file_path: str) -> Settings:
with open(file_path, "r") as f:
settings_dict = yaml.safe_load(f)
a = Settings.parse_obj(settings_dict)
return a
return Settings(**settings_dict)
settings = load_settings_from_yaml("config.yaml")
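Since every list now defaults to empty, a partial config is enough; a sketch with an illustrative config dict (not the project's shipped config.yaml):

# Sketch: the new Settings defaults in action.
from langflow.settings import Settings

cfg = {"chains": ["ConversationChain"], "llms": ["OpenAI", "ChatOpenAI"], "dev": False}
settings = Settings(**cfg)
print(settings.llms)    # ['OpenAI', 'ChatOpenAI']
print(settings.tools)   # [] -- unlisted categories fall back to empty lists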

View file

@@ -0,0 +1,228 @@
from abc import ABC
from typing import Any, Callable, Dict, Optional, Union
from pydantic import BaseModel
from langflow.template.constants import FORCE_SHOW_FIELDS
from langflow.utils import constants
class TemplateFieldCreator(BaseModel, ABC):
field_type: str = "str"
required: bool = False
placeholder: str = ""
is_list: bool = False
show: bool = True
multiline: bool = False
value: Any = None
suffixes: list[str] = []
fileTypes: list[str] = []
file_types: list[str] = []
content: Union[str, None] = None
password: bool = False
options: list[str] = []
name: str = ""
display_name: Optional[str] = None
def to_dict(self):
result = self.dict()
# Remove key if it is None
for key in list(result.keys()):
if result[key] is None or result[key] == []:
del result[key]
result["type"] = result.pop("field_type")
result["list"] = result.pop("is_list")
if result.get("file_types"):
result["fileTypes"] = result.pop("file_types")
if self.field_type == "file":
result["content"] = self.content
return result
def process_field(
self, key: str, value: Dict[str, Any], name: Optional[str] = None
) -> None:
_type = value["type"]
# Remove 'Optional' wrapper
if "Optional" in _type:
_type = _type.replace("Optional[", "")[:-1]
# Check for list type
if "List" in _type:
_type = _type.replace("List[", "")[:-1]
self.is_list = True
# Replace 'Mapping' with 'dict'
if "Mapping" in _type:
_type = _type.replace("Mapping", "dict")
# Change type from str to Tool
self.field_type = "Tool" if key in {"allowed_tools"} else self.field_type
self.field_type = "int" if key in {"max_value_length"} else self.field_type
# Show or not field
self.show = bool(
(self.required and key not in ["input_variables"])
or key in FORCE_SHOW_FIELDS
or "api_key" in key
)
# Add password field
self.password = any(
text in key.lower() for text in {"password", "token", "api", "key"}
)
# Add multiline
self.multiline = key in {
"suffix",
"prefix",
"template",
"examples",
"code",
"headers",
}
# Replace dict type with str
if "dict" in self.field_type.lower():
self.field_type = "code"
if key == "dict_":
self.field_type = "file"
self.suffixes = [".json", ".yaml", ".yml"]
self.file_types = ["json", "yaml", "yml"]
# Replace default value with actual value
if "default" in value:
self.value = value["default"]
if key == "headers":
self.value = """{'Authorization':
'Bearer <token>'}"""
# Add options to openai
if name == "OpenAI" and key == "model_name":
self.options = constants.OPENAI_MODELS
self.is_list = True
elif name == "ChatOpenAI" and key == "model_name":
self.options = constants.CHAT_OPENAI_MODELS
self.is_list = True
class TemplateField(TemplateFieldCreator):
pass
class Template(BaseModel):
type_name: str
fields: list[TemplateField]
def process_fields(
self,
name: Optional[str] = None,
format_field_func: Union[Callable, None] = None,
):
if format_field_func:
for field in self.fields:
format_field_func(field, name)
def to_dict(self, format_field_func=None):
self.process_fields(self.type_name, format_field_func)
result = {field.name: field.to_dict() for field in self.fields}
result["_type"] = self.type_name # type: ignore
return result
class FrontendNode(BaseModel):
template: Template
description: str
base_classes: list
name: str = ""
def to_dict(self):
return {
self.name: {
"template": self.template.to_dict(self.format_field),
"description": self.description,
"base_classes": self.base_classes,
}
}
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
key = field.name
value = field.to_dict()
_type = value["type"]
# Remove 'Optional' wrapper
if "Optional" in _type:
_type = _type.replace("Optional[", "")[:-1]
# Check for list type
if "List" in _type:
_type = _type.replace("List[", "")[:-1]
field.is_list = True
# Replace 'Mapping' with 'dict'
if "Mapping" in _type:
_type = _type.replace("Mapping", "dict")
# Change type from str to Tool
field.field_type = "Tool" if key in {"allowed_tools"} else field.field_type
field.field_type = "int" if key in {"max_value_length"} else field.field_type
# Show or not field
field.show = bool(
(field.required and key not in ["input_variables"])
or key in FORCE_SHOW_FIELDS
or "api_key" in key
)
# Add password field
field.password = any(
text in key.lower() for text in {"password", "token", "api", "key"}
)
# Add multiline
field.multiline = key in {
"suffix",
"prefix",
"template",
"examples",
"code",
"headers",
}
# Replace dict type with str
if "dict" in field.field_type.lower():
field.field_type = "code"
if key == "dict_":
field.field_type = "file"
field.suffixes = [".json", ".yaml", ".yml"]
field.file_types = ["json", "yaml", "yml"]
# Replace default value with actual value
if "default" in value:
field.value = value["default"]
if key == "headers":
field.value = """{'Authorization':
'Bearer <token>'}"""
# Add options to openai
if name == "OpenAI" and key == "model_name":
field.options = constants.OPENAI_MODELS
field.is_list = True
elif name == "ChatOpenAI":
if key == "model_name":
field.options = constants.CHAT_OPENAI_MODELS
field.is_list = True
if "api_key" in key and "OpenAI" in str(name):
field.display_name = "OpenAI API Key"
field.required = True
if field.value is None:
field.value = ""

View file

@ -0,0 +1,32 @@
FORCE_SHOW_FIELDS = [
"allowed_tools",
"memory",
"prefix",
"examples",
"temperature",
"model_name",
"headers",
"max_value_length",
"max_tokens",
]
DEFAULT_PROMPT = """
I want you to act as a naming consultant for new companies.
Here are some examples of good company names:
- search engine, Google
- social media, Facebook
- video sharing, YouTube
The name should be short, catchy and easy to remember.
What is a good name for a company that makes {product}?
"""
SYSTEM_PROMPT = """
You are a helpful assistant that talks casually about life in general.
You are a good listener and you can talk about anything.
"""
HUMAN_PROMPT = "{input}"

View file

View file

@ -0,0 +1,282 @@
from typing import Optional
from langchain.agents import loading
from langchain.agents.mrkl import prompt
from langflow.template.base import FrontendNode, Template, TemplateField
from langflow.template.constants import DEFAULT_PROMPT, HUMAN_PROMPT, SYSTEM_PROMPT
from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION
NON_CHAT_AGENTS = {
agent_type: agent_class
for agent_type, agent_class in loading.AGENT_TO_CLASS.items()
if "chat" not in agent_type.value
}
class BasePromptFrontendNode(FrontendNode):
name: str
template: Template
description: str
base_classes: list[str]
def to_dict(self):
return super().to_dict()
class ZeroShotPromptNode(BasePromptFrontendNode):
name: str = "ZeroShotPrompt"
template: Template = Template(
type_name="zero_shot",
fields=[
TemplateField(
field_type="str",
required=False,
placeholder="",
is_list=False,
show=True,
multiline=True,
value=prompt.PREFIX,
name="prefix",
),
TemplateField(
field_type="str",
required=True,
placeholder="",
is_list=False,
show=True,
multiline=True,
value=prompt.SUFFIX,
name="suffix",
),
TemplateField(
field_type="str",
required=False,
placeholder="",
is_list=False,
show=True,
multiline=True,
value=prompt.FORMAT_INSTRUCTIONS,
name="format_instructions",
),
],
)
description: str = "Prompt template for Zero Shot Agent."
base_classes: list[str] = ["BasePromptTemplate"]
def to_dict(self):
return super().to_dict()
class PromptTemplateNode(FrontendNode):
name: str = "PromptTemplate"
template: Template
description: str
base_classes: list[str] = ["BasePromptTemplate"]
def to_dict(self):
return super().to_dict()
class PythonFunctionNode(FrontendNode):
name: str = "PythonFunction"
template: Template = Template(
type_name="python_function",
fields=[
TemplateField(
field_type="code",
required=True,
placeholder="",
is_list=False,
show=True,
value=DEFAULT_PYTHON_FUNCTION,
name="code",
)
],
)
description: str = "Python function to be executed."
base_classes: list[str] = ["function"]
def to_dict(self):
return super().to_dict()
class ToolNode(FrontendNode):
name: str = "Tool"
template: Template = Template(
type_name="tool",
fields=[
TemplateField(
field_type="str",
required=True,
placeholder="",
is_list=False,
show=True,
multiline=True,
value="",
name="name",
),
TemplateField(
field_type="str",
required=True,
placeholder="",
is_list=False,
show=True,
multiline=True,
value="",
name="description",
),
TemplateField(
field_type="str",
required=True,
placeholder="",
is_list=False,
show=True,
multiline=True,
value="",
name="func",
),
],
)
description: str = "Tool to be used in the flow."
base_classes: list[str] = ["BaseTool"]
def to_dict(self):
return super().to_dict()
class JsonAgentNode(FrontendNode):
name: str = "JsonAgent"
template: Template = Template(
type_name="json_agent",
fields=[
TemplateField(
field_type="BaseToolkit",
required=True,
show=True,
name="toolkit",
),
TemplateField(
field_type="BaseLanguageModel",
required=True,
show=True,
name="llm",
),
],
)
description: str = """Construct a json agent from an LLM and tools."""
base_classes: list[str] = ["AgentExecutor"]
def to_dict(self):
return super().to_dict()
class InitializeAgentNode(FrontendNode):
name: str = "initialize_agent"
template: Template = Template(
type_name="initailize_agent",
fields=[
TemplateField(
field_type="str",
required=True,
is_list=True,
show=True,
multiline=False,
options=list(NON_CHAT_AGENTS.keys()),
value=list(NON_CHAT_AGENTS.keys())[0],
name="agent",
),
TemplateField(
field_type="BaseChatMemory",
required=False,
show=True,
name="memory",
),
TemplateField(
field_type="Tool",
required=False,
show=True,
name="tools",
is_list=True,
),
TemplateField(
field_type="BaseLanguageModel",
required=True,
show=True,
name="llm",
),
],
)
description: str = """Construct an agent from an LLM, tools, and memory."""
base_classes: list[str] = ["AgentExecutor"]
def to_dict(self):
return super().to_dict()
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
# do nothing and don't return anything
pass
class CSVAgentNode(FrontendNode):
name: str = "CSVAgent"
template: Template = Template(
type_name="csv_agent",
fields=[
TemplateField(
field_type="file",
required=True,
show=True,
name="path",
value="",
suffixes=[".csv"],
fileTypes=["csv"],
),
TemplateField(
field_type="BaseLanguageModel",
required=True,
show=True,
name="llm",
),
],
)
description: str = """Construct a CSV agent from a CSV file and an LLM."""
base_classes: list[str] = ["AgentExecutor"]
def to_dict(self):
return super().to_dict()
class PromptFrontendNode(FrontendNode):
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
# if field.field_type == "StringPromptTemplate"
# change it to str
if field.field_type == "StringPromptTemplate" and "Message" in str(name):
field.field_type = "str"
field.multiline = True
field.value = HUMAN_PROMPT if "Human" in field.name else SYSTEM_PROMPT
if field.name == "template" and field.value == "":
field.value = DEFAULT_PROMPT
if (
"Union" in field.field_type
and "BaseMessagePromptTemplate" in field.field_type
):
field.field_type = "BaseMessagePromptTemplate"
class MemoryFrontendNode(FrontendNode):
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
FrontendNode.format_field(field, name)
if not isinstance(field.value, str):
field.value = None
if field.name == "k":
field.required = True
field.show = True
field.field_type = "int"
field.value = 10
field.display_name = "Memory Size"
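
A sketch of how one of these node definitions is rendered, assuming the classes are importable (the file path is not shown in this view; a path like langflow.template.nodes is assumed):

# Sketch: serialize the ZeroShotPrompt node definition.
from langflow.template.nodes import ZeroShotPromptNode  # import path assumed

node = ZeroShotPromptNode()
payload = node.to_dict()

# One top-level key per node name, holding its template, description and base_classes.
print(payload["ZeroShotPrompt"]["base_classes"])           # -> ['BasePromptTemplate']
print(list(payload["ZeroShotPrompt"]["template"].keys()))  # prefix, suffix, format_instructions, _type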

View file

@ -6,3 +6,10 @@ OPENAI_MODELS = [
"text-ada-001",
]
CHAT_OPENAI_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"]
DEFAULT_PYTHON_FUNCTION = """
def python_function(text: str) -> str:
\"\"\"This is a default python function that returns the input text\"\"\"
return text
"""

View file

@ -0,0 +1,30 @@
import logging
from pathlib import Path
from rich.logging import RichHandler
logger = logging.getLogger("langflow")
def configure(log_level: str = "INFO", log_file: Path = None): # type: ignore
log_format = "%(asctime)s - %(levelname)s - %(message)s"
log_level_value = getattr(logging, log_level.upper(), logging.INFO)
logging.basicConfig(
level=log_level_value,
format=log_format,
datefmt="[%X]",
handlers=[RichHandler(rich_tracebacks=True)],
)
if log_file:
log_file = Path(log_file)
log_file.parent.mkdir(parents=True, exist_ok=True)
file_handler = logging.FileHandler(log_file)
file_handler.setFormatter(logging.Formatter(log_format))
logger.addHandler(file_handler)
logger.info(f"Logger set up with log level: {log_level_value}({log_level})")
if log_file:
logger.info(f"Log file: {log_file}")

View file

@ -1,5 +1,6 @@
import contextlib
import re
from typing import Dict
def extract_input_variables(nodes):
@ -27,48 +28,59 @@ def extract_input_variables(nodes):
return nodes
def get_root_node(nodes, edges):
def get_root_node(graph):
"""
Returns the root node of the template.
"""
incoming_edges = {edge["source"] for edge in edges}
return next((node for node in nodes if node["id"] not in incoming_edges), None)
incoming_edges = {edge.source for edge in graph.edges}
return next((node for node in graph.nodes if node not in incoming_edges), None)
def build_json(root, nodes, edges):
"""
Builds a json from the nodes and edges
"""
edge_ids = [edge["source"] for edge in edges if edge["target"] == root["id"]]
local_nodes = [node for node in nodes if node["id"] in edge_ids]
def build_json(root, graph) -> Dict:
if "node" not in root.data:
# If the root node has no "node" key, then it has only one child,
# which is the target of the single outgoing edge
edge = root.edges[0]
local_nodes = [edge.target]
else:
# Otherwise, find all children whose type matches the type
# specified in the template
node_type = root.node_type
local_nodes = graph.get_nodes_with_target(root)
if "node" not in root["data"]:
return build_json(local_nodes[0], nodes, edges)
final_dict = root["data"]["node"]["template"].copy()
if len(local_nodes) == 1:
return build_json(local_nodes[0], graph)
# Build a dictionary from the template
template = root.data["node"]["template"]
final_dict = template.copy()
for key, value in final_dict.items():
if key == "_type":
continue
module_type = value["type"]
node_type = value["type"]
if "value" in value and value["value"] is not None:
# If the value is specified, use it
value = value["value"]
elif "dict" in module_type:
elif "dict" in node_type:
# If the value is a dictionary, create an empty dictionary
value = {}
else:
# Otherwise, recursively build the child nodes
children = []
for c in local_nodes:
module_types = [c["data"]["type"]]
if "node" in c["data"]:
module_types += c["data"]["node"]["base_classes"]
if module_type in module_types:
children.append(c)
for local_node in local_nodes:
node_children = graph.get_children_by_node_type(local_node, node_type)
children.extend(node_children)
if value["required"] and not children:
raise ValueError(f"No child with type {module_type} found")
values = [build_json(child, nodes, edges) for child in children]
value = list(values) if value["list"] else next(iter(values), None)
raise ValueError(f"No child with type {node_type} found")
values = [build_json(child, graph) for child in children]
value = (
list(values)
if value["list"]
else next(iter(values), None) # type: ignore
)
final_dict[key] = value
return final_dict
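
A toy sketch of the root-finding logic above; the graph, node and edge objects here are hypothetical stand-ins (the real Graph class is defined elsewhere in this PR), and the import path is assumed:

# Sketch: the root is the only node that never appears as an edge source.
from types import SimpleNamespace

from langflow.utils.payload import get_root_node  # import path assumed

llm = SimpleNamespace(name="llm")
chain = SimpleNamespace(name="chain")
graph = SimpleNamespace(
    nodes=[llm, chain],
    edges=[SimpleNamespace(source=llm, target=chain)],  # llm feeds into chain
)

print(get_root_node(graph).name)  # -> 'chain'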

View file

@ -1,20 +1,58 @@
import ast
import importlib
import inspect
import re
from typing import Dict, Optional
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langflow.template.constants import FORCE_SHOW_FIELDS
from langflow.utils import constants
def build_template_from_function(name: str, type_to_loader_dict: Dict):
def build_template_from_parameters(
name: str, type_to_loader_dict: Dict, add_function: bool = False
):
# Retrieve the function that matches the provided name
func = None
for _, v in type_to_loader_dict.items():
if v.__name__ == name:
func = v
break
if func is None:
raise ValueError(f"{name} not found")
# Process parameters
parameters = func.__annotations__
variables = {}
for param_name, param_type in parameters.items():
if param_name in ["return", "kwargs"]:
continue
variables[param_name] = {
"type": param_type.__name__,
"default": parameters[param_name].__repr_args__()[0][1],
# Op
"placeholder": "",
}
# Get the base classes of the return type
return_type = parameters.get("return")
base_classes = get_base_classes(return_type) if return_type else []
if add_function:
base_classes.append("function")
# Get the function's docstring
docs = inspect.getdoc(func) or ""
return {
"template": format_dict(variables, name),
"description": docs["Description"], # type: ignore
"base_classes": base_classes,
}
def build_template_from_function(
name: str, type_to_loader_dict: Dict, add_function: bool = False
):
classes = [
item.__annotations__["return"].__name__ for item in type_to_loader_dict.values()
]
@ -29,51 +67,6 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict):
docs = get_class_doc(_class)
variables = {"_type": _type}
for class_field_items, value in _class.__fields__.items():
if class_field_items in ["callback_manager", "requests_wrapper"]:
continue
variables[class_field_items] = {}
for name_, value_ in value.__repr_args__():
if name_ == "default_factory":
try:
variables[class_field_items][
"default"
] = get_default_factory(
module=_class.__base__.__module__, function=value_
)
except Exception:
variables[class_field_items]["default"] = None
elif name_ not in ["name"]:
variables[class_field_items][name_] = value_
variables[class_field_items]["placeholder"] = (
docs["Attributes"][class_field_items]
if class_field_items in docs["Attributes"]
else ""
)
return {
"template": format_dict(variables, name),
"description": docs["Description"],
"base_classes": get_base_classes(_class),
}
def build_template_from_class(name: str, type_to_cls_dict: Dict):
classes = [item.__name__ for item in type_to_cls_dict.values()]
# Raise error if name is not in chains
if name not in classes:
raise ValueError(f"{name} not found.")
for _type, v in type_to_cls_dict.items():
if v.__name__ == name:
_class = v
# Get the docstring
docs = get_class_doc(_class)
variables = {"_type": _type}
for class_field_items, value in _class.__fields__.items():
if class_field_items in ["callback_manager"]:
@ -97,26 +90,93 @@ def build_template_from_class(name: str, type_to_cls_dict: Dict):
if class_field_items in docs["Attributes"]
else ""
)
# Adding function to base classes to allow
# the output to be a function
base_classes = get_base_classes(_class)
if add_function:
base_classes.append("function")
return {
"template": format_dict(variables, name),
"description": docs["Description"],
"base_classes": get_base_classes(_class),
"base_classes": base_classes,
}
def build_template_from_class(
name: str, type_to_cls_dict: Dict, add_function: bool = False
):
classes = [item.__name__ for item in type_to_cls_dict.values()]
# Raise error if name is not in chains
if name not in classes:
raise ValueError(f"{name} not found.")
for _type, v in type_to_cls_dict.items():
if v.__name__ == name:
_class = v
# Get the docstring
docs = get_class_doc(_class)
variables = {"_type": _type}
if "__fields__" in _class.__dict__:
for class_field_items, value in _class.__fields__.items():
if class_field_items in ["callback_manager"]:
continue
variables[class_field_items] = {}
for name_, value_ in value.__repr_args__():
if name_ == "default_factory":
try:
variables[class_field_items][
"default"
] = get_default_factory(
module=_class.__base__.__module__, function=value_
)
except Exception:
variables[class_field_items]["default"] = None
elif name_ not in ["name"]:
variables[class_field_items][name_] = value_
variables[class_field_items]["placeholder"] = (
docs["Attributes"][class_field_items]
if class_field_items in docs["Attributes"]
else ""
)
base_classes = get_base_classes(_class)
# Adding function to base classes to allow
# the output to be a function
if add_function:
base_classes.append("function")
return {
"template": format_dict(variables, name),
"description": docs["Description"],
"base_classes": base_classes,
}
def get_base_classes(cls):
bases = cls.__bases__
if not bases:
return []
else:
"""Get the base classes of a class.
These are used to determine the output of the nodes.
"""
if bases := cls.__bases__:
result = []
for base in bases:
if any(type in base.__module__ for type in ["pydantic", "abc"]):
continue
result.append(base.__name__)
result.extend(get_base_classes(base))
return result
base_classes = get_base_classes(base)
# check if the base_classes are in the result
# if not, add them
for base_class in base_classes:
if base_class not in result:
result.append(base_class)
else:
result = [cls.__name__]
if not result:
result = [cls.__name__]
return list(set(result + [cls.__name__]))
def get_default_factory(module: str, function: str):
@ -128,60 +188,6 @@ def get_default_factory(module: str, function: str):
return None
def get_tools_dict(name: Optional[str] = None):
"""Get the tools dictionary."""
tools = {
**_BASE_TOOLS,
**_LLM_TOOLS, # type: ignore
**{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()}, # type: ignore
**{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
}
return tools[name] if name else tools
def get_tool_params(func, **kwargs):
# Parse the function code into an abstract syntax tree
tree = ast.parse(inspect.getsource(func))
# Iterate over the statements in the abstract syntax tree
for node in ast.walk(tree):
# Find the first return statement
if isinstance(node, ast.Return):
tool = node.value
if isinstance(tool, ast.Call):
if tool.func.id == "Tool":
if tool.keywords:
tool_params = {}
for keyword in tool.keywords:
if keyword.arg == "name":
tool_params["name"] = ast.literal_eval(keyword.value)
elif keyword.arg == "description":
tool_params["description"] = ast.literal_eval(
keyword.value
)
return tool_params
return {
"name": ast.literal_eval(tool.args[0]),
"description": ast.literal_eval(tool.args[2]),
}
else:
# get the class object from the return statement
try:
class_obj = eval(
compile(ast.Expression(tool), "<string>", "eval")
)
except Exception:
return None
return {
"name": getattr(class_obj, "name"),
"description": getattr(class_obj, "description"),
}
# Return None if no return statement was found
return None
def get_class_doc(class_name):
"""
Extracts information from the docstring of a given class.
@ -272,41 +278,58 @@ def format_dict(d, name: Optional[str] = None):
_type = _type.replace("Mapping", "dict")
# Change type from str to Tool
value["type"] = "Tool" if key == "allowed_tools" else _type
value["type"] = "Tool" if key in ["allowed_tools"] else _type
value["type"] = "int" if key in ["max_value_length"] else value["type"]
# Show or not field
value["show"] = bool(
(value["required"] and key not in ["input_variables"])
or key
in [
"allowed_tools",
"memory",
"prefix",
"examples",
"temperature",
"model_name",
]
or key in FORCE_SHOW_FIELDS
or "api_key" in key
)
# Add password field
value["password"] = any(
text in key for text in ["password", "token", "api", "key"]
text in key.lower() for text in ["password", "token", "api", "key"]
)
# Add multiline
value["multiline"] = key in ["suffix", "prefix", "template", "examples"]
value["multiline"] = key in [
"suffix",
"prefix",
"template",
"examples",
"code",
"headers",
]
# Replace dict type with str
if "dict" in value["type"].lower():
value["type"] = "code"
if key == "dict_":
value["type"] = "file"
value["suffixes"] = [".json", ".yaml", ".yml"]
value["fileTypes"] = ["json", "yaml", "yml"]
# Replace default value with actual value
if "default" in value:
value["value"] = value["default"]
value.pop("default")
if key == "headers":
value[
"value"
] = """{'Authorization':
'Bearer <token>'}"""
# Add options to openai
if name == "OpenAI" and key == "model_name":
value["options"] = constants.OPENAI_MODELS
elif name == "OpenAIChat" and key == "model_name":
value["list"] = True
elif name == "ChatOpenAI" and key == "model_name":
value["options"] = constants.CHAT_OPENAI_MODELS
value["list"] = True
return d
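
A small sketch of what get_base_classes reports for a toy hierarchy, assuming it is importable from langflow.utils.util:

# Sketch: pydantic/abc bases are filtered out, the class itself is always included.
from pydantic import BaseModel

from langflow.utils.util import get_base_classes  # import path assumed

class BasePromptTemplate(BaseModel):
    pass

class ZeroShotPrompt(BasePromptTemplate):
    pass

print(sorted(get_base_classes(ZeroShotPrompt)))
# -> ['BasePromptTemplate', 'ZeroShotPrompt']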

View file

@ -0,0 +1,168 @@
import ast
import importlib
import types
from typing import Dict
def add_type_ignores():
if not hasattr(ast, "TypeIgnore"):
class TypeIgnore(ast.AST):
_fields = ()
ast.TypeIgnore = TypeIgnore
def validate_code(code):
# Initialize the errors dictionary
errors = {"imports": {"errors": []}, "function": {"errors": []}}
# Parse the code string into an abstract syntax tree (AST)
try:
tree = ast.parse(code)
except Exception as e:
errors["function"]["errors"].append(str(e))
return errors
# Add a dummy type_ignores field to the AST
add_type_ignores()
tree.type_ignores = []
# Evaluate the import statements
for node in tree.body:
if isinstance(node, ast.Import):
for alias in node.names:
try:
importlib.import_module(alias.name)
except ModuleNotFoundError as e:
errors["imports"]["errors"].append(str(e))
# Evaluate the function definition
for node in tree.body:
if isinstance(node, ast.FunctionDef):
code_obj = compile(
ast.Module(body=[node], type_ignores=[]), "<string>", "exec"
)
try:
exec(code_obj)
except Exception as e:
errors["function"]["errors"].append(str(e))
# Return the errors dictionary
return errors
def eval_function(function_string: str):
# Create an empty dictionary to serve as a separate namespace
namespace: Dict = {}
# Execute the code string in the new namespace
exec(function_string, namespace)
function_object = next(
(obj for name, obj in namespace.items() if isinstance(obj, types.FunctionType)),
None,
)
if function_object is None:
raise ValueError("Function string does not contain a function")
return function_object
def execute_function(code, function_name, *args, **kwargs):
add_type_ignores()
module = ast.parse(code)
exec_globals = globals().copy()
for node in module.body:
if isinstance(node, ast.Import):
for alias in node.names:
try:
exec(
f"{alias.asname or alias.name} = importlib.import_module('{alias.name}')",
exec_globals,
locals(),
)
exec_globals[alias.asname or alias.name] = importlib.import_module(
alias.name
)
except ModuleNotFoundError as e:
raise ModuleNotFoundError(
f"Module {alias.name} not found. Please install it and try again."
) from e
function_code = next(
node
for node in module.body
if isinstance(node, ast.FunctionDef) and node.name == function_name
)
function_code.parent = None
code_obj = compile(
ast.Module(body=[function_code], type_ignores=[]), "<string>", "exec"
)
try:
exec(code_obj, exec_globals, locals())
except Exception as exc:
raise ValueError("Function string does not contain a function") from exc
# Add the function to the exec_globals dictionary
exec_globals[function_name] = locals()[function_name]
return exec_globals[function_name](*args, **kwargs)
def create_function(code, function_name):
if not hasattr(ast, "TypeIgnore"):
class TypeIgnore(ast.AST):
_fields = ()
ast.TypeIgnore = TypeIgnore
module = ast.parse(code)
exec_globals = globals().copy()
for node in module.body:
if isinstance(node, ast.Import):
for alias in node.names:
try:
exec_globals[alias.asname or alias.name] = importlib.import_module(
alias.name
)
except ModuleNotFoundError as e:
raise ModuleNotFoundError(
f"Module {alias.name} not found. Please install it and try again."
) from e
function_code = next(
node
for node in module.body
if isinstance(node, ast.FunctionDef) and node.name == function_name
)
function_code.parent = None
code_obj = compile(
ast.Module(body=[function_code], type_ignores=[]), "<string>", "exec"
)
try:
exec(code_obj, exec_globals, locals())
except Exception:
pass
exec_globals[function_name] = locals()[function_name]
# Return a function that imports necessary modules and calls the target function
def wrapped_function(*args, **kwargs):
for module_name, module in exec_globals.items():
if isinstance(module, type(importlib)):
globals()[module_name] = module
return exec_globals[function_name](*args, **kwargs)
return wrapped_function
def extract_function_name(code):
module = ast.parse(code)
for node in module.body:
if isinstance(node, ast.FunctionDef):
return node.name
raise ValueError("No function definition found in the code string")

View file

@ -21,9 +21,12 @@
"@types/node": "^16.18.12",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"ace-builds": "^1.16.0",
"ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
@ -5098,6 +5101,11 @@
"node": ">= 0.6"
}
},
"node_modules/ace-builds": {
"version": "1.16.0",
"resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.16.0.tgz",
"integrity": "sha512-EriMhoxdfhh0zKm7icSt8EXekODAOVsYh9fpnlru9ALwf0Iw7J7bpuqLjhi3QRxvVKR7P0teQdJwTvjVMcYHuw=="
},
"node_modules/acorn": {
"version": "8.8.2",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
@ -5308,6 +5316,20 @@
"node": ">=4"
}
},
"node_modules/ansi-to-html": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/ansi-to-html/-/ansi-to-html-0.7.2.tgz",
"integrity": "sha512-v6MqmEpNlxF+POuyhKkidusCHWWkaLcGRURzivcU3I9tv7k4JVhFcnukrM5Rlk2rUywdZuzYAZ+kbZqWCnfN3g==",
"dependencies": {
"entities": "^2.2.0"
},
"bin": {
"ansi-to-html": "bin/ansi-to-html"
},
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/anymatch": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
@ -7205,6 +7227,11 @@
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
"integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="
},
"node_modules/diff-match-patch": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz",
"integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw=="
},
"node_modules/diff-sequences": {
"version": "27.5.1",
"resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz",
@ -12409,6 +12436,16 @@
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
"integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow=="
},
"node_modules/lodash.get": {
"version": "4.4.2",
"resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ=="
},
"node_modules/lodash.isequal": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
"integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ=="
},
"node_modules/lodash.memoize": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@ -14803,6 +14840,22 @@
"node": ">=0.10.0"
}
},
"node_modules/react-ace": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/react-ace/-/react-ace-10.1.0.tgz",
"integrity": "sha512-VkvUjZNhdYTuKOKQpMIZi7uzZZVgzCjM7cLYu6F64V0mejY8a2XTyPUIMszC6A4trbeMIHbK5fYFcT/wkP/8VA==",
"dependencies": {
"ace-builds": "^1.4.14",
"diff-match-patch": "^1.0.5",
"lodash.get": "^4.4.2",
"lodash.isequal": "^4.5.0",
"prop-types": "^15.7.2"
},
"peerDependencies": {
"react": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0",
"react-dom": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0"
}
},
"node_modules/react-app-polyfill": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz",

View file

@ -16,9 +16,12 @@
"@types/node": "^16.18.12",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"ace-builds": "^1.16.0",
"ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
@ -57,4 +60,4 @@
]
},
"proxy": "http://backend:7860"
}
}

Binary file not shown (image added, 102 KiB).

View file

@ -4,7 +4,8 @@
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LangFLow</title>
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<title>LangFlow</title>
</head>
<body id='body' style="width: 100%; height:100%">
<noscript>You need to enable JavaScript to run this app.</noscript>

View file

@ -10,6 +10,10 @@ import { typesContext } from "../../../../contexts/typesContext";
import { ParameterComponentType } from "../../../../types/components";
import FloatComponent from "../../../../components/floatComponent";
import Dropdown from "../../../../components/dropdownComponent";
import CodeAreaComponent from "../../../../components/codeAreaComponent";
import InputFileComponent from "../../../../components/inputFileComponent";
import { TabsContext } from "../../../../contexts/tabsContext";
import IntComponent from "../../../../components/intComponent";
export default function ParameterComponent({
left,
@ -42,6 +46,7 @@ export default function ParameterComponent({
const { reactFlowInstance } = useContext(typesContext);
let disabled =
reactFlowInstance?.getEdges().some((e) => e.targetHandle === id) ?? false;
const { save } = useContext(TabsContext);
return (
<div
@ -49,11 +54,17 @@ export default function ParameterComponent({
className="w-full flex flex-wrap justify-between items-center bg-gray-50 dark:bg-gray-800 dark:text-white mt-1 px-5 py-2"
>
<>
<div className="text-sm truncate">
<div className={"text-sm truncate w-full " + (left ? "" : "text-end")}>
{title}
<span className="text-red-600">{required ? " *" : ""}</span>
</div>
{left && (type === "str" || type === "bool" || type === "float") ? (
{left &&
(type === "str" ||
type === "bool" ||
type === "float" ||
type === "code" ||
type === "file" ||
type === "int") ? (
<></>
) : (
<Tooltip title={tooltipTitle + (required ? " (required)" : "")}>
@ -91,6 +102,7 @@ export default function ParameterComponent({
}
onChange={(t: string[]) => {
data.node.template[name].value = t;
save();
}}
/>
) : data.node.template[name].multiline ? (
@ -99,15 +111,17 @@ export default function ParameterComponent({
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
save();
}}
/>
) : (
<InputComponent
disabled={disabled}
password={data.node.template[name].password ?? true}
password={data.node.template[name].password ?? false}
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
)}
@ -120,6 +134,7 @@ export default function ParameterComponent({
setEnabled={(t) => {
data.node.template[name].value = t;
setEnabled(t);
save();
}}
/>
</div>
@ -129,6 +144,7 @@ export default function ParameterComponent({
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
) : left === true &&
@ -136,9 +152,41 @@ export default function ParameterComponent({
data.node.template[name].options ? (
<Dropdown
options={data.node.template[name].options}
onSelect={(newValue) => data.node.template[name].value=newValue}
value={data.node.template[name].value??"chose an option"}
onSelect={(newValue) => (data.node.template[name].value = newValue)}
value={data.node.template[name].value ?? "Choose an option"}
></Dropdown>
) : left === true && type === "code" ? (
<CodeAreaComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
save();
}}
/>
) : left === true && type === "file" ? (
<InputFileComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
}}
fileTypes={data.node.template[name].fileTypes}
suffixes={data.node.template[name].suffixes}
onFileChange={(t: string) => {
data.node.template[name].content = t;
save();
}}
></InputFileComponent>
) : left === true && type === "int" ? (
<IntComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
) : (
<></>
)}

View file

@ -23,7 +23,6 @@ export default function GenericNode({
const { types, deleteNode } = useContext(typesContext);
const Icon = nodeIcons[types[data.type]];
if (!Icon) {
console.log(data);
if (showError.current) {
setErrorData({
title: data.type
@ -32,9 +31,9 @@ export default function GenericNode({
});
showError.current = false;
}
deleteNode(data.id);
return;
}
return (
<div
className={classNames(
@ -72,8 +71,18 @@ export default function GenericNode({
.map((t: string, idx) => (
<div key={idx}>
{idx === 0 ? (
<div className="px-5 py-2 mt-2 dark:text-white text-center">
Inputs:
<div
className={classNames(
"px-5 py-2 mt-2 dark:text-white text-center",
Object.keys(data.node.template).filter(
(key) =>
!key.startsWith("_") && data.node.template[key].show
).length === 0
? "hidden"
: ""
)}
>
Inputs
</div>
) : (
<></>
@ -85,7 +94,13 @@ export default function GenericNode({
nodeColors[types[data.node.template[t].type]] ??
nodeColors.unknown
}
title={snakeToNormalCase(t)}
title={
data.node.template[t].display_name
? data.node.template[t].display_name
: data.node.template[t].name
? snakeToNormalCase(data.node.template[t].name)
: snakeToNormalCase(t)
}
name={t}
tooltipTitle={
"Type: " +
@ -103,7 +118,7 @@ export default function GenericNode({
</div>
))}
<div className="px-5 py-2 mt-2 dark:text-white text-center">
Output:
Output
</div>
<ParameterComponent
data={data}
@ -111,7 +126,7 @@ export default function GenericNode({
title={data.type}
tooltipTitle={`Type: ${data.node.base_classes.join(" | ")}`}
id={[data.type, data.id, ...data.node.base_classes].join("|")}
type={"str"}
type={data.node.base_classes.join("|")}
left={false}
/>
</>

View file

@ -2,6 +2,8 @@ import { ChatBubbleLeftEllipsisIcon, ChatBubbleOvalLeftEllipsisIcon, PlusSmallIc
import { useState } from "react";
import { ChatMessageType } from "../../../types/chat";
import { nodeColors } from "../../../utils";
var Convert = require('ansi-to-html');
var convert = new Convert({newline:true});
export default function ChatMessage({ chat }: { chat: ChatMessageType }) {
const [hidden, setHidden] = useState(true);
@ -27,7 +29,7 @@ export default function ChatMessage({ chat }: { chat: ChatMessageType }) {
style={{ backgroundColor: nodeColors["thought"] }}
className=" text-start inline-block w-full pb-3 pt-3 px-5 cursor-pointer"
dangerouslySetInnerHTML={{
__html: chat.thought.replace(/\n/g, "<br />"),
__html: convert.toHtml(chat.thought)
}}
></div>
)}

View file

@ -5,8 +5,14 @@ import {
PaperAirplaneIcon,
XMarkIcon,
} from "@heroicons/react/24/outline";
import { MouseEventHandler, useContext, useEffect, useRef, useState } from "react";
import { sendAll } from "../../controllers/NodesServices";
import {
MouseEventHandler,
useContext,
useEffect,
useRef,
useState,
} from "react";
import { sendAll } from "../../controllers/API";
import { alertContext } from "../../contexts/alertContext";
import { classNames, nodeColors } from "../../utils";
import { TabsContext } from "../../contexts/tabsContext";
@ -16,7 +22,8 @@ import ChatMessage from "./chatMessage";
const _ = require("lodash");
export default function Chat({ flow, reactFlowInstance }: ChatType) {
const { updateFlow,lockChat,setLockChat,flows,tabIndex } = useContext(TabsContext);
const { updateFlow, lockChat, setLockChat, flows, tabIndex } =
useContext(TabsContext);
const [saveChat, setSaveChat] = useState(false);
const [open, setOpen] = useState(true);
const [chatValue, setChatValue] = useState("");
@ -25,14 +32,14 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
const addChatHistory = (
message: string,
isSend: boolean,
thought?: string,
thought?: string
) => {
let tabsChange = false;
setChatHistory((old) => {
let newChat = _.cloneDeep(old);
if(JSON.stringify(flow.chat) !==JSON.stringify(old)){
tabsChange = true
return old
if (JSON.stringify(flow.chat) !== JSON.stringify(old)) {
tabsChange = true;
return old;
}
if (thought) {
newChat.push({ message, isSend, thought });
@ -41,12 +48,17 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
}
return newChat;
});
if(tabsChange){
if(thought){
updateFlow({..._.cloneDeep(flow),chat:[...flow.chat,{isSend,message,thought}]})
}
else{
updateFlow({..._.cloneDeep(flow),chat:[...flow.chat,{isSend,message}]})
if (tabsChange) {
if (thought) {
updateFlow({
..._.cloneDeep(flow),
chat: [...flow.chat, { isSend, message, thought }],
});
} else {
updateFlow({
..._.cloneDeep(flow),
chat: [...flow.chat, { isSend, message }],
});
}
}
setSaveChat((chat) => !chat);
@ -63,25 +75,24 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
}, [chatHistory]);
function validateNodes() {
if (
reactFlowInstance
.getNodes()
.some(
(n) =>
n.data.node &&
Object.keys(n.data.node.template).some(
(t: any) =>
n.data.node.template[t].required &&
n.data.node.template[t].value === "" &&
n.data.node.template[t].required &&
!reactFlowInstance
.getEdges()
.some(
(e) =>
e.sourceHandle.split("|")[1] === t &&
e.sourceHandle.split("|")[2] === n.id
)
)
)
reactFlowInstance.getNodes().some(
(n) =>
n.data.node &&
Object.keys(n.data.node.template).some((t: any) => {
return (
n.data.node.template[t].required &&
(!n.data.node.template[t].value ||
n.data.node.template[t].value === "") &&
!reactFlowInstance
.getEdges()
.some(
(e) =>
e.targetHandle.split("|")[1] === t &&
e.targetHandle.split("|")[2] === n.id
)
);
})
)
) {
return false;
}
@ -97,19 +108,38 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
setChatValue("");
addChatHistory(message, true);
sendAll({ ...reactFlowInstance.toObject(), message, chatHistory,name:flow.name,description:flow.description})
sendAll({
...reactFlowInstance.toObject(),
message,
chatHistory,
name: flow.name,
description: flow.description,
})
.then((r) => {
addChatHistory(r.data.result, false, r.data.thought);
setLockChat(false);
})
.catch((error) => {
setErrorData({ title: error.message ?? "unknow error" });
setErrorData({
title: error.message ?? "Unknown Error",
list: [error.response.data.detail],
});
setLockChat(false);
let lastMessage;
setChatHistory((chatHistory) => {
let newChat = chatHistory;
lastMessage = newChat.pop().message;
return newChat;
});
setChatValue(lastMessage);
});
} else {
setErrorData({
title: "Error sending message",
list: [ "Oops! Looks like you missed some required information. Please fill in all the required fields before continuing."],
list: [
"Oops! Looks like you missed some required information. Please fill in all the required fields before continuing.",
],
});
}
} else {
@ -120,8 +150,8 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
}
}
function clearChat() {
setChatHistory([])
updateFlow({ ..._.cloneDeep(flow), chat: []});
setChatHistory([]);
updateFlow({ ..._.cloneDeep(flow), chat: [] });
}
return (
@ -151,9 +181,10 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
/>
Chat
</div>
<button className="hover:text-blue-500"
<button
className="hover:text-blue-500 dark:text-white"
onClick={(e) => {
e.stopPropagation()
e.stopPropagation();
clearChat();
}}
>

View file

@ -0,0 +1,50 @@
import { ArrowTopRightOnSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import CodeAreaModal from "../../modals/codeAreaModal";
import TextAreaModal from "../../modals/textAreaModal";
import { TextAreaComponentType } from "../../types/components";
export default function CodeAreaComponent({
value,
onChange,
disabled,
}: TextAreaComponentType) {
const [myValue, setMyValue] = useState(value);
const { openPopUp } = useContext(PopUpContext);
useEffect(() => {
if (disabled) {
setMyValue("");
onChange("");
}
}, [disabled, onChange]);
return (
<div className={disabled ? "pointer-events-none cursor-not-allowed w-full" : "w-full"}>
<div className="w-full flex items-center gap-3">
<span
className={
"truncate block w-full text-gray-500 px-3 py-2 rounded-md border border-gray-300 dark:border-gray-700 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
(disabled ? " bg-gray-200" : "")
}
>
{myValue !== "" ? myValue : "Text empty"}
</span>
<button
onClick={() => {
openPopUp(
<CodeAreaModal
value={myValue}
setValue={(t: string) => {
setMyValue(t);
onChange(t);
}}
/>
);
}}
>
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600" />
</button>
</div>
</div>
);
}

View file

@ -5,7 +5,7 @@ import { DropDownComponentType } from "../../types/components";
import { classNames } from "../../utils";
export default function Dropdown({value, options, onSelect}:DropDownComponentType) {
let [internalValue,setInternalValue] = useState(value??"choose an option")
let [internalValue,setInternalValue] = useState(value===""||!value?"Choose an option":value)
return (
<>
<Listbox value={internalValue} onChange={(value)=>{

View file

@ -0,0 +1,85 @@
import { DocumentMagnifyingGlassIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { alertContext } from "../../contexts/alertContext";
import { FileComponentType } from "../../types/components";
export default function InputFileComponent({
value,
onChange,
disabled,
suffixes,
fileTypes,
onFileChange
}: FileComponentType) {
const [myValue, setMyValue] = useState(value);
const { setErrorData } = useContext(alertContext);
useEffect(() => {
if (disabled) {
setMyValue("");
onChange("");
onFileChange("")
}
}, [disabled, onChange]);
function attachFile(fileReadEvent: ProgressEvent<FileReader>) {
fileReadEvent.preventDefault();
const file = fileReadEvent.target.result;
onFileChange(file as string)
}
function checkFileType(fileName:string):boolean{
for (let index = 0; index < suffixes.length; index++) {
if(fileName.endsWith(suffixes[index])){
return true
}
}
return false
}
const handleButtonClick = () => {
const input = document.createElement("input");
input.type = "file";
input.accept = suffixes.join(",");
input.style.display = "none";
input.multiple = false;
input.onchange = (e: Event) => {
const file = (e.target as HTMLInputElement).files?.[0];
const fileData = new FileReader();
fileData.onload = attachFile;
if (file && checkFileType(file.name)) {
fileData.readAsDataURL(file);
setMyValue(file.name);
onChange(file.name);
} else {
setErrorData({
title:
"Please select a valid file. Only files this files are allowed:",
list: fileTypes,
});
}
};
input.click();
};
return (
<div
className={
disabled ? "pointer-events-none cursor-not-allowed w-full" : "w-full"
}
>
<div className="w-full flex items-center gap-3">
<span
className={
"truncate block w-full text-gray-500 px-3 py-2 rounded-md border border-gray-300 dark:border-gray-700 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
(disabled ? " bg-gray-200" : "")
}
>
{myValue !== "" ? myValue : "No file"}
</span>
<button onClick={handleButtonClick}>
<DocumentMagnifyingGlassIcon className="w-8 h-8 hover:text-blue-600" />
</button>
</div>
</div>
);
}

View file

@ -0,0 +1,38 @@
import { useEffect, useState } from "react";
import { FloatComponentType } from "../../types/components";
export default function IntComponent({
value,
onChange,
disabled,
}: FloatComponentType) {
const [myValue, setMyValue] = useState(value ?? "");
useEffect(() => {
if (disabled) {
setMyValue("");
onChange("");
}
}, [disabled, onChange]);
return (
<div className={disabled ? "pointer-events-none cursor-not-allowed w-full" : "w-full"}>
<input
onKeyDown={(event) => {
if (event.key !== 'Backspace' && event.key !== 'Enter' && event.key !== 'Delete' && event.key !== 'ArrowLeft' && event.key !== 'ArrowRight' && !/^[-]?\d*$/.test(event.key)) {
event.preventDefault();
}
}}
type="number"
value={myValue}
className={
"block w-full form-input dark:bg-gray-900 arrow-hide dark:border-gray-600 rounded-md border-gray-300 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
(disabled ? " bg-gray-200 dark:bg-gray-700" : "")
}
placeholder="Type a integer number"
onChange={(e) => {
setMyValue(e.target.value);
onChange(e.target.value);
}}
/>
</div>
);
}

View file

@ -1,6 +1,7 @@
import { ArrowTopRightOnSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import CodeAreaModal from "../../modals/codeAreaModal";
import TextAreaModal from "../../modals/textAreaModal";
import { TextAreaComponentType } from "../../types/components";
@ -9,8 +10,8 @@ export default function TextAreaComponent({ value, onChange, disabled }:TextArea
const { openPopUp } = useContext(PopUpContext);
useEffect(() => {
if (disabled) {
setMyValue([""]);
onChange([""]);
setMyValue("");
onChange("");
}
}, [disabled, onChange]);
return (

View file

@ -5,6 +5,7 @@ import { normalCaseToSnakeCase } from "../utils";
import { alertContext } from "./alertContext";
const TabsContextInitialValue: TabsContextType = {
save:()=>{},
tabIndex: 0,
setTabIndex: (index: number) => {},
flows: [],
@ -16,7 +17,7 @@ const TabsContextInitialValue: TabsContextType = {
uploadFlow: () => {},
lockChat: false,
setLockChat:(prevState:boolean)=>{},
hardReset:()=>{}
hardReset:()=>{},
};
export const TabsContext = createContext<TabsContextType>(
@ -35,15 +36,20 @@ export function TabsProvider({ children }: { children: ReactNode }) {
newNodeId.current = newNodeId.current + 1;
return newNodeId.current;
}
function save(){
if (flows.length !== 0)
window.localStorage.setItem(
"tabsData",
JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current })
);
}
useEffect(() => {
//save tabs locally
if (flows.length !== 0)
window.localStorage.setItem(
"tabsData",
JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current })
);
save()
}, [flows, id, tabIndex, newNodeId]);
useEffect(() => {
//get tabs locally saved
let cookie = window.localStorage.getItem("tabsData");
@ -177,6 +183,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
return (
<TabsContext.Provider
value={{
save,
hardReset,
lockChat,
setLockChat,

View file

@ -1,3 +1,4 @@
import { errorsTypeAPI } from './../../types/api/index';
import { APIObjectType, sendAllProps } from '../../types/api/index';
import axios, { AxiosResponse } from "axios";
@ -7,4 +8,9 @@ export async function getAll():Promise<AxiosResponse<APIObjectType>> {
export async function sendAll(data:sendAllProps) {
return await axios.post(`/predict`, data);
}
export async function checkCode(code:string):Promise<AxiosResponse<errorsTypeAPI>>{
return await axios.post('/validate/code',{code})
}

View file

@ -0,0 +1,178 @@
import { Dialog, Transition } from "@headlessui/react";
import { XMarkIcon, CommandLineIcon } from "@heroicons/react/24/outline";
import { Fragment, useContext, useRef, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import AceEditor from "react-ace";
import "ace-builds/src-noconflict/mode-python";
import "ace-builds/src-noconflict/theme-github";
import "ace-builds/src-noconflict/theme-twilight";
import "ace-builds/src-noconflict/ext-language_tools";
import "ace-builds/webpack-resolver";
import { darkContext } from "../../contexts/darkContext";
import { checkCode } from "../../controllers/API";
import { alertContext } from "../../contexts/alertContext";
export default function CodeAreaModal({
value,
setValue,
}: {
setValue: (value: string) => void;
value: string;
}) {
const [open, setOpen] = useState(true);
const [code, setCode] = useState(value);
const { dark } = useContext(darkContext);
const { setErrorData, setSuccessData } = useContext(alertContext);
const { closePopUp } = useContext(PopUpContext);
const ref = useRef();
function setModalOpen(x: boolean) {
setOpen(x);
if (x === false) {
setTimeout(() => {
closePopUp();
}, 300);
}
}
return (
<Transition.Root show={open} appear={true} as={Fragment}>
<Dialog
as="div"
className="relative z-10"
onClose={setModalOpen}
initialFocus={ref}
>
<Transition.Child
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0"
enterTo="opacity-100"
leave="ease-in duration-200"
leaveFrom="opacity-100"
leaveTo="opacity-0"
>
<div className="fixed inset-0 bg-gray-500 dark:bg-gray-600 dark:bg-opacity-75 bg-opacity-75 transition-opacity" />
</Transition.Child>
<div className="fixed inset-0 z-10 overflow-y-auto">
<div className="flex h-full items-end justify-center p-4 text-center sm:items-center sm:p-0">
<Transition.Child
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
enterTo="opacity-100 translate-y-0 sm:scale-100"
leave="ease-in duration-200"
leaveFrom="opacity-100 translate-y-0 sm:scale-100"
leaveTo="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
>
<Dialog.Panel className="relative flex flex-col justify-between transform h-[600px] overflow-hidden rounded-lg bg-white dark:bg-gray-800 text-left shadow-xl transition-all sm:my-8 w-[700px]">
<div className=" z-50 absolute top-0 right-0 hidden pt-4 pr-4 sm:block">
<button
type="button"
className="rounded-md text-gray-400 hover:text-gray-500 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2"
onClick={() => {
setModalOpen(false);
}}
>
<span className="sr-only">Close</span>
<XMarkIcon className="h-6 w-6" aria-hidden="true" />
</button>
</div>
<div className="h-full w-full flex flex-col justify-center items-center">
<div className="flex w-full pb-4 z-10 justify-center shadow-sm">
<div className="mx-auto mt-4 flex h-12 w-12 flex-shrink-0 items-center justify-center rounded-full bg-blue-100 dark:bg-gray-900 sm:mx-0 sm:h-10 sm:w-10">
<CommandLineIcon
className="h-6 w-6 text-blue-600"
aria-hidden="true"
/>
</div>
<div className="mt-4 text-center sm:ml-4 sm:text-left">
<Dialog.Title
as="h3"
className="text-lg font-medium dark:text-white leading-10 text-gray-900"
>
Edit Code
</Dialog.Title>
</div>
</div>
<div className="h-full w-full bg-gray-200 overflow-auto dark:bg-gray-900 p-4 gap-4 flex flex-row justify-center items-center">
<div className="flex h-full w-full">
<div className="overflow-hidden px-4 py-5 sm:p-6 w-full h-full rounded-lg bg-white dark:bg-gray-800 shadow">
{/* need to insert code editor */}
<AceEditor
value={code}
mode="python"
highlightActiveLine={true}
showPrintMargin={false}
fontSize={14}
showGutter
enableLiveAutocompletion
theme={dark ? "twilight" : "github"}
name="CodeEditor"
onChange={(value) => {
setCode(value);
}}
className="h-full w-full rounded-lg"
/>
</div>
</div>
</div>
<div className="bg-gray-200 dark:bg-gray-900 w-full pb-3 flex flex-row-reverse px-4">
<button
type="button"
className="inline-flex w-full justify-center rounded-md border border-transparent bg-indigo-600 px-4 py-2 text-base font-medium text-white shadow-sm hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 sm:ml-3 sm:w-auto sm:text-sm"
onClick={() => {
checkCode(code)
.then((apiReturn) => {
console.log(apiReturn);
if (apiReturn.data) {
console.log(apiReturn.data);
let importsErrors = apiReturn.data.imports.errors;
let funcErrors = apiReturn.data.function.errors;
if (
funcErrors.length === 0 &&
importsErrors.length === 0
) {
setSuccessData({
title: "Code is ready to run",
});
setModalOpen(false);
setValue(code)
} else {
if (funcErrors.length !== 0) {
setErrorData({
title: "There is an error in your function",
list: funcErrors,
});
}
if(importsErrors.length!==0){
setErrorData({
title: "There is an error in your imports",
list: importsErrors,
});
}
}
} else {
setErrorData({
title: "Something went wrong, please try again",
});
}
})
.catch((_) =>
setErrorData({
title:
"There is something wrong with this code, please review it",
})
);
}}
>
Check & Save
</button>
</div>
</div>
</Dialog.Panel>
</Transition.Child>
</div>
</div>
</Dialog>
</Transition.Root>
);
}

View file

@ -26,6 +26,7 @@ export default function ExportModal() {
}
}
const [checked,setChecked] = useState(true)
const [name,setName] = useState(flows[tabIndex].name)
return (
<Transition.Root show={open} appear={true} as={Fragment}>
<Dialog
@ -100,15 +101,16 @@ export default function ExportModal() {
if(event.target.value!=""){
let newFlow = flows[tabIndex];
newFlow.name = event.target.value;
setName(event.target.value)
updateFlow(newFlow);
}
else{
setErrorData({title:"Flow name can't be empty"})
setName(event.target.value)
}
}}
type="text"
name="name"
value={flows[tabIndex].name ?? null}
value={name ?? null}
placeholder="File name"
id="name"
className="focus:border focus:border-blue block w-full px-3 py-2 border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-800 dark:border-gray-600 dark:focus:border-blue-500 dark:focus:ring-blue-500"

View file

@ -38,7 +38,7 @@ export default function ButtonBox({
<h3 className="mt-6 text-lg font-semibold text-white">{title}</h3>
<div className="mt-1 flex flex-grow flex-col justify-between">
<dt className="sr-only">{title}</dt>
<dd className="text-sm text-gray-100">{deactivate? "cooming soon":description}</dd>
<dd className="text-sm text-gray-100">{deactivate? "Coming soon":description}</dd>
</div>
</div>
</div>

View file

@ -1,86 +1,93 @@
import { Bars2Icon } from "@heroicons/react/24/outline";
import DisclosureComponent from "../DisclosureComponent";
import {
nodeColors,
nodeIcons,
nodeNames,
} from "../../../../utils";
import { nodeColors, nodeIcons, nodeNames } from "../../../../utils";
import { useContext, useEffect, useState } from "react";
import { getAll } from "../../../../controllers/NodesServices";
import { getAll } from "../../../../controllers/API";
import { typesContext } from "../../../../contexts/typesContext";
import { APIClassType, APIKindType, APIObjectType } from "../../../../types/api";
import {
APIClassType,
APIKindType,
APIObjectType,
} from "../../../../types/api";
export default function ExtraSidebar() {
const [data, setData] = useState({});
const { setTypes} = useContext(typesContext);
const [data, setData] = useState({});
const { setTypes } = useContext(typesContext);
useEffect(() => {
async function getTypes():Promise<void>{
// Make an asynchronous API call to retrieve all data.
let result = await getAll();
// Update the state of the component with the retrieved data.
setData(result.data);
// Set the types by reducing over the keys of the result data and updating the accumulator.
setTypes(
Object.keys(result.data).reduce(
(acc, curr) => {
Object.keys(result.data[curr]).forEach((c:keyof APIKindType) => {
acc[c] = curr;
// Add the base classes to the accumulator as well.
result.data[curr][c].base_classes?.forEach((b) => {
acc[b] = curr;
});
});
return acc;
},{}
)
);
}
// Call the getTypes function.
getTypes();
}, [setTypes]);
useEffect(() => {
async function getTypes(): Promise<void> {
// Make an asynchronous API call to retrieve all data.
let result = await getAll();
// Update the state of the component with the retrieved data.
setData(result.data);
function onDragStart(event: React.DragEvent<any>, data:{type:string,node?:APIClassType}) {
//start drag event
event.dataTransfer.effectAllowed = "move";
event.dataTransfer.setData("json", JSON.stringify(data));
}
// Set the types by reducing over the keys of the result data and updating the accumulator.
setTypes(
Object.keys(result.data).reduce((acc, curr) => {
Object.keys(result.data[curr]).forEach((c: keyof APIKindType) => {
acc[c] = curr;
// Add the base classes to the accumulator as well.
result.data[curr][c].base_classes?.forEach((b) => {
acc[b] = curr;
});
});
return acc;
}, {})
);
}
// Call the getTypes function.
getTypes();
}, [setTypes]);
return (
<div className="mt-1 w-full">
{Object.keys(data).map((d:keyof APIObjectType, i) => (
<DisclosureComponent
key={i}
button={{ title: nodeNames[d]??nodeNames.unknown, Icon: nodeIcons[d]??nodeIcons.unknown }}
>
<div className="p-2 flex flex-col gap-2">
{Object.keys(data[d]).map((t: string, k) => (
<div key={k}>
<div
draggable
className={" cursor-grab border-l-8 rounded-l-md"}
style={{ borderLeftColor: nodeColors[d]??nodeColors.unknown }}
onDragStart={(event) =>
onDragStart(event, {
type: t,
node: data[d][t],
})
}
>
<div className="flex w-full justify-between text-sm px-3 py-1 items-center border-dashed border-gray-400 dark:border-gray-600 border-l-0 rounded-md rounded-l-none border">
<span className="text-black dark:text-white w-36 truncate text-xs">{t}</span>
<Bars2Icon className="w-4 h-6 text-gray-400 dark:text-gray-600" />
</div>
</div>
</div>
))}
</div>
</DisclosureComponent>
))}
</div>
);
function onDragStart(
event: React.DragEvent<any>,
data: { type: string; node?: APIClassType }
) {
//start drag event
event.dataTransfer.effectAllowed = "move";
event.dataTransfer.setData("json", JSON.stringify(data));
}
return (
<div className="mt-1 w-full">
{Object.keys(data).map((d: keyof APIObjectType, i) => (
<DisclosureComponent
key={i}
button={{
title: nodeNames[d] ?? nodeNames.unknown,
Icon: nodeIcons[d] ?? nodeIcons.unknown,
}}
>
<div className="p-2 flex flex-col gap-2">
{Object.keys(data[d]).map((t: string, k) => (
<div key={k}>
<div
draggable
className={" cursor-grab border-l-8 rounded-l-md"}
style={{
borderLeftColor: nodeColors[d] ?? nodeColors.unknown,
}}
onDragStart={(event) =>
onDragStart(event, {
type: t,
node: data[d][t],
})
}
>
<div className="flex w-full justify-between text-sm px-3 py-1 items-center border-dashed border-gray-400 dark:border-gray-600 border-l-0 rounded-md rounded-l-none border">
<span className="text-black dark:text-white w-36 truncate text-xs">
{t}
</span>
<Bars2Icon className="w-4 h-6 text-gray-400 dark:text-gray-600" />
</div>
</div>
</div>
))}
{Object.keys(data[d]).length===0 && <div className="text-gray-400 text-center">Coming soon</div>}
</div>
</DisclosureComponent>
))}
</div>
);
}
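For reference, the reducer inside getTypes flattens the grouped response from getAll() into a single lookup from each node name (and each of its base classes) to the kind it belongs to. A minimal sketch with invented sample data, mirroring the reduce above:

// Hypothetical shape of result.data; the values are invented for illustration.
const sample: Record<string, Record<string, { base_classes?: string[] }>> = {
  chains: { LLMChain: { base_classes: ["Chain"] } },
  llms: { OpenAI: { base_classes: ["BaseLLM", "BaseLanguageModel"] } },
};

// Same reduce as in getTypes: map every class name and base class to its kind.
const typeMap = Object.keys(sample).reduce((acc, kind) => {
  Object.keys(sample[kind]).forEach((name) => {
    acc[name] = kind;
    sample[kind][name].base_classes?.forEach((b) => (acc[b] = kind));
  });
  return acc;
}, {} as Record<string, string>);
// typeMap => { LLMChain: "chains", Chain: "chains",
//              OpenAI: "llms", BaseLLM: "llms", BaseLanguageModel: "llms" }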
View file
@@ -15,4 +15,5 @@ export type sendAllProps={
message:string;
chatHistory:{message:string,isSend:boolean}[],
};
};
export type errorsTypeAPI={function:{errors:Array<string>},imports:{errors:Array<string>}}
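errorsTypeAPI mirrors the payload the code modal reads above: one error list for the imports and one for the function body. A small illustrative helper, not part of this commit, that flattens it into the list handed to setErrorData (no import is needed if it sits next to the type above):

// Illustrative helper; the name is an assumption, not from this diff.
export function collectCodeErrors(result: errorsTypeAPI): Array<string> {
  // An empty array means both the imports and the function validated cleanly.
  return [...result.imports.errors, ...result.function.errors];
}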
View file
@@ -36,7 +36,16 @@ export type InputListComponentType = {
export type TextAreaComponentType = {
disabled: boolean;
onChange: (value: string[] | string) => void;
value: string[] | string;
value: string;
};
export type FileComponentType = {
disabled: boolean;
onChange: (value: string[] | string) => void;
value: string;
suffixes:Array<string>;
fileTypes:Array<string>;
onFileChange:(value: string) => void;
};
export type DisclosureComponentType = {
View file
@@ -1,6 +1,7 @@
import { FlowType } from "../flow";
export type TabsContextType = {
save:()=>void;
tabIndex: number;
setTabIndex: (index: number) => void;
flows: Array<FlowType>;
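The change here is that the tabs context now also exposes a save() callback next to the tab state. A hypothetical consumer, with the context object name and import path assumed rather than taken from this hunk:

import { useContext } from "react";
import { TabsContext } from "../contexts/tabsContext"; // name and path assumed

export function SaveShortcut() {
  // Pull the new save() callback out of the context and trigger it from a button.
  const { save } = useContext(TabsContext);
  return <button onClick={() => save()}>Save flows</button>;
}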
View file
@@ -5,9 +5,10 @@ import {
LightBulbIcon,
CommandLineIcon,
WrenchScrewdriverIcon,
WrenchIcon,
ComputerDesktopIcon,
Bars3CenterLeftIcon,
PaperClipIcon,
GiftIcon,
QuestionMarkCircleIcon,
} from "@heroicons/react/24/outline";
import { Connection, Edge, Node, ReactFlowInstance } from "reactflow";
@@ -88,6 +89,8 @@ export const nodeNames:{[char: string]: string} = {
advanced: "Advanced",
chat: "Chat",
docloaders:"Document Loader",
toolkits:"Toolkits",
wrappers:"Wrappers",
unknown:"Unknown"
};
@@ -97,10 +100,12 @@ export const nodeIcons:{[char: string]: React.ForwardRefExoticComponent<React.SV
memories: CpuChipIcon,
llms: LightBulbIcon,
prompts: CommandLineIcon,
tools: WrenchScrewdriverIcon,
tools: WrenchIcon,
advanced: ComputerDesktopIcon,
chat: Bars3CenterLeftIcon,
docloaders:Bars3CenterLeftIcon,
toolkits:WrenchScrewdriverIcon,
wrappers:GiftIcon,
unknown:QuestionMarkCircleIcon
};
32
tests/conftest.py Normal file
View file
@@ -0,0 +1,32 @@
from pathlib import Path
import pytest
from fastapi.testclient import TestClient
def pytest_configure():
pytest.BASIC_EXAMPLE_PATH = (
Path(__file__).parent.absolute() / "data" / "basic_example.json"
)
pytest.COMPLEX_EXAMPLE_PATH = (
Path(__file__).parent.absolute() / "data" / "complex_example.json"
)
pytest.OPENAPI_EXAMPLE_PATH = (
Path(__file__).parent.absolute() / "data" / "Openapi.json"
)
pytest.CODE_WITH_SYNTAX_ERROR = """
def get_text():
retun "Hello World"
"""
# Create client fixture for FastAPI
@pytest.fixture(scope="module")
def client():
from langflow.main import create_app
app = create_app()
with TestClient(app) as client:
yield client
445
tests/data/Openapi.json Normal file
File diff suppressed because one or more lines are too long
View file
@@ -1,15 +1,16 @@
{
"name": "New Flow 9",
"id": "1",
"name": "New Flow ",
"id": "0",
"data": {
"nodes": [{
"nodes": [
{
"width": 384,
"height": 391,
"id": "dndnode_61",
"id": "dndnode_1",
"type": "genericNode",
"position": {
"x": 764,
"y": 382
"x": 644,
"y": 348
},
"data": {
"type": "LLMChain",
@@ -17,7 +18,7 @@
"template": {
"_type": "llm_chain",
"memory": {
"type": "Memory",
"type": "BaseMemory",
"required": false,
"placeholder": "",
"list": false,
@@ -34,7 +35,7 @@
"show": false,
"password": false,
"multiline": false,
"value": true
"value": false
},
"prompt": {
"type": "BasePromptTemplate",
@@ -46,7 +47,7 @@
"multiline": false
},
"llm": {
"type": "BaseLLM",
"type": "BaseLanguageModel",
"required": true,
"placeholder": "",
"list": false,
@@ -70,24 +71,22 @@
"Chain"
]
},
"id": "dndnode_61",
"id": "dndnode_1",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 764,
"y": 382
},
"dragging": false
"x": 644,
"y": 348
}
},
{
"width": 384,
"height": 351,
"id": "dndnode_62",
"id": "dndnode_4",
"type": "genericNode",
"position": {
"x": 1488,
"y": 550
"x": 1236,
"y": 39.59999999999957
},
"data": {
"type": "ZeroShotAgent",
@@ -131,22 +130,22 @@
"Agent"
]
},
"id": "dndnode_62",
"id": "dndnode_4",
"value": null
},
"positionAbsolute": {
"x": 1488,
"y": 550
"x": 1236,
"y": 39.59999999999957
}
},
{
"width": 384,
"height": 529,
"id": "dndnode_63",
"id": "dndnode_5",
"type": "genericNode",
"position": {
"x": 206,
"y": 210
"x": 96,
"y": -299.2000000000003
},
"data": {
"type": "ZeroShotPrompt",
@@ -186,24 +185,24 @@
"BasePromptTemplate"
]
},
"id": "dndnode_63",
"id": "dndnode_5",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 206,
"y": 210
"x": 96,
"y": -299.2000000000003
},
"dragging": false
},
{
"width": 384,
"height": 501,
"id": "dndnode_64",
"height": 477,
"id": "dndnode_8",
"type": "genericNode",
"position": {
"x": 210,
"y": 818
"x": 75,
"y": 328
},
"data": {
"type": "OpenAI",
@@ -228,7 +227,7 @@
"show": false,
"password": false,
"multiline": false,
"value": true
"value": null
},
"client": {
"type": "Any",
@@ -250,7 +249,6 @@
"multiline": false,
"value": "text-davinci-003",
"options": [
"gpt-3.5-turbo",
"text-davinci-003",
"text-davinci-002"
]
@@ -343,7 +341,7 @@
"show": true,
"password": true,
"multiline": false,
"value": "---"
"value": "sk-"
},
"batch_size": {
"type": "int",
@@ -399,27 +397,28 @@
"description": "Generic OpenAI class that uses model name.",
"base_classes": [
"BaseOpenAI",
"BaseLLM"
"BaseLLM",
"BaseLanguageModel"
]
},
"id": "dndnode_64",
"id": "dndnode_8",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 210,
"y": 818
"x": 75,
"y": 328
},
"dragging": false
},
{
"width": 384,
"height": 397,
"id": "dndnode_65",
"id": "dndnode_9",
"type": "genericNode",
"position": {
"x": 776,
"y": 922
"x": 643,
"y": 824
},
"data": {
"type": "Serper Search",
@@ -427,13 +426,13 @@
"template": {
"serper_api_key": {
"type": "str",
"required": false,
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "---"
},
"_type": "google-serper"
"_type": "Serper Search"
},
"name": "Serper Search",
"description": "A low-cost Google Search API. Useful for when you need to answer questions about current events. Input should be a search query.",
@@ -441,101 +440,66 @@
"Tool"
]
},
"id": "dndnode_65",
"id": "dndnode_9",
"value": null
},
"selected": false,
"selected": true,
"positionAbsolute": {
"x": 776,
"y": 922
"x": 643,
"y": 824
},
"dragging": false
}
],
"edges": [{
"source": "dndnode_63",
"sourceHandle": "ZeroShotPrompt|dndnode_63|BasePromptTemplate",
"target": "dndnode_61",
"targetHandle": "BasePromptTemplate|prompt|dndnode_61",
"edges": [
{
"source": "dndnode_5",
"sourceHandle": "ZeroShotPrompt|dndnode_5|BasePromptTemplate",
"target": "dndnode_1",
"targetHandle": "BasePromptTemplate|prompt|dndnode_1",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_63ZeroShotPrompt|dndnode_63|BasePromptTemplate-dndnode_61BasePromptTemplate|prompt|dndnode_61"
"id": "reactflow__edge-dndnode_5ZeroShotPrompt|dndnode_5|BasePromptTemplate-dndnode_1BasePromptTemplate|prompt|dndnode_1"
},
{
"source": "dndnode_64",
"sourceHandle": "OpenAI|dndnode_64|BaseOpenAI,|BaseLLM",
"target": "dndnode_61",
"targetHandle": "BaseLLM|llm|dndnode_61",
"source": "dndnode_1",
"sourceHandle": "LLMChain|dndnode_1|Chain",
"target": "dndnode_4",
"targetHandle": "LLMChain|llm_chain|dndnode_4",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_64OpenAI|dndnode_64|BaseOpenAI,|BaseLLM-dndnode_61BaseLLM|llm|dndnode_61"
"id": "reactflow__edge-dndnode_1LLMChain|dndnode_1|Chain-dndnode_4LLMChain|llm_chain|dndnode_4"
},
{
"source": "dndnode_65",
"sourceHandle": "Serper Search|dndnode_65|Tool",
"target": "dndnode_62",
"targetHandle": "Tool|allowed_tools|dndnode_62",
"source": "dndnode_8",
"sourceHandle": "OpenAI|dndnode_8|BaseOpenAI|BaseLLM|BaseLanguageModel",
"target": "dndnode_1",
"targetHandle": "BaseLanguageModel|llm|dndnode_1",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_65Serper Search|dndnode_65|Tool-dndnode_62Tool|allowed_tools|dndnode_62"
"id": "reactflow__edge-dndnode_8OpenAI|dndnode_8|BaseOpenAI|BaseLLM|BaseLanguageModel-dndnode_1BaseLanguageModel|llm|dndnode_1"
},
{
"source": "dndnode_61",
"sourceHandle": "LLMChain|dndnode_61|Chain",
"target": "dndnode_62",
"targetHandle": "LLMChain|llm_chain|dndnode_62",
"source": "dndnode_9",
"sourceHandle": "Serper Search|dndnode_9|Tool",
"target": "dndnode_4",
"targetHandle": "Tool|allowed_tools|dndnode_4",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_61LLMChain|dndnode_61|Chain-dndnode_62LLMChain|llm_chain|dndnode_62"
"id": "reactflow__edge-dndnode_9Serper Search|dndnode_9|Tool-dndnode_4Tool|allowed_tools|dndnode_4"
}
],
"viewport": {
"x": -103,
"y": -135,
"zoom": 1
"x": 22.5,
"y": 42,
"zoom": 0.5
}
},
"chat": [{
"message": "What is the new llm Prismer?",
"chat": [
{
"message": "Langflow. What is it?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "Prismer LLM is a vision-language model with an ensemble of experts.",
"message": "Langflow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chrome extension powered by GPT-3.",
"isSend": false,
"thought": "Thought: I should research what this is\nAction: Serper Search\nAction Input: \"prismer llm\"\nObservation: Announcing Prismer, my team's latest vision-language AI, empowered by domain-expert models in depth, surface normal, segmentation, etc. My team's work, Prismer, is a representative example. We use a textual LM as the backbone, and plug in many visual domain experts through a neural adapter ... “Prismer is able to achieve superior data efficiency compared to ... New OpenAI post about future of ChatGPT's (and its other LLM) alignment. Prismer: A Vision-Language Model with An Ensemble of Experts ... UDAPDR: Unsupervised Domain Adaptation via LLM Prompting and Distillation of Rerankers. Emory Law's LLM program offers graduates of US and foreign law schools the opportunity to pursue the master of laws degree from one of the top nationally ranked ... Missing: prismer | Must include:prismer. Prismer: A Vision-Language Model with An Ensemble of Experts ... github: github.com/NVlabs/prismer ... into LLM embedding space viously to their being sworn, the prisoner, Michael. Solomon, standing at the bar, challenged two ... a« wa« brought before- them.\" The prisoner ?llM'eow*. RWKV is a RNN with transformer-level LLM performance. ... The implementation of \"Prismer: A Vision-Language Model with An Ensemble of Experts\". Announcing Prismer, our NVIDIA team's latest vision-language #ai empowered by domain-expert models in depth, surface normal, segmentation, etc. No paywall.\nThought:\n\n> Finished chain."
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "Prismer is an AI vision-language model with an ensemble of experts developed by a NVIDIA team. It is capable of achieving superior data efficiency compared to other models and can be directly trained like a GPT.",
"isSend": false,
"thought": "Thought: I need to search for the term\nAction: Serper Search\nAction Input: \"prismer llm\"\nObservation: Announcing Prismer, my team's latest vision-language AI, empowered by domain-expert models in depth, surface normal, segmentation, etc. My team's work, Prismer, is a representative example. We use a textual LM as the backbone, and plug in many visual domain experts through a neural adapter ... “Prismer is able to achieve superior data efficiency compared to ... New OpenAI post about future of ChatGPT's (and its other LLM) alignment. Prismer: A Vision-Language Model with An Ensemble of Experts ... UDAPDR: Unsupervised Domain Adaptation via LLM Prompting and Distillation of Rerankers. A very insightful post about the potential issues with LLM chatbots for general ... Announcing Prismer, our NVIDIA team's latest vision-language #ai ... As technology continues to transform our economy and culture, businesses need a new breed of lawyers who understand the legal and commercial aspects of new ... Missing: prismer | Must include:prismer. RWKV is a RNN with transformer-level LLM performance. ... The implementation of \"Prismer: A Vision-Language Model with An Ensemble of Experts\". ... Bendix vorad collision warning system, Vegglampe med prismer, Ok magazine ... Alabama llm, Colt 25 caliber magazine, Poppie clinch, Mcds upper school, ... RWKV is an RNN with transformer-level LLM performance. It can be directly trained like a GPT (parallelizable). So it's combining the best of RNN ...\nThought:\n\n> Finished chain."
"thought": "> Entering new AgentExecutor chain...\n I need to research what Langflow is.\nAction: Serper Search\nAction Input: \"What is Langflow?\"\nObservation: Researchers introduce LangFlow, a graphical user interface (GUI) for LangChain that simplifies testing and creation of smart applications. The drag-and-drop feature provides a quick and effortless way to experiment and prototype, and the built-in chat interface enables real-time ... LangFlow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop ... LangFlow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a ... an open-source chrome extension powered by GPT-3 to get the explanation of a piece of code on any site (Stackoverflow, Github, etc). ChatGPT correctly identified Mona Lisa but failed to guess the cartoon character Michael was thinking of, suggesting Bugs Bunny holding a ... Introducing LangFlow! An open-source UI for. @LangChainAI. that enables seamless combination of multiple deep learning models to perform ... See student reviews, school photos, Chinese Mandarin courses, and housing options available at Langflow Education Centre (Macau, Macau) - Reviews - Language ... Longfellow Elementary students play on the playground during recess. Longfellow Elementary students pause for a photo during class.\nThought: Langflow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chrome extension powered by GPT-3.\nFinal Answer: Langflow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chrome extension powered by GPT-3.\n\n> Finished chain."
}
]
}
View file
@@ -0,0 +1,723 @@
{
"name": "New Flow",
"id": "0",
"data": {
"nodes": [
{
"width": 384,
"height": 351,
"id": "dndnode_3",
"type": "genericNode",
"position": {
"x": 612.9299322834961,
"y": 194.75070242078417
},
"data": {
"type": "ZeroShotAgent",
"node": {
"template": {
"_type": "zero-shot-react-description",
"llm_chain": {
"type": "LLMChain",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"allowed_tools": {
"type": "Tool",
"required": false,
"placeholder": "",
"list": true,
"show": true,
"password": false,
"multiline": false,
"value": null
},
"return_values": {
"type": "str",
"required": false,
"placeholder": "",
"list": true,
"show": false,
"password": false,
"multiline": false,
"value": [
"output"
]
}
},
"description": "Agent for the MRKL chain.",
"base_classes": [
"Agent",
"function"
]
},
"id": "dndnode_3",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 612.9299322834961,
"y": 194.75070242078417
},
"dragging": false
},
{
"width": 384,
"height": 463,
"id": "dndnode_27",
"type": "genericNode",
"position": {
"x": 86.29922452047686,
"y": 39.132143332238115
},
"data": {
"type": "Tool",
"node": {
"template": {
"name": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "Uppercase",
"password": false,
"multiline": false
},
"description": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "Returns an uppercase version of the text passed.",
"password": false,
"multiline": false
},
"func": {
"type": "function",
"required": true,
"list": false,
"show": true,
"value": "",
"multiline": false,
"password": false
},
"_type": "Tool"
},
"name": "Tool",
"func": "",
"description": "",
"base_classes": [
"Tool"
]
},
"id": "dndnode_27",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 86.29922452047686,
"y": 39.132143332238115
},
"dragging": false
},
{
"width": 384,
"height": 463,
"id": "dndnode_28",
"type": "genericNode",
"position": {
"x": 1134.4549802672202,
"y": 287.9885910233929
},
"data": {
"type": "Tool",
"node": {
"template": {
"name": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "",
"password": false,
"multiline": false
},
"description": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "",
"password": false,
"multiline": false
},
"func": {
"type": "function",
"required": true,
"list": false,
"show": true,
"value": "",
"multiline": false,
"password": false
},
"_type": "Tool"
},
"name": "Tool",
"func": "",
"description": "",
"base_classes": [
"Tool"
]
},
"id": "dndnode_28",
"value": null
},
"positionAbsolute": {
"x": 1134.4549802672202,
"y": 287.9885910233929
},
"selected": false,
"dragging": false
},
{
"width": 384,
"height": 357,
"id": "dndnode_40",
"type": "genericNode",
"position": {
"x": -366.4341715850213,
"y": 136.29836646158452
},
"data": {
"type": "PythonFunction",
"node": {
"template": {
"code": {
"required": true,
"placeholder": "",
"show": true,
"multiline": true,
"value": "\ndef upper_case(text: str) -> str:\n return text.upper()\n",
"name": "code",
"type": "str",
"list": false
},
"_type": "python_function"
},
"description": "Python function to be executed.",
"base_classes": [
"function"
]
},
"id": "dndnode_40",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": -366.4341715850213,
"y": 136.29836646158452
},
"dragging": false
},
{
"width": 384,
"height": 351,
"id": "dndnode_41",
"type": "genericNode",
"position": {
"x": 1642.7653281427417,
"y": 69.01105573790835
},
"data": {
"type": "ZeroShotAgent",
"node": {
"template": {
"_type": "zero-shot-react-description",
"llm_chain": {
"type": "LLMChain",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"allowed_tools": {
"type": "Tool",
"required": false,
"placeholder": "",
"list": true,
"show": true,
"password": false,
"multiline": false,
"value": null
},
"return_values": {
"type": "str",
"required": false,
"placeholder": "",
"list": true,
"show": false,
"password": false,
"multiline": false,
"value": [
"output"
]
}
},
"description": "Agent for the MRKL chain.",
"base_classes": [
"Agent",
"function"
]
},
"id": "dndnode_41",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 1642.7653281427417,
"y": 69.01105573790835
},
"dragging": false
},
{
"width": 384,
"height": 529,
"id": "dndnode_42",
"type": "genericNode",
"position": {
"x": -379.23467185725826,
"y": -551.3889442620921
},
"data": {
"type": "ZeroShotPrompt",
"node": {
"template": {
"prefix": {
"required": false,
"placeholder": "",
"show": true,
"multiline": true,
"value": "Answer the following questions as best you can. You have access to the following tools:",
"name": "prefix",
"type": "str",
"list": false
},
"suffix": {
"required": true,
"placeholder": "",
"show": true,
"multiline": true,
"value": "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}",
"name": "suffix",
"type": "str",
"list": false
},
"format_instructions": {
"required": false,
"placeholder": "",
"show": true,
"multiline": true,
"value": "Use the following format:\n\nQuestion: the input question you must answer\nThought: you should always think about what to do\nAction: the action to take, should be one of [{tool_names}]\nAction Input: the input to the action\nObservation: the result of the action\n... (this Thought/Action/Action Input/Observation can repeat N times)\nThought: I now know the final answer\nFinal Answer: the final answer to the original input question",
"name": "format_instructions",
"type": "str",
"list": false
},
"_type": "zero_shot"
},
"description": "Prompt template for Zero Shot Agent.",
"base_classes": [
"BasePromptTemplate"
]
},
"id": "dndnode_42",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": -379.23467185725826,
"y": -551.3889442620921
},
"dragging": false
},
{
"width": 384,
"height": 391,
"id": "dndnode_43",
"type": "genericNode",
"position": {
"x": 100.76532814274174,
"y": -437.78894426209195
},
"data": {
"type": "LLMChain",
"node": {
"template": {
"_type": "llm_chain",
"memory": {
"type": "BaseMemory",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false,
"value": null
},
"verbose": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": false
},
"prompt": {
"type": "BasePromptTemplate",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"llm": {
"type": "BaseLanguageModel",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"output_key": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": true,
"multiline": false,
"value": "text"
}
},
"description": "Chain to run queries against LLMs.",
"base_classes": [
"Chain"
]
},
"id": "dndnode_43",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 100.76532814274174,
"y": -437.78894426209195
},
"dragging": false
},
{
"width": 384,
"height": 477,
"id": "dndnode_44",
"type": "genericNode",
"position": {
"x": -841.2346718572583,
"y": 368.6110557379079
},
"data": {
"type": "OpenAI",
"node": {
"template": {
"_type": "openai",
"cache": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"verbose": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"client": {
"type": "Any",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"model_name": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false,
"value": "text-davinci-003",
"options": [
"text-davinci-003",
"text-davinci-002",
"text-curie-001",
"text-babbage-001",
"text-ada-001"
]
},
"temperature": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false,
"value": 0.7
},
"max_tokens": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": true,
"multiline": false,
"value": 256
},
"top_p": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 1
},
"frequency_penalty": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 0
},
"presence_penalty": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 0
},
"n": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 1
},
"best_of": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 1
},
"model_kwargs": {
"type": "dict[str, Any]",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"openai_api_key": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": true,
"multiline": false,
"value": "sk-"
},
"batch_size": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 20
},
"request_timeout": {
"type": "Union[float, Tuple[float, float], NoneType]",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"logit_bias": {
"type": "dict[str, float]",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"max_retries": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 6
},
"streaming": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": false
}
},
"description": "Generic OpenAI class that uses model name.",
"base_classes": [
"BaseOpenAI",
"BaseLLM",
"BaseLanguageModel"
]
},
"id": "dndnode_44",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": -841.2346718572583,
"y": 368.6110557379079
},
"dragging": false
}
],
"edges": [
{
"source": "dndnode_27",
"sourceHandle": "Tool|dndnode_27|Tool",
"target": "dndnode_3",
"targetHandle": "Tool|allowed_tools|dndnode_3",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_27Tool|dndnode_27|Tool-dndnode_3Tool|allowed_tools|dndnode_3"
},
{
"source": "dndnode_3",
"sourceHandle": "ZeroShotAgent|dndnode_3|Agent|function",
"target": "dndnode_28",
"targetHandle": "function|func|dndnode_28",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_3ZeroShotAgent|dndnode_3|Agent|function-dndnode_28function|func|dndnode_28"
},
{
"source": "dndnode_40",
"sourceHandle": "PythonFunction|dndnode_40|function",
"target": "dndnode_27",
"targetHandle": "function|func|dndnode_27",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_40PythonFunction|dndnode_40|function-dndnode_27function|func|dndnode_27"
},
{
"source": "dndnode_28",
"sourceHandle": "Tool|dndnode_28|Tool",
"target": "dndnode_41",
"targetHandle": "Tool|allowed_tools|dndnode_41",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_28Tool|dndnode_28|Tool-dndnode_41Tool|allowed_tools|dndnode_41"
},
{
"source": "dndnode_42",
"sourceHandle": "ZeroShotPrompt|dndnode_42|BasePromptTemplate",
"target": "dndnode_43",
"targetHandle": "BasePromptTemplate|prompt|dndnode_43",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_42ZeroShotPrompt|dndnode_42|BasePromptTemplate-dndnode_43BasePromptTemplate|prompt|dndnode_43"
},
{
"source": "dndnode_44",
"sourceHandle": "OpenAI|dndnode_44|BaseOpenAI|BaseLLM|BaseLanguageModel",
"target": "dndnode_43",
"targetHandle": "BaseLanguageModel|llm|dndnode_43",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_44OpenAI|dndnode_44|BaseOpenAI|BaseLLM|BaseLanguageModel-dndnode_43BaseLanguageModel|llm|dndnode_43"
},
{
"source": "dndnode_43",
"sourceHandle": "LLMChain|dndnode_43|Chain",
"target": "dndnode_3",
"targetHandle": "LLMChain|llm_chain|dndnode_3",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_43LLMChain|dndnode_43|Chain-dndnode_3LLMChain|llm_chain|dndnode_3"
},
{
"source": "dndnode_43",
"sourceHandle": "LLMChain|dndnode_43|Chain",
"target": "dndnode_41",
"targetHandle": "LLMChain|llm_chain|dndnode_41",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_43LLMChain|dndnode_43|Chain-dndnode_41LLMChain|llm_chain|dndnode_41"
}
],
"viewport": {
"x": 250.11733592862913,
"y": 349.94447213104604,
"zoom": 0.5
}
},
"chat": []
}
65
tests/test_cache.py Normal file
View file
@@ -0,0 +1,65 @@
import json
import tempfile
from pathlib import Path
import pytest
from langflow.cache.utils import PREFIX, save_cache
from langflow.interface.run import load_langchain_object
def get_graph(_type="basic"):
"""Get a graph from a json file"""
if _type == "basic":
path = pytest.BASIC_EXAMPLE_PATH
elif _type == "complex":
path = pytest.COMPLEX_EXAMPLE_PATH
elif _type == "openapi":
path = pytest.OPENAPI_EXAMPLE_PATH
with open(path, "r") as f:
flow_graph = json.load(f)
return flow_graph["data"]
@pytest.fixture
def basic_data_graph():
return get_graph()
@pytest.fixture
def complex_data_graph():
return get_graph("complex")
@pytest.fixture
def openapi_data_graph():
return get_graph("openapi")
def langchain_objects_are_equal(obj1, obj2):
return str(obj1) == str(obj2)
def test_cache_creation(basic_data_graph):
# Compute hash for the input data_graph
# Call process_graph function to build and cache the langchain_object
is_first_message = True
computed_hash, langchain_object = load_langchain_object(
basic_data_graph, is_first_message=is_first_message
)
save_cache(computed_hash, langchain_object, is_first_message)
# Check if the cache file exists
cache_file = Path(tempfile.gettempdir()) / f"{PREFIX}_{computed_hash}.dill"
assert cache_file.exists()
def test_cache_reuse(basic_data_graph):
# Call process_graph function to build and cache the langchain_object
result1 = load_langchain_object(basic_data_graph)
# Call process_graph function again to use the cached langchain_object
result2 = load_langchain_object(basic_data_graph)
# Compare the results to ensure the same langchain_object was used
assert langchain_objects_are_equal(result1, result2)
Some files were not shown because too many files have changed in this diff.