Merge pull request #92 from logspace-ai/toolkits

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-04-01 18:10:06 -03:00 committed by GitHub
commit babe9f0e30
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
79 changed files with 3126 additions and 1272 deletions

View file

@ -15,4 +15,4 @@ COPY ./ ./
# Install dependencies
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi
CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload"]
# Fix: uvicorn's log-level option must be passed as a flag ("--log-level");
# a bare "log-level" token is parsed as an unexpected positional argument.
CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", "--log-level", "debug"]

157
poetry.lock generated
View file

@ -239,37 +239,37 @@ lxml = ["lxml"]
[[package]]
name = "black"
version = "23.1.0"
version = "23.3.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
{file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
{file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
{file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
{file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
{file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
{file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
{file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
{file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
{file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
{file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
{file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
{file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
{file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
]
[package.dependencies]
@ -579,6 +579,21 @@ files = [
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "dill"
version = "0.3.6"
description = "serialize all of python"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
{file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
]
[package.extras]
graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "exceptiongroup"
version = "1.1.1"
@ -759,14 +774,14 @@ uritemplate = ">=3.0.1,<5"
[[package]]
name = "google-auth"
version = "2.17.0"
version = "2.17.1"
description = "Google Authentication Library"
category = "main"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*"
files = [
{file = "google-auth-2.17.0.tar.gz", hash = "sha256:f51d26ebb3e5d723b9a7dbd310b6c88654ef1ad1fc35750d1fdba48ca4d82f52"},
{file = "google_auth-2.17.0-py2.py3-none-any.whl", hash = "sha256:45ba9b4b3e49406de3c5451697820694b2f6ce8a6b75bb187852fdae231dab94"},
{file = "google-auth-2.17.1.tar.gz", hash = "sha256:8f379b46bad381ad2a0b989dfb0c13ad28d3c2a79f27348213f8946a1d15d55a"},
{file = "google_auth-2.17.1-py2.py3-none-any.whl", hash = "sha256:357ff22a75b4c0f6093470f21816a825d2adee398177569824e37b6c10069e19"},
]
[package.dependencies]
@ -1079,14 +1094,14 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio"
[[package]]
name = "ipython"
version = "8.11.0"
version = "8.12.0"
description = "IPython: Productive Interactive Computing"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"},
{file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"},
{file = "ipython-8.12.0-py3-none-any.whl", hash = "sha256:1c183bf61b148b00bcebfa5d9b39312733ae97f6dad90d7e9b4d86c8647f498c"},
{file = "ipython-8.12.0.tar.gz", hash = "sha256:a950236df04ad75b5bc7f816f9af3d74dc118fd42f2ff7e80e8e60ca1f182e2d"},
]
[package.dependencies]
@ -1102,6 +1117,7 @@ prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0"
pygments = ">=2.4.0"
stack-data = "*"
traitlets = ">=5"
typing-extensions = {version = "*", markers = "python_version < \"3.10\""}
[package.extras]
all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"]
@ -1183,14 +1199,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "langchain"
version = "0.0.113"
version = "0.0.127"
description = "Building applications with LLMs through composability"
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain-0.0.113-py3-none-any.whl", hash = "sha256:9e146d116fd3b9b2210c8c447cabfa20ef27c26ea3f2bc986eab97d1dad0aab6"},
{file = "langchain-0.0.113.tar.gz", hash = "sha256:a494fe02bc63da4bcda7da8d7f4a346522fbc87f0a4955b72519ec2ed86bf906"},
{file = "langchain-0.0.127-py3-none-any.whl", hash = "sha256:04ba053881e6098e80e0f4afc8922f3fe78923b160fd12d856aebce49c261918"},
{file = "langchain-0.0.127.tar.gz", hash = "sha256:e8a3b67fd86a6f79c4334f0a7588c9476fcb57b27a8fb0e617f47c01eaab8be8"},
]
[package.dependencies]
@ -1198,14 +1214,14 @@ aiohttp = ">=3.8.3,<4.0.0"
dataclasses-json = ">=0.5.7,<0.6.0"
numpy = ">=1,<2"
pydantic = ">=1,<2"
PyYAML = ">=6,<7"
PyYAML = ">=5.4.1"
requests = ">=2,<3"
SQLAlchemy = ">=1,<2"
tenacity = ">=8.1.0,<9.0.0"
[package.extras]
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.2,<0.3.0)", "beautifulsoup4 (>=4,<5)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
llms = ["anthropic (>=0.2.2,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"]
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.4,<0.3.0)", "beautifulsoup4 (>=4,<5)", "boto3 (>=1.26.96,<2.0.0)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
llms = ["anthropic (>=0.2.4,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"]
[[package]]
name = "markdown-it-py"
@ -1523,6 +1539,55 @@ files = [
{file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
]
[[package]]
name = "pandas"
version = "1.5.3"
description = "Powerful data structures for data analysis, time series, and statistics"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"},
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"},
{file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"},
{file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"},
{file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"},
{file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"},
{file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"},
{file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"},
{file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"},
{file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"},
{file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"},
{file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"},
{file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"},
]
[package.dependencies]
numpy = [
{version = ">=1.20.3", markers = "python_version < \"3.10\""},
{version = ">=1.21.0", markers = "python_version >= \"3.10\""},
{version = ">=1.23.2", markers = "python_version >= \"3.11\""},
]
python-dateutil = ">=2.8.1"
pytz = ">=2020.1"
[package.extras]
test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"]
[[package]]
name = "parso"
version = "0.8.3"
@ -1852,7 +1917,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "dev"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
@ -1863,6 +1928,18 @@ files = [
[package.dependencies]
six = ">=1.5"
[[package]]
name = "pytz"
version = "2023.3"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
{file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
]
[[package]]
name = "pywin32"
version = "306"
@ -2635,4 +2712,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "18b858c93c242f3b53e9f77284904aa0eabb4c955f905cfe5fb227a6785bfabc"
content-hash = "2b523f3d737ef8f7082e8156f096bce6f4f84a8bee9d07bd4ed23a29d3dcfab1"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.0.50"
version = "0.0.52"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -29,9 +29,11 @@ google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.7.0"
gunicorn = "^20.1.0"
langchain = "^0.0.113"
langchain = "^0.0.127"
openai = "^0.27.2"
types-pyyaml = "^6.0.12.8"
dill = "^0.3.6"
pandas = "^1.5.3"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"

View file

@ -7,6 +7,7 @@ import typer
from fastapi.staticfiles import StaticFiles
from langflow.main import create_app
from langflow.settings import settings
logger = logging.getLogger(__name__)
@ -17,9 +18,21 @@ def get_number_of_workers(workers=None):
return workers
def update_settings(config: str):
    """Apply configuration from a YAML file to the global settings.

    A falsy ``config`` (e.g. empty string) is a no-op.
    """
    if not config:
        return
    settings.update_from_yaml(config)
def serve(
host: str = "127.0.0.1", workers: int = 1, timeout: int = 60, port: int = 7860
host: str = "127.0.0.1",
workers: int = 1,
timeout: int = 60,
port: int = 7860,
config: str = "config.yaml",
log_level: str = "info",
):
update_settings(config)
app = create_app()
# get the directory of the current file
path = Path(__file__).parent
@ -42,7 +55,7 @@ def serve(
# MacOS requires a env variable to be set to use gunicorn
import uvicorn
uvicorn.run(app, host=host, port=port, log_level="info")
uvicorn.run(app, host=host, port=port, log_level=log_level)
else:
from langflow.server import LangflowApplication

View file

@ -1,15 +1,16 @@
import logging
from typing import Any, Dict
from fastapi import APIRouter, HTTPException
from langflow.api.base import Code, ValidationResponse
from langflow.api.base import Code, ValidationResponse
from langflow.interface.run import process_graph
from langflow.interface.types import build_langchain_types_dict
from langflow.utils.validate import validate_code
# build router
router = APIRouter()
logger = logging.getLogger(__name__)
@router.get("/all")
@ -22,7 +23,9 @@ def get_load(data: Dict[str, Any]):
try:
return process_graph(data)
except Exception as e:
return HTTPException(status_code=500, detail=str(e))
# Log stack trace
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
@router.post("/validate", status_code=200, response_model=ValidationResponse)

View file

@ -1,58 +0,0 @@
from fastapi import APIRouter
from langflow.interface.listing import list_type
# build router
# Router exposing component-listing endpoints under /list.
router = APIRouter(
    prefix="/list",
    tags=["list"],
)


@router.get("/")
def read_items():
    """List all component categories served by this router.

    Keep this list in sync with the GET endpoints below.
    """
    # Fix: "memories" was missing even though a /list/memories endpoint
    # exists in this router.
    return [
        "chains",
        "agents",
        "prompts",
        "llms",
        "tools",
        "memories",
    ]
@router.get("/chains")
def list_chains():
    """Return the names of the available chain types."""
    category = "chains"
    return list_type(category)


@router.get("/agents")
def list_agents():
    """Return the names of the available agent types."""
    category = "agents"
    return list_type(category)


@router.get("/prompts")
def list_prompts():
    """Return the names of the available prompt types."""
    category = "prompts"
    return list_type(category)


@router.get("/llms")
def list_llms():
    """Return the names of the available LLM types."""
    category = "llms"
    return list_type(category)


@router.get("/memories")
def list_memories():
    """Return the names of the available memory types."""
    category = "memories"
    return list_type(category)


@router.get("/tools")
def list_tools():
    """Return the names of the loadable tools."""
    category = "tools"
    return list_type(category)

View file

@ -1,63 +0,0 @@
from fastapi import APIRouter, HTTPException
from langflow.interface.signature import get_signature
# build router
# Router collecting the /signatures/* endpoints defined below.
router = APIRouter(
    prefix="/signatures",
    tags=["signatures"],
)
def _signature_or_404(name: str, type_name: str, not_found_message: str):
    """Look up a component signature, converting ValueError into HTTP 404."""
    try:
        return get_signature(name, type_name)
    except ValueError as exc:
        raise HTTPException(status_code=404, detail=not_found_message) from exc


@router.get("/chain")
def get_chain(name: str):
    """Get the signature of a chain."""
    return _signature_or_404(name, "chains", "Chain not found")


@router.get("/agent")
def get_agent(name: str):
    """Get the signature of an agent."""
    return _signature_or_404(name, "agents", "Agent not found")


@router.get("/prompt")
def get_prompt(name: str):
    """Get the signature of a prompt."""
    return _signature_or_404(name, "prompts", "Prompt not found")


@router.get("/llm")
def get_llm(name: str):
    """Get the signature of an llm."""
    return _signature_or_404(name, "llms", "LLM not found")


@router.get("/memory")
def get_memory(name: str):
    """Get the signature of a memory."""
    return _signature_or_404(name, "memories", "Memory not found")


@router.get("/tool")
def get_tool(name: str):
    """Get the signature of a tool."""
    return _signature_or_404(name, "tools", "Tool not found")

View file

50
src/backend/langflow/cache/utils.py vendored Normal file
View file

@ -0,0 +1,50 @@
import contextlib
import hashlib
import json
import os
import tempfile
from pathlib import Path
import dill # type: ignore
# Filename prefix shared by all langflow cache files in the temp directory.
PREFIX = "langflow_cache"


def clear_old_cache_files(max_cache_size: int = 10):
    """Keep at most *max_cache_size* cache files in the temp directory.

    Files matching ``{PREFIX}_*.dill`` are ranked by modification time
    (newest first) and everything beyond the first *max_cache_size* is
    deleted. Deletion errors are ignored: another process may have removed
    the file already.
    """
    tmp_dir = Path(tempfile.gettempdir())
    newest_first = sorted(
        tmp_dir.glob(f"{PREFIX}_*.dill"),
        key=lambda path: path.stat().st_mtime,
        reverse=True,
    )
    for stale in newest_first[max_cache_size:]:
        with contextlib.suppress(OSError):
            stale.unlink()
def remove_position_info(node):
    """Strip the layout-only "position" entry from *node*, if present."""
    node.pop("position", None)


def compute_hash(graph_data):
    """Return a SHA-256 hex digest of *graph_data*, ignoring node positions.

    NOTE: mutates *graph_data* in place by dropping each node's "position"
    key before hashing, so visually-moved but otherwise identical graphs
    hash equal.
    """
    for graph_node in graph_data["nodes"]:
        remove_position_info(graph_node)
    canonical_json = json.dumps(graph_data, sort_keys=True)
    digest = hashlib.sha256(canonical_json.encode("utf-8"))
    return digest.hexdigest()
def save_cache(hash_val, chat_data):
    """Serialize *chat_data* with dill to the temp-dir cache file for *hash_val*."""
    target = Path(tempfile.gettempdir()) / f"{PREFIX}_{hash_val}.dill"
    with target.open("wb") as fh:
        dill.dump(chat_data, fh)


def load_cache(hash_val):
    """Deserialize and return the cached object for *hash_val*, or None if absent.

    SECURITY NOTE: dill.load (like pickle) executes arbitrary code from the
    file; only load cache files this application wrote itself.
    """
    source = Path(tempfile.gettempdir()) / f"{PREFIX}_{hash_val}.dill"
    if not source.exists():
        return None
    with source.open("rb") as fh:
        return dill.load(fh)

View file

@ -6,6 +6,8 @@ chains:
agents:
- ZeroShotAgent
- JsonAgent
- CSVAgent
prompts:
- PromptTemplate
@ -22,9 +24,14 @@ tools:
- Serper Search
- Tool
- PythonFunction
- JsonSpec
memories:
# - ConversationBufferMemory
wrappers:
- RequestsWrapper
toolkits:
- OpenAPIToolkit
- JsonToolkit
embeddings:
#

View file

@ -1,9 +1,9 @@
from langflow.template import nodes
# Registry of langflow-specific node templates, keyed by component category.
# Each value merges the to_dict() payloads of the custom node classes that
# belong to that category.
CUSTOM_NODES = {
    "prompts": {**nodes.ZeroShotPromptNode().to_dict()},
    "tools": {**nodes.PythonFunctionNode().to_dict(), **nodes.ToolNode().to_dict()},
    "agents": {**nodes.JsonAgentNode().to_dict(), **nodes.CSVAgentNode().to_dict()},
}

View file

@ -0,0 +1,4 @@
from langflow.graph.base import Edge, Node
from langflow.graph.graph import Graph
__all__ = ["Graph", "Node", "Edge"]

View file

@ -0,0 +1,251 @@
# Description: Graph class for building a graph of nodes and edges
# Insights:
# - Defer prompts building to the last moment or when they have all the tools
# - Build each inner agent first, then build the outer agent
import logging
import types
from copy import deepcopy
from typing import Any, Dict, List
from langflow.graph.constants import DIRECT_TYPES
from langflow.graph.utils import load_file
from langflow.interface import loading
from langflow.interface.listing import ALL_TYPES_DICT
logger = logging.getLogger(__name__)
class Node:
def __init__(self, data: Dict, base_type: str | None = None) -> None:
    """Create a graph node from a raw frontend node dict.

    data: node payload; must contain "id" and a nested "data" dict
        (see _parse_data for the expected shape).
    base_type: component category (e.g. "agents"); when None it is
        inferred from ALL_TYPES_DICT in _parse_data.
    """
    self.id: str = data["id"]
    self._data = data
    # Edges touching this node; populated externally via add_edge().
    self.edges: List[Edge] = []
    self.base_type: str | None = base_type
    self._parse_data()
    # Lazily constructed object and its dirty flag; see build()/_build().
    self._built_object = None
    self._built = False
def _parse_data(self) -> None:
    """Unpack the frontend node payload into typed attributes.

    Derives output base classes, required/optional input types, the
    concrete node_type, and (if not given) the base_type category.
    """
    self.data = self._data["data"]
    self.output = self.data["node"]["base_classes"]
    # Only dict-valued template entries describe parameters; other keys
    # (e.g. "_type" as a plain string) are metadata.
    template_dicts = {
        key: value
        for key, value in self.data["node"]["template"].items()
        if isinstance(value, dict)
    }

    self.required_inputs = [
        template_dicts[key]["type"]
        for key, value in template_dicts.items()
        if value["required"]
    ]
    self.optional_inputs = [
        template_dicts[key]["type"]
        for key, value in template_dicts.items()
        if not value["required"]
    ]

    template_dict = self.data["node"]["template"]
    # Tools are identified by the template's "_type" rather than the
    # frontend "type" field.
    self.node_type = (
        self.data["type"] if "Tool" not in self.output else template_dict["_type"]
    )

    # Infer the category (chains/agents/tools/...) from the global
    # type registry when the caller did not supply one.
    if self.base_type is None:
        for base_type, value in ALL_TYPES_DICT.items():
            if self.node_type in value:
                self.base_type = base_type
                break
def _build_params(self):
    """Resolve this node's template into a params dict (self.params).

    Values come from three sources: uploaded files (type == "file"),
    connected nodes via edges (non-direct types), or literal "value"
    entries (direct python types).
    """
    # Some params are required, some are optional
    # but most importantly, some params are python base classes
    # like str and others are LangChain objects like LLMChain, BasePromptTemplate
    # so we need to be able to distinguish between the two
    # The dicts with "type" == "str" are the ones that are python base classes
    # and most likely have a "value" key
    # So for each key besides "_type" in the template dict, we have a dict
    # with a "type" key. If the type is not "str", then we need to get the
    # edge that connects to that node and get the Node with the required data
    # and use that as the value for the param
    # If the type is "str", then we need to get the value of the "value" key
    # and use that as the value for the param
    template_dict = {
        key: value
        for key, value in self.data["node"]["template"].items()
        if isinstance(value, dict)
    }
    params = {}
    for key, value in template_dict.items():
        if key == "_type":
            continue
        # If the type is not transformable to a python base class
        # then we need to get the edge that connects to this node
        if value["type"] == "file":
            # Load the type in value.get('suffixes') using
            # what is inside value.get('content')
            # value.get('value') is the file name
            type_to_load = value.get("suffixes")
            file_name = value.get("value")
            content = value.get("content")
            loaded_dict = load_file(file_name, content, type_to_load)
            params[key] = loaded_dict
        # We should check if the type is in something not
        # the opposite
        elif value["type"] not in DIRECT_TYPES:
            # Get the edge that connects to this node
            # NOTE(review): this try/except only re-raises and could be
            # removed; kept as-is here (doc-only change).
            try:
                edge = next(
                    (
                        edge
                        for edge in self.edges
                        if edge.target == self
                        and edge.matched_type in value["type"]
                    ),
                    None,
                )
            except Exception as e:
                raise e
            # Get the output of the node that the edge connects to
            # if the value['list'] is True, then there will be more
            # than one time setting to params[key]
            # so we need to append to a list if it exists
            # or create a new list if it doesn't
            if edge is None and value["required"]:
                raise ValueError(
                    f"Required input {key} for module {self.node_type} not found"
                )
            elif value["list"]:
                if key in params:
                    params[key].append(edge.source)
                else:
                    params[key] = [edge.source]
            elif value["required"] or edge is not None:
                params[key] = edge.source
        # Direct python type: take the literal value when required or set.
        elif value["required"] or value.get("value"):
            params[key] = value["value"]
    # Add _type to params
    self.params = params
    def _build(self):
        """Instantiate the underlying LangChain object for this node.

        Recursively builds every Node stored in ``self.params`` (replacing the
        Node with its built object), then instantiates ``self.node_type`` via
        ``loading.instantiate_class`` and stores the result in
        ``self._built_object``.

        Raises:
            ValueError: if instantiation fails or produces ``None``.
        """
        # The params dict is used to build the module
        # it contains values and keys that point to nodes which
        # have their own params dict
        # When build is called, we iterate through the params dict
        # and if the value is a node, we call build on that node
        # and use the output of that build as the value for the param
        # if the value is not a node, then we use the value as the param
        # and continue
        # Another aspect is that the node_type is the class that we need to import
        # and instantiate with these built params
        logger.debug(f"Building {self.node_type}")
        # Build each node in the params dict.
        # Iterate over a copy: entries may be deleted below when a param
        # points back at this very node (self-reference guard).
        for key, value in self.params.copy().items():
            # Check if Node or list of Nodes and not self
            # to avoid recursion
            if isinstance(value, Node):
                if value == self:
                    del self.params[key]
                    continue
                result = value.build()
                # If the key is "func", then we need to use the run method
                if key == "func" and not isinstance(result, types.FunctionType):
                    # func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
                    # so we need to check if there is an attribute called run
                    if hasattr(result, "run"):
                        result = result.run  # type: ignore
                    elif hasattr(result, "get_function"):
                        result = result.get_function()  # type: ignore
                self.params[key] = result
            elif isinstance(value, list) and all(
                isinstance(node, Node) for node in value
            ):
                self.params[key] = [node.build() for node in value]  # type: ignore
        # Get the class from LANGCHAIN_TYPES_DICT
        # and instantiate it with the params
        # and return the instance
        try:
            self._built_object = loading.instantiate_class(
                node_type=self.node_type,
                base_type=self.base_type,
                params=self.params,
            )
        except Exception as exc:
            # Wrap any instantiation failure so callers see which node broke
            raise ValueError(f"Error building node {self.node_type}") from exc
        if self._built_object is None:
            raise ValueError(f"Node type {self.node_type} not found")
        self._built = True
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)
def add_edge(self, edge: "Edge") -> None:
self.edges.append(edge)
def __repr__(self) -> str:
return f"Node(id={self.id}, data={self.data})"
def __eq__(self, __o: object) -> bool:
return self.id == __o.id if isinstance(__o, Node) else False
def __hash__(self) -> int:
return id(self)
class Edge:
    """Directed connection between two nodes, validated on construction.

    Raises:
        ValueError: if no output type of ``source`` satisfies any input
            requirement of ``target``.
    """

    def __init__(self, source: "Node", target: "Node"):
        self.source: "Node" = source
        self.target: "Node" = target
        self.validate_edge()

    def validate_edge(self) -> None:
        """Check that some output of ``source`` matches an input of ``target``."""
        self.source_types = self.source.output
        self.target_reqs = self.target.required_inputs + self.target.optional_inputs
        # Both lists contain strings and sometimes a string contains the value we are
        # looking for e.g. source_types=["Chain"] and target_reqs=["LLMChain"]
        # so we need to check if any of the strings in source_types is in target_reqs
        self.valid = any(
            output in target_req
            for output in self.source_types
            for target_req in self.target_reqs
        )
        # Get what type of input the target node is expecting
        self.matched_type = next(
            (
                output
                for output in self.source_types
                for target_req in self.target_reqs
                if output in target_req
            ),
            None,
        )
        # Single check (previously the same condition was tested twice):
        # log the mismatching type lists for debugging, then fail.
        if self.matched_type is None:
            logger.debug(self.source_types)
            logger.debug(self.target_reqs)
            raise ValueError(
                f"Edge between {self.source.node_type} and {self.target.node_type} "
                f"has no matched type"
            )

    def __repr__(self) -> str:
        return (
            f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
            f", matched_type={self.matched_type})"
        )

View file

@ -0,0 +1 @@
DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any"]

View file

@ -1,296 +1,25 @@
# Description: Graph class for building a graph of nodes and edges
# Insights:
# - Defer prompts building to the last moment or when they have all the tools
# - Build each inner agent first, then build the outer agent
from typing import Dict, List, Union
from copy import deepcopy
import types
from typing import Any, Dict, List, Optional, Union
from langflow.graph.base import Edge, Node
from langflow.graph.nodes import (
AgentNode,
ChainNode,
FileToolNode,
LLMNode,
PromptNode,
ToolkitNode,
ToolNode,
WrapperNode,
)
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.tools.constants import ALL_TOOLS_NAMES, FILE_TOOLS
from langflow.interface.wrappers.base import wrapper_creator
from langflow.utils import payload
from langflow.interface.listing import ALL_TYPES_DICT, ALL_TOOLS_NAMES, TOOLS_DICT
from langflow.interface import loading
class Node:
    """Graph node wrapping a front-end template dict.

    Parses the template into input/output type lists, resolves its params
    from connected edges, and lazily instantiates the corresponding
    LangChain class on ``build()``.
    """

    def __init__(self, data: Dict):
        # Raw front-end payload; ``id`` identifies the node in the graph.
        self.id: str = data["id"]
        self._data = data
        self.edges: List[Edge] = []
        self._parse_data()
        # Built lazily; cached after the first successful build().
        self._built_object = None
        self._built = False

    def _parse_data(self) -> None:
        """Extract output classes, input types and node type from the template."""
        self.data = self._data["data"]
        self.output = self.data["node"]["base_classes"]
        # Only dict-valued entries are real template fields ("_type" is a str).
        template_dicts = {
            key: value
            for key, value in self.data["node"]["template"].items()
            if isinstance(value, dict)
        }
        self.required_inputs = [
            template_dicts[key]["type"]
            for key, value in template_dicts.items()
            if value["required"]
        ]
        self.optional_inputs = [
            template_dicts[key]["type"]
            for key, value in template_dicts.items()
            if not value["required"]
        ]
        template_dict = self.data["node"]["template"]
        # Tools are identified by their LangChain "_type" rather than the
        # front-end node type.
        self.node_type = (
            self.data["type"] if "Tool" not in self.output else template_dict["_type"]
        )

    def _build_params(self):
        """Resolve each template field into ``self.params``.

        Plain fields ("str", "bool", "code") take their template "value";
        every other type is resolved to the source Node of a matching edge.
        """
        # Some params are required, some are optional
        # but most importantly, some params are python base classes
        # like str and others are LangChain objects like LLMChain, BasePromptTemplate
        # so we need to be able to distinguish between the two
        # The dicts with "type" == "str" are the ones that are python base classes
        # and most likely have a "value" key
        # So for each key besides "_type" in the template dict, we have a dict
        # with a "type" key. If the type is not "str", then we need to get the
        # edge that connects to that node and get the Node with the required data
        # and use that as the value for the param
        # If the type is "str", then we need to get the value of the "value" key
        # and use that as the value for the param
        template_dict = {
            key: value
            for key, value in self.data["node"]["template"].items()
            if isinstance(value, dict)
        }
        params = {}
        for key, value in template_dict.items():
            if key == "_type":
                continue
            # If the type is not transformable to a python base class
            # then we need to get the edge that connects to this node
            if value["type"] not in ["str", "bool", "code"]:
                # Get the edge that connects to this node
                edge = next(
                    (
                        edge
                        for edge in self.edges
                        if edge.target == self and edge.matched_type in value["type"]
                    ),
                    None,
                )
                # Get the output of the node that the edge connects to
                # if the value['list'] is True, then there will be more
                # than one time setting to params[key]
                # so we need to append to a list if it exists
                # or create a new list if it doesn't
                if edge is None and value["required"]:
                    # break line
                    raise ValueError(
                        f"Required input {key} for module {self.node_type} not found"
                    )
                # NOTE(review): if value["list"] is true but edge is None (field
                # optional and unconnected), edge.source below raises
                # AttributeError — confirm whether list fields are always wired.
                if value["list"]:
                    if key in params:
                        params[key].append(edge.source)
                    else:
                        params[key] = [edge.source]
                elif value["required"] or edge is not None:
                    params[key] = edge.source
            elif value["required"] or value.get("value"):
                params[key] = value["value"]
        # Add _type to params
        self.params = params

    def _build(self):
        """Build nested Node params, then instantiate the LangChain class."""
        # The params dict is used to build the module
        # it contains values and keys that point to nodes which
        # have their own params dict
        # When build is called, we iterate through the params dict
        # and if the value is a node, we call build on that node
        # and use the output of that build as the value for the param
        # if the value is not a node, then we use the value as the param
        # and continue
        # Another aspect is that the node_type is the class that we need to import
        # and instantiate with these built params
        # Build each node in the params dict
        for key, value in self.params.items():
            # Check if Node or list of Nodes
            if isinstance(value, Node):
                result = value.build()
                # If the key is "func", then we need to use the run method
                if key == "func" and not isinstance(result, types.FunctionType):
                    # func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
                    # so we need to check if there is an attribute called run
                    if hasattr(result, "run"):
                        result = result.run  # type: ignore
                    elif hasattr(result, "get_function"):
                        result = result.get_function()  # type: ignore
                self.params[key] = result
            elif isinstance(value, list) and all(
                isinstance(node, Node) for node in value
            ):
                self.params[key] = [node.build() for node in value]  # type: ignore
        # Get the class from LANGCHAIN_TYPES_DICT
        # and instantiate it with the params
        # and return the instance
        for base_type, value in ALL_TYPES_DICT.items():
            if base_type == "tools":
                value = TOOLS_DICT
            if self.node_type in value:
                self._built_object = loading.instantiate_class(
                    node_type=self.node_type,
                    base_type=base_type,
                    params=self.params,
                )
                break
        if self._built_object is None:
            raise ValueError(f"Node type {self.node_type} not found")
        self._built = True

    def build(self, force: bool = False) -> Any:
        """Build once (or again when *force*) and return a deep copy."""
        if not self._built or force:
            self._build()
        return deepcopy(self._built_object)

    def add_edge(self, edge: "Edge") -> None:
        """Register an incident edge on this node."""
        self.edges.append(edge)

    def __repr__(self) -> str:
        return f"Node(id={self.id}, data={self.data})"

    def __eq__(self, __o: object) -> bool:
        # Equality is by graph id, not identity.
        return self.id == __o.id if isinstance(__o, Node) else False

    def __hash__(self) -> int:
        # NOTE(review): hashing by identity while __eq__ compares by self.id
        # violates the hash/eq contract — equal nodes may hash differently.
        return id(self)
class AgentNode(Node):
    """Node for an agent: builds its tools and chains before itself."""

    def __init__(self, data: Dict):
        super().__init__(data)
        self.tools: List[ToolNode] = []
        self.chains: List[ChainNode] = []

    def _set_tools_and_chains(self) -> None:
        """Partition incoming edge sources into tool and chain nodes."""
        for incoming in self.edges:
            neighbor = incoming.source
            if isinstance(neighbor, ToolNode):
                self.tools.append(neighbor)
            elif isinstance(neighbor, ChainNode):
                self.chains.append(neighbor)

    def build(self, force: bool = False) -> Any:
        """Build tools first, then chains (fed the tool nodes), then the agent."""
        if self._built and not force:
            return deepcopy(self._built_object)
        self._set_tools_and_chains()
        # Tools must exist before chains, which may embed tool descriptions.
        for dependency in self.tools:
            dependency.build()
        for dependency in self.chains:
            dependency.build(tools=self.tools)
        self._build()
        return deepcopy(self._built_object)
class Edge:
    """Directed connection between two nodes, validated on construction."""

    def __init__(self, source: "Node", target: "Node"):
        self.source: "Node" = source
        self.target: "Node" = target
        self.validate_edge()

    def validate_edge(self) -> None:
        """Record whether some output of source satisfies an input of target."""
        self.source_types = self.source.output
        self.target_reqs = self.target.required_inputs + self.target.optional_inputs
        # Matching is by substring: an output "Chain" satisfies a
        # requirement such as "LLMChain".
        hits = [
            out
            for out in self.source_types
            for req in self.target_reqs
            if out in req
        ]
        self.valid = bool(hits)
        # First match (same nested order as the comprehension) or None.
        self.matched_type = hits[0] if hits else None

    def __repr__(self) -> str:
        return (
            f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
            f", matched_type={self.matched_type})"
        )
class ToolNode(Node):
    """Node wrapping a LangChain tool."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(self, force: bool = False) -> Any:
        """Build the tool once (or again when *force*) and return a copy."""
        if force or not self._built:
            self._build()
        return deepcopy(self._built_object)
class PromptNode(Node):
    """Node that builds a prompt template."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the prompt; a ZeroShotPrompt first receives its built tools."""
        if self._built and not force:
            return deepcopy(self._built_object)
        if self.node_type == "ZeroShotPrompt":
            # ZeroShotPrompt templates embed tool descriptions, so the
            # tool nodes must be built and passed in before building.
            built_tools = [] if tools is None else [t.build() for t in tools]
            self.params["tools"] = built_tools
        self._build()
        return deepcopy(self._built_object)
class ChainNode(Node):
    """Node that builds a LangChain chain."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the chain, first building any PromptNode params (with tools)."""
        if self._built and not force:
            return deepcopy(self._built_object)
        for name, param in self.params.items():
            if isinstance(param, PromptNode):
                # Prompts may need the tools (e.g. ZeroShotPrompt), so they
                # are built here rather than in the generic build pass.
                self.params[name] = param.build(tools=tools, force=force)
        self._build()
        return deepcopy(self._built_object)
class Graph:
@ -310,9 +39,19 @@ class Graph:
edge.source.add_edge(edge)
edge.target.add_edge(edge)
# This is a hack to make sure that the LLM node is sent to
# the toolkit node
llm_node = None
for node in self.nodes:
node._build_params()
if isinstance(node, LLMNode):
llm_node = node
for node in self.nodes:
if isinstance(node, ToolkitNode):
node.params["llm"] = llm_node
def get_node(self, node_id: str) -> Union[None, Node]:
return next((node for node in self.nodes if node.id == node_id), None)
@ -365,14 +104,22 @@ class Graph:
node_type: str = node_data["type"] # type: ignore
node_lc_type: str = node_data["node"]["template"]["_type"] # type: ignore
if node_type in {"ZeroShotPrompt", "PromptTemplate"}:
if node_type in prompt_creator.to_list():
nodes.append(PromptNode(node))
elif "agent" in node_type.lower():
elif node_type in agent_creator.to_list():
nodes.append(AgentNode(node))
elif "chain" in node_type.lower():
elif node_type in chain_creator.to_list():
nodes.append(ChainNode(node))
elif "tool" in node_type.lower() or node_lc_type in ALL_TOOLS_NAMES:
elif node_type in tool_creator.to_list() or node_lc_type in ALL_TOOLS_NAMES:
if node_type in FILE_TOOLS:
nodes.append(FileToolNode(node))
nodes.append(ToolNode(node))
elif node_type in toolkits_creator.to_list():
nodes.append(ToolkitNode(node))
elif node_type in wrapper_creator.to_list():
nodes.append(WrapperNode(node))
elif node_type in llm_creator.to_list():
nodes.append(LLMNode(node))
else:
nodes.append(Node(node))
return nodes

View file

@ -0,0 +1,144 @@
from copy import deepcopy
from typing import Any, Dict, List, Optional, Union
from langflow.graph.base import Node
from langflow.graph.utils import extract_input_variables_from_prompt
class AgentNode(Node):
    """Node for an agent: builds its tools and chains before itself."""

    def __init__(self, data: Dict):
        super().__init__(data, base_type="agents")
        self.tools: List[ToolNode] = []
        self.chains: List[ChainNode] = []

    def _set_tools_and_chains(self) -> None:
        """Partition incoming edge sources into tool and chain nodes."""
        for incoming in self.edges:
            neighbor = incoming.source
            if isinstance(neighbor, ToolNode):
                self.tools.append(neighbor)
            elif isinstance(neighbor, ChainNode):
                self.chains.append(neighbor)

    def build(self, force: bool = False) -> Any:
        """Build tools first, then chains (fed the tool nodes), then the agent."""
        if self._built and not force:
            return deepcopy(self._built_object)
        self._set_tools_and_chains()
        # Tools must exist before chains, which may embed tool descriptions.
        for dependency in self.tools:
            dependency.build()
        for dependency in self.chains:
            dependency.build(tools=self.tools)
        self._build()
        return deepcopy(self._built_object)
class ToolNode(Node):
    """Node wrapping a LangChain tool."""

    def __init__(self, data: Dict):
        super().__init__(data, base_type="tools")

    def build(self, force: bool = False) -> Any:
        """Build the tool once (or again when *force*) and return a copy."""
        if force or not self._built:
            self._build()
        return deepcopy(self._built_object)
class PromptNode(Node):
    """Node that builds a prompt template, collecting its input variables."""

    def __init__(self, data: Dict):
        super().__init__(data, base_type="prompts")

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the prompt template.

        Extracts ``{variable}`` names from the prompt text params into
        ``input_variables``; "ShotPrompt" templates additionally receive
        their built tools first.
        """
        if not self._built or force:
            if "input_variables" not in self.params:
                self.params["input_variables"] = []
            # NOTE(review): input_variables is extended, never reset — a
            # repeated build(force=True) accumulates duplicates; confirm
            # whether forced rebuilds occur for prompts.
            # Check if it is a ZeroShotPrompt and needs a tool
            if "ShotPrompt" in self.node_type:
                tools = (
                    [tool_node.build() for tool_node in tools]
                    if tools is not None
                    else []
                )
                self.params["tools"] = tools
                # Every string param except format_instructions is treated
                # as prompt text that can contain {variables}.
                prompt_params = [
                    key
                    for key, value in self.params.items()
                    if isinstance(value, str) and key != "format_instructions"
                ]
            else:
                prompt_params = ["template"]
            for param in prompt_params:
                prompt_text = self.params[param]
                variables = extract_input_variables_from_prompt(prompt_text)
                self.params["input_variables"].extend(variables)
            self._build()
        return deepcopy(self._built_object)
class ChainNode(Node):
    """Node that builds a LangChain chain."""

    def __init__(self, data: Dict):
        super().__init__(data, base_type="chains")

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the chain, first building any PromptNode params (with tools)."""
        if self._built and not force:
            return deepcopy(self._built_object)
        for name, param in self.params.items():
            if isinstance(param, PromptNode):
                # Prompts may need the tools (e.g. ZeroShotPrompt), so they
                # are built here rather than in the generic build pass.
                self.params[name] = param.build(tools=tools, force=force)
        self._build()
        return deepcopy(self._built_object)
class LLMNode(Node):
    """Node wrapping a language model."""

    def __init__(self, data: Dict):
        super().__init__(data, base_type="llms")

    def build(self, force: bool = False) -> Any:
        """Build the LLM once (or again when *force*) and return a copy."""
        if force or not self._built:
            self._build()
        return deepcopy(self._built_object)
class ToolkitNode(Node):
    """Node wrapping a LangChain toolkit."""

    def __init__(self, data: Dict):
        super().__init__(data, base_type="toolkits")

    def build(self, force: bool = False) -> Any:
        """Build the toolkit once (or again when *force*) and return a copy."""
        if force or not self._built:
            self._build()
        return deepcopy(self._built_object)
class FileToolNode(ToolNode):
    """Tool node whose template carries an uploaded file.

    The previous ``__init__`` and ``build`` overrides restated the
    inherited ``ToolNode`` behavior verbatim, so they were removed;
    the class now simply inherits both. The distinct type is still
    needed for ``isinstance`` dispatch elsewhere in the graph code.
    """
class WrapperNode(Node):
    """Node wrapping an API helper (e.g. a requests wrapper)."""

    def __init__(self, data: Dict):
        super().__init__(data, base_type="wrappers")

    def build(self, force: bool = False) -> Any:
        """Build the wrapper, parsing the ``headers`` param from its string form.

        Raises:
            ValueError: if ``headers`` is not a valid Python literal.
        """
        if not self._built or force:
            if "headers" in self.params:
                # SECURITY: the headers string originates from user-editable
                # template data; ast.literal_eval only accepts Python
                # literals (dicts, strings, numbers, ...), unlike the
                # previous eval() which executed arbitrary code.
                import ast

                self.params["headers"] = ast.literal_eval(self.params["headers"])
            self._build()
        return deepcopy(self._built_object)

View file

@ -0,0 +1,53 @@
import base64
import csv
import io
import json
import re
from typing import Any
import yaml
def load_file(file_name, file_content, accepted_types) -> Any:
    """Decode a base64 data-URL payload and parse it by file suffix.

    Args:
        file_name: original file name; its suffix selects the parser.
        file_content: data URL, e.g. ``data:application/x-yaml;base64,<b64>``.
        accepted_types: suffixes the caller is willing to load.

    Returns:
        Parsed JSON/YAML content, or a list of dict rows for CSV.

    Raises:
        ValueError: if the suffix is not accepted or not supported.
    """
    if not file_name.endswith(tuple(accepted_types)):
        raise ValueError(f"File {file_name} is not accepted")
    suffix = file_name.rsplit(".", 1)[-1]
    # The payload sits after the first comma of the data URL.
    encoded = file_content.split(",")[1]
    text = base64.b64decode(encoded).decode("utf-8")
    if suffix == "json":
        return json.loads(text)
    if suffix in ("yaml", "yml"):
        return yaml.safe_load(text)
    if suffix == "csv":
        return list(csv.DictReader(io.StringIO(text)))
    raise ValueError(f"File {file_name} is not accepted")
def validate_prompt(prompt: str):
    """Return *prompt* unchanged if it contains input variables, else fix it."""
    has_variables = bool(extract_input_variables_from_prompt(prompt))
    return prompt if has_variables else fix_prompt(prompt)
def fix_prompt(prompt: str):
    """Append a default ``{input}`` variable to a prompt that lacks one."""
    return f"{prompt} {{input}}"
def extract_input_variables_from_prompt(prompt: str) -> list[str]:
    """Return every ``{variable}`` name found in *prompt*, in order."""
    # Non-greedy so adjacent braces pair up correctly.
    pattern = re.compile(r"{(.*?)}")
    return pattern.findall(prompt)

View file

@ -0,0 +1,3 @@
from langflow.interface.agents.base import AgentCreator
__all__ = ["AgentCreator"]

View file

@ -1,19 +1,30 @@
from langchain.agents import loading
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
from langflow.settings import settings
from typing import Dict, List
from langchain.agents import loading
from langflow.custom.customs import get_custom_nodes
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class AgentCreator(LangChainTypeCreator):
type_name: str = "agents"
@property
def type_to_loader_dict(self) -> Dict:
return loading.AGENT_TO_CLASS
if self.type_dict is None:
self.type_dict = loading.AGENT_TO_CLASS
# Add JsonAgent to the list of agents
for name, agent in CUSTOM_AGENTS.items():
self.type_dict[name] = agent
return self.type_dict
def get_signature(self, name: str) -> Dict | None:
try:
if name in get_custom_nodes(self.type_name).keys():
return get_custom_nodes(self.type_name)[name]
return build_template_from_class(
name, self.type_to_loader_dict, add_function=True
)
@ -26,3 +37,6 @@ class AgentCreator(LangChainTypeCreator):
for agent in self.type_to_loader_dict.values()
if agent.__name__ in settings.agents or settings.dev
]
agent_creator = AgentCreator()

View file

@ -0,0 +1,93 @@
from typing import Any, Optional
from langchain import LLMChain
from langchain.agents import AgentExecutor, ZeroShotAgent
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
from langchain.agents.agent_toolkits.pandas.prompt import SUFFIX as PANDAS_SUFFIX
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
from langchain.schema import BaseLanguageModel
from langchain.tools.python.tool import PythonAstREPLTool
class JsonAgent(AgentExecutor):
    """Agent executor specialized for exploring JSON data via a JsonToolkit."""

    @classmethod
    def initialize(cls, *args, **kwargs):
        # Uniform entry point used by the loading machinery; delegates to
        # the real constructor below.
        return cls.from_toolkit_and_llm(*args, **kwargs)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @classmethod
    def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
        """Build a ZeroShotAgent over the toolkit's tools and wrap it in cls."""
        tools = toolkit.get_tools()
        tool_names = [tool.name for tool in tools]
        # JSON-specific prefix/suffix prompt around the standard MRKL format.
        prompt = ZeroShotAgent.create_prompt(
            tools,
            prefix=JSON_PREFIX,
            suffix=JSON_SUFFIX,
            format_instructions=FORMAT_INSTRUCTIONS,
            input_variables=None,
        )
        llm_chain = LLMChain(
            llm=llm,
            prompt=prompt,
        )
        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
        return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)

    def run(self, *args, **kwargs):
        # Thin passthrough kept so subclasses/callers have a stable hook.
        return super().run(*args, **kwargs)
class CSVAgent(AgentExecutor):
    """Agent executor that answers questions about tabular data via pandas."""

    @classmethod
    def initialize(cls, *args, **kwargs):
        # Uniform entry point used by the loading machinery.
        return cls.from_toolkit_and_llm(*args, **kwargs)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @classmethod
    def from_toolkit_and_llm(
        cls,
        path: dict,
        llm: BaseLanguageModel,
        pandas_kwargs: Optional[dict] = None,
        **kwargs: Any
    ):
        """Build a ZeroShotAgent with a Python REPL tool bound to a DataFrame.

        NOTE(review): despite its name, ``path`` is consumed as a dict by
        ``DataFrame.from_dict`` (presumably pre-parsed CSV rows) — confirm
        against the caller.
        """
        import pandas as pd  # type: ignore

        _kwargs = pandas_kwargs or {}
        df = pd.DataFrame.from_dict(path, **_kwargs)
        # The agent manipulates the data through a Python REPL that has the
        # DataFrame pre-bound as ``df``.
        tools = [PythonAstREPLTool(locals={"df": df})]  # type: ignore
        prompt = ZeroShotAgent.create_prompt(
            tools,
            prefix=PANDAS_PREFIX,
            suffix=PANDAS_SUFFIX,
            input_variables=["df", "input", "agent_scratchpad"],
        )
        # Bake a preview of the data into the prompt so the LLM knows the schema.
        partial_prompt = prompt.partial(df=str(df.head()))
        llm_chain = LLMChain(
            llm=llm,
            prompt=partial_prompt,
        )
        tool_names = [tool.name for tool in tools]
        agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
        return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)

    def run(self, *args, **kwargs):
        # Thin passthrough kept so subclasses/callers have a stable hook.
        return super().run(*args, **kwargs)
# Registry of custom agents merged into the LangChain agent listing
# (see AgentCreator.type_to_loader_dict).
CUSTOM_AGENTS = {
    "JsonAgent": JsonAgent,
    "CSVAgent": CSVAgent,
}

View file

@ -1,40 +1,49 @@
from typing import Dict, List
from pydantic import BaseModel
from abc import ABC, abstractmethod
from langflow.template.template import Template, Field, FrontendNode
from typing import Any, Dict, List, Optional
from pydantic import BaseModel
from langflow.template.base import FrontendNode, Template, TemplateField
# Assuming necessary imports for Field, Template, and FrontendNode classes
class LangChainTypeCreator(BaseModel, ABC):
type_name: str
type_dict: Optional[Dict] = None
@property
@abstractmethod
def type_to_loader_dict(self) -> Dict:
pass
if self.type_dict is None:
raise NotImplementedError
return self.type_dict
@abstractmethod
def get_signature(self, name: str) -> Dict:
def get_signature(self, name: str) -> Optional[Dict[Any, Any]]:
pass
@abstractmethod
def to_list(self) -> List[str]:
pass
def to_dict(self):
result = {self.type_name: {}} # type: Dict
def to_dict(self) -> Dict:
result: Dict = {self.type_name: {}}
for name in self.to_list():
result[self.type_name][name] = self.get_signature(name)
# frontend_node.to_dict() returns a dict with the following structure:
# {name: {template: {fields}, description: str}}
# so we should update the result dict
result[self.type_name].update(self.frontend_node(name).to_dict())
return result
def frontend_node(self, name) -> FrontendNode:
signature = self.get_signature(name)
if signature is None:
raise ValueError(f"{name} not found")
fields = [
Field(
TemplateField(
name=key,
field_type=value["type"],
required=value.get("required", False),
@ -43,6 +52,9 @@ class LangChainTypeCreator(BaseModel, ABC):
show=value.get("show", True),
multiline=value.get("multiline", False),
value=value.get("value", None),
suffixes=value.get("suffixes", []),
file_types=value.get("fileTypes", []),
content=value.get("content", None),
)
for key, value in signature["template"].items()
if key != "_type"
@ -50,7 +62,7 @@ class LangChainTypeCreator(BaseModel, ABC):
template = Template(type_name=name, fields=fields)
return FrontendNode(
template=template,
description=signature["description"],
description=signature.get("description", ""),
base_classes=signature["base_classes"],
name=name,
)

View file

@ -0,0 +1,3 @@
from langflow.interface.chains.base import ChainCreator
__all__ = ["ChainCreator"]

View file

@ -1,11 +1,11 @@
from typing import Dict, List
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.signature import get_chain_signature
from langflow.template.template import Field, FrontendNode, Template
from langflow.utils.util import build_template_from_function
from langflow.settings import settings
from langchain.chains import loading as chains_loading
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
from langflow.utils.util import build_template_from_function
# Assuming necessary imports for Field, Template, and FrontendNode classes
@ -14,7 +14,9 @@ class ChainCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
return chains_loading.type_to_loader_dict
if self.type_dict is None:
self.type_dict = chains_loading.type_to_loader_dict
return self.type_dict
def get_signature(self, name: str) -> Dict | None:
try:
@ -33,3 +35,6 @@ class ChainCreator(LangChainTypeCreator):
or settings.dev
)
]
chain_creator = ChainCreator()

View file

@ -1,7 +1,9 @@
from typing import Callable, Optional
from langflow.utils import validate
from pydantic import BaseModel, validator
from langflow.utils import validate
class Function(BaseModel):
code: str

View file

@ -1,15 +1,11 @@
## LLM
from typing import Any
from langchain import llms
from langchain.llms.openai import OpenAIChat
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["openai-chat"] = OpenAIChat
## LLM
from langchain import llms, requests
from langchain.agents import agent_toolkits
from langchain.chat_models import ChatOpenAI
## Memory
# from langchain.memory.buffer_window import ConversationBufferWindowMemory
# from langchain.memory.chat_memory import ChatMessageHistory
# from langchain.memory.combined import CombinedMemory
@ -19,6 +15,96 @@ llm_type_to_cls_dict["openai-chat"] = OpenAIChat
# from langchain.memory.simple import SimpleMemory
# from langchain.memory.summary import ConversationSummaryMemory
# from langchain.memory.summary_buffer import ConversationSummaryBufferMemory
## Document Loaders
from langchain.document_loaders import (
AirbyteJSONLoader,
AZLyricsLoader,
CollegeConfidentialLoader,
CoNLLULoader,
CSVLoader,
DirectoryLoader,
EverNoteLoader,
FacebookChatLoader,
GCSDirectoryLoader,
GCSFileLoader,
GitbookLoader,
GoogleApiClient,
GoogleApiYoutubeLoader,
GoogleDriveLoader,
GutenbergLoader,
HNLoader,
IFixitLoader,
IMSDbLoader,
NotebookLoader,
NotionDirectoryLoader,
ObsidianLoader,
OnlinePDFLoader,
PagedPDFSplitter,
PDFMinerLoader,
PyMuPDFLoader,
PyPDFLoader,
ReadTheDocsLoader,
RoamLoader,
S3DirectoryLoader,
S3FileLoader,
SRTLoader,
TelegramChatLoader,
TextLoader,
UnstructuredEmailLoader,
UnstructuredFileIOLoader,
UnstructuredFileLoader,
UnstructuredHTMLLoader,
UnstructuredImageLoader,
UnstructuredMarkdownLoader,
UnstructuredPDFLoader,
# BSHTMLLoader,
UnstructuredPowerPointLoader,
UnstructuredURLLoader,
UnstructuredWordDocumentLoader,
WebBaseLoader,
YoutubeLoader,
)
## Embeddings
from langchain.embeddings import (
CohereEmbeddings,
FakeEmbeddings,
HuggingFaceEmbeddings,
HuggingFaceHubEmbeddings,
HuggingFaceInstructEmbeddings,
OpenAIEmbeddings,
SelfHostedEmbeddings,
SelfHostedHuggingFaceEmbeddings,
SelfHostedHuggingFaceInstructEmbeddings,
# SagemakerEndpointEmbeddings,
TensorflowHubEmbeddings,
)
## Vector Stores
from langchain.vectorstores import (
FAISS,
AtlasDB,
Chroma,
DeepLake,
ElasticVectorSearch,
Milvus,
OpenSearchVectorSearch,
Pinecone,
Qdrant,
VectorStore,
Weaviate,
)
## Toolkits
from langflow.interface.importing.utils import import_class
## LLM
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore
## Memory
memory_type_to_cls_dict: dict[str, Any] = {
# "CombinedMemory": CombinedMemory,
@ -42,21 +128,26 @@ memory_type_to_cls_dict: dict[str, Any] = {
# chain_type_to_cls_dict = type_to_loader_dict
# chain_type_to_cls_dict["conversation_chain"] = ConversationChain
# langchain.agents.agent_toolkits.__all__ mixes loader functions
# (lower-case names) and toolkit classes (CamelCase names); split them
# into two registries accordingly.
toolkit_type_to_loader_dict: dict[str, Any] = {
    toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
    # if toolkit_name is lower case it is a loader
    for toolkit_name in agent_toolkits.__all__
    if toolkit_name.islower()
}
toolkit_type_to_cls_dict: dict[str, Any] = {
    toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
    # if toolkit_name is not lower case it is a class
    for toolkit_name in agent_toolkits.__all__
    if not toolkit_name.islower()
}
# Wrapper registry keyed by class name.
wrapper_type_to_cls_dict: dict[str, Any] = {
    wrapper.__name__: wrapper for wrapper in [requests.RequestsWrapper]
}
## Embeddings
from langchain.embeddings import (
CohereEmbeddings,
FakeEmbeddings,
HuggingFaceEmbeddings,
HuggingFaceInstructEmbeddings,
HuggingFaceHubEmbeddings,
OpenAIEmbeddings,
# SagemakerEndpointEmbeddings,
TensorflowHubEmbeddings,
SelfHostedHuggingFaceEmbeddings,
SelfHostedHuggingFaceInstructEmbeddings,
SelfHostedEmbeddings,
)
embedding_type_to_cls_dict = {
"OpenAIEmbeddings": OpenAIEmbeddings,
@ -73,19 +164,6 @@ embedding_type_to_cls_dict = {
}
## Vector Stores
from langchain.vectorstores import (
ElasticVectorSearch,
FAISS,
VectorStore,
Pinecone,
Weaviate,
Qdrant,
Milvus,
Chroma,
OpenSearchVectorSearch,
AtlasDB,
DeepLake,
)
vectorstores_type_to_cls_dict = {
"ElasticVectorSearch": ElasticVectorSearch,
@ -103,57 +181,6 @@ vectorstores_type_to_cls_dict = {
## Document Loaders
from langchain.document_loaders import (
UnstructuredFileLoader,
UnstructuredFileIOLoader,
UnstructuredURLLoader,
DirectoryLoader,
NotionDirectoryLoader,
ReadTheDocsLoader,
GoogleDriveLoader,
UnstructuredHTMLLoader,
# BSHTMLLoader,
UnstructuredPowerPointLoader,
UnstructuredWordDocumentLoader,
UnstructuredPDFLoader,
UnstructuredImageLoader,
ObsidianLoader,
UnstructuredEmailLoader,
UnstructuredMarkdownLoader,
RoamLoader,
YoutubeLoader,
S3FileLoader,
TextLoader,
HNLoader,
GitbookLoader,
S3DirectoryLoader,
GCSFileLoader,
GCSDirectoryLoader,
WebBaseLoader,
IMSDbLoader,
AZLyricsLoader,
CollegeConfidentialLoader,
IFixitLoader,
GutenbergLoader,
PagedPDFSplitter,
PyPDFLoader,
EverNoteLoader,
AirbyteJSONLoader,
OnlinePDFLoader,
PDFMinerLoader,
PyMuPDFLoader,
TelegramChatLoader,
SRTLoader,
FacebookChatLoader,
NotebookLoader,
CoNLLULoader,
GoogleApiYoutubeLoader,
GoogleApiClient,
CSVLoader,
# BlackboardLoader
)
documentloaders_type_to_cls_dict = {
"UnstructuredFileLoader": UnstructuredFileLoader,
"UnstructuredFileIOLoader": UnstructuredFileIOLoader,

View file

@ -1,8 +1,9 @@
from typing import Dict, List
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
from langflow.settings import settings
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
from typing import Dict, List
class DocumentLoaderCreator(LangChainTypeCreator):
@ -25,3 +26,6 @@ class DocumentLoaderCreator(LangChainTypeCreator):
for documentloader in self.type_to_loader_dict.values()
if documentloader.__name__ in settings.documentloaders or settings.dev
]
documentloader_creator = DocumentLoaderCreator()

View file

@ -1,8 +1,9 @@
from typing import Dict, List
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import embedding_type_to_cls_dict
from langflow.settings import settings
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
from typing import Dict, List
class EmbeddingCreator(LangChainTypeCreator):
@ -25,3 +26,6 @@ class EmbeddingCreator(LangChainTypeCreator):
for embedding in self.type_to_loader_dict.values()
if embedding.__name__ in settings.embeddings or settings.dev
]
embedding_creator = EmbeddingCreator()

View file

@ -8,12 +8,22 @@ from langchain.agents import Agent
from langchain.chains.base import Chain
from langchain.llms.base import BaseLLM
from langchain.tools import BaseTool
from langflow.utils.util import get_tool_by_name
from langflow.interface.tools.util import get_tool_by_name
def import_module(module_path: str) -> Any:
"""Import module from module path"""
return importlib.import_module(module_path)
if "from" not in module_path:
# Import the module using the module path
return importlib.import_module(module_path)
# Split the module path into its components
_, module_path, _, object_name = module_path.split()
# Import the module using the module path
module = importlib.import_module(module_path)
return getattr(module, object_name)
def import_by_type(_type: str, name: str) -> Any:
@ -24,6 +34,8 @@ def import_by_type(_type: str, name: str) -> Any:
"llms": import_llm,
"tools": import_tool,
"chains": import_chain,
"toolkits": import_toolkit,
"wrappers": import_wrapper,
}
return func_dict[_type](name)
@ -42,8 +54,20 @@ def import_prompt(prompt: str) -> PromptTemplate:
return import_class(f"langchain.prompts.{prompt}")
def import_wrapper(wrapper: str) -> Any:
    """Import a requests wrapper class by name from ``langchain.requests``."""
    statement = f"from langchain.requests import {wrapper}"
    return import_module(statement)
def import_toolkit(toolkit: str) -> Any:
    """Import an agent toolkit class by name from ``langchain.agents.agent_toolkits``."""
    statement = f"from langchain.agents.agent_toolkits import {toolkit}"
    return import_module(statement)
def import_agent(agent: str) -> Agent:
"""Import agent from agent name"""
# check for custom agent
return import_class(f"langchain.agents.{agent}")

View file

@ -1,106 +1,27 @@
from langchain import agents, chains, prompts
from langflow.custom import customs
from langflow.interface.custom_lists import (
llm_type_to_cls_dict,
memory_type_to_cls_dict,
)
from langflow.settings import settings
from langflow.utils import util
from langchain.agents.load_tools import get_all_tool_names
from langchain.agents import Tool
from langflow.interface.custom_types import PythonFunction
CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
TOOLS_DICT = util.get_tools_dict()
ALL_TOOLS_NAMES = set(get_all_tool_names() + list(CUSTOM_TOOLS.keys()))
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.memories.base import memory_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.wrappers.base import wrapper_creator
def get_type_dict():
return {
"chains": list_chain_types,
"agents": list_agents,
"prompts": list_prompts,
"llms": list_llms,
"tools": list_tools,
"memories": list_memories,
"agents": agent_creator.to_list(),
"prompts": prompt_creator.to_list(),
"llms": llm_creator.to_list(),
"tools": tool_creator.to_list(),
"chains": chain_creator.to_list(),
"memory": memory_creator.to_list(),
"toolkits": toolkits_creator.to_list(),
"wrappers": wrapper_creator.to_list(),
}
def list_type(object_type: str):
"""List all components"""
return get_type_dict().get(object_type, lambda: None)()
def list_agents():
"""List all agent types"""
return [
agent.__name__
for agent in agents.loading.AGENT_TO_CLASS.values()
if agent.__name__ in settings.agents or settings.dev
]
def list_prompts():
"""List all prompt types"""
custom_prompts = customs.get_custom_nodes("prompts")
library_prompts = [
prompt.__annotations__["return"].__name__
for prompt in prompts.loading.type_to_loader_dict.values()
if prompt.__annotations__["return"].__name__ in settings.prompts or settings.dev
]
return library_prompts + list(custom_prompts.keys())
def list_tools():
"""List all load tools"""
tools = []
for tool in ALL_TOOLS_NAMES:
tool_params = util.get_tool_params(util.get_tool_by_name(tool))
if tool_params and (
tool_params.get("name") in settings.tools
or (tool_params.get("name") and settings.dev)
):
tools.append(tool_params["name"])
# Add Tool
custom_tools = customs.get_custom_nodes("tools")
return tools + list(custom_tools.keys())
def list_llms():
"""List all llm types"""
return [
llm.__name__
for llm in llm_type_to_cls_dict.values()
if llm.__name__ in settings.llms or settings.dev
]
def list_chain_types():
"""List all chain types"""
return [
chain.__annotations__["return"].__name__
for chain in chains.loading.type_to_loader_dict.values()
if chain.__annotations__["return"].__name__ in settings.chains or settings.dev
]
def list_memories():
"""List all memory types"""
return [
memory.__name__
for memory in memory_type_to_cls_dict.values()
if memory.__name__ in settings.memories or settings.dev
]
LANGCHAIN_TYPES_DICT = {
k: list_function() for k, list_function in get_type_dict().items()
}
LANGCHAIN_TYPES_DICT = get_type_dict()
# Now we'll build a dict with Langchain types and ours

View file

@ -0,0 +1,3 @@
from langflow.interface.llms.base import LLMCreator
__all__ = ["LLMCreator"]

View file

@ -1,8 +1,9 @@
from typing import Dict, List
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.settings import settings
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
from typing import Dict, List
class LLMCreator(LangChainTypeCreator):
@ -10,7 +11,9 @@ class LLMCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
return llm_type_to_cls_dict
if self.type_dict is None:
self.type_dict = llm_type_to_cls_dict
return self.type_dict
def get_signature(self, name: str) -> Dict | None:
"""Get the signature of an llm."""
@ -25,3 +28,6 @@ class LLMCreator(LangChainTypeCreator):
for llm in self.type_to_loader_dict.values()
if llm.__name__ in settings.llms or settings.dev
]
llm_creator = LLMCreator()

View file

@ -1,19 +1,16 @@
import json
from typing import Any, Dict, Optional
from typing import Any, Callable, Dict, Optional
from langchain.agents import ZeroShotAgent
from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.agents import agent as agent_module
from langflow.interface.importing.utils import import_by_type
from langchain.agents import ZeroShotAgent
from langchain.agents.loading import load_agent_from_config
from langchain.agents.tools import Tool
from langchain.callbacks.base import BaseCallbackManager
@ -21,20 +18,29 @@ from langchain.chains.loading import load_chain_from_config
from langchain.llms.base import BaseLLM
from langchain.llms.loading import load_llm_from_config
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.importing.utils import import_by_type
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.types import get_type_list
from langflow.utils import payload, util, validate
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
"""Instantiate class from module type and key, and params"""
if node_type in CUSTOM_AGENTS:
if custom_agent := CUSTOM_AGENTS.get(node_type):
return custom_agent.initialize(**params) # type: ignore
class_object = import_by_type(_type=base_type, name=node_type)
if base_type == "agents":
# We need to initialize it differently
allowed_tools = params["allowed_tools"]
llm_chain = params["llm_chain"]
return load_agent_executor(class_object, allowed_tools, llm_chain)
elif base_type == "tools" or node_type != "ZeroShotPrompt":
return class_object(**params)
return load_agent_executor(class_object, params)
elif node_type == "ZeroShotPrompt":
if "tools" not in params:
params["tools"] = []
return ZeroShotAgent.create_prompt(**params)
elif node_type == "PythonFunction":
# If the node_type is "PythonFunction"
# we need to get the function from the params
@ -45,15 +51,19 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
if isinstance(function_string, str):
return validate.eval_function(function_string)
raise ValueError("Function should be a string")
elif base_type == "toolkits":
loaded_toolkit = class_object(**params)
# Check if node_type has a loader
if toolkits_creator.has_create_function(node_type):
return load_toolkits_executor(node_type, loaded_toolkit, params)
return loaded_toolkit
else:
if "tools" not in params:
params["tools"] = []
return ZeroShotAgent.create_prompt(**params)
return class_object(**params)
def load_flow_from_json(path: str):
# This is done to avoid circular imports
from langflow.graph.graph import Graph
from langflow.graph import Graph
"""Load flow from json file"""
with open(path, "r") as f:
@ -122,10 +132,10 @@ def load_agent_executor_from_config(
)
def load_agent_executor(
agent_class: type[agent_module.Agent], allowed_tools, llm_chain, **kwargs
):
def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs):
"""Load agent executor from agent class, tools and chain"""
allowed_tools = params["allowed_tools"]
llm_chain = params["llm_chain"]
tool_names = [tool.name for tool in allowed_tools]
agent = agent_class(allowed_tools=tool_names, llm_chain=llm_chain)
return AgentExecutor.from_agent_and_tools(
@ -135,6 +145,12 @@ def load_agent_executor(
)
def load_toolkits_executor(node_type: str, toolkit: BaseToolkit, params: dict):
    """Wrap an instantiated toolkit in its registered create function.

    Looks up the create function registered for *node_type* via
    ``toolkits_creator`` and calls it with the toolkit and the ``llm``
    taken from *params*.

    NOTE(review): implicitly returns None when *params* has no truthy
    ``llm`` key — confirm callers expect that.
    """
    create_function: Callable = toolkits_creator.get_create_function(node_type)
    if llm := params.get("llm"):
        return create_function(llm=llm, toolkit=toolkit)
def load_tools_from_config(tool_list: list[dict]) -> list:
"""Load tools based on a config list.

View file

@ -0,0 +1,3 @@
from langflow.interface.memories.base import MemoryCreator
__all__ = ["MemoryCreator"]

View file

@ -1,8 +1,9 @@
from typing import Dict, List
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import memory_type_to_cls_dict
from langflow.settings import settings
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
from typing import Dict, List
class MemoryCreator(LangChainTypeCreator):
@ -10,7 +11,9 @@ class MemoryCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
return memory_type_to_cls_dict
if self.type_dict is None:
self.type_dict = memory_type_to_cls_dict
return self.type_dict
def get_signature(self, name: str) -> Dict | None:
"""Get the signature of a memory."""
@ -25,3 +28,6 @@ class MemoryCreator(LangChainTypeCreator):
for memory in self.type_to_loader_dict.values()
if memory.__name__ in settings.memories or settings.dev
]
memory_creator = MemoryCreator()

View file

@ -0,0 +1,3 @@
from langflow.interface.prompts.base import PromptCreator
__all__ = ["PromptCreator"]

View file

@ -1,22 +1,26 @@
from langchain.prompts import loading
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_function
from langflow.settings import settings
from langflow.custom.customs import get_custom_nodes
from typing import Dict, List
from langchain.prompts import loading
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
from langflow.utils.util import build_template_from_function
class PromptCreator(LangChainTypeCreator):
type_name: str = "prompts"
@property
def type_to_loader_dict(self) -> Dict:
return loading.type_to_loader_dict
if self.type_dict is None:
self.type_dict = loading.type_to_loader_dict
return self.type_dict
def get_signature(self, name: str) -> Dict | None:
try:
if name in get_custom_nodes("prompts").keys():
return get_custom_nodes("prompts")[name]
if name in get_custom_nodes(self.type_name).keys():
return get_custom_nodes(self.type_name)[name]
return build_template_from_function(name, self.type_to_loader_dict)
except ValueError as exc:
raise ValueError("Prompt not found") from exc
@ -30,3 +34,6 @@ class PromptCreator(LangChainTypeCreator):
or settings.dev
]
return library_prompts + list(custom_prompts.keys())
prompt_creator = PromptCreator()

View file

@ -3,9 +3,28 @@ import io
import re
from typing import Any, Dict
from langflow.cache.utils import compute_hash, load_cache, save_cache
from langflow.graph.graph import Graph
from langflow.interface import loading
from langflow.utils import payload
from langflow.graph.graph import Graph
def load_langchain_object(data_graph):
computed_hash = compute_hash(data_graph)
# Load langchain_object from cache if it exists
langchain_object = load_cache(computed_hash)
if langchain_object is None:
nodes = data_graph["nodes"]
# Add input variables
nodes = payload.extract_input_variables(nodes)
# Nodes, edges and root node
edges = data_graph["edges"]
graph = Graph(nodes, edges)
langchain_object = graph.build()
return computed_hash, langchain_object
def process_graph(data_graph: Dict[str, Any]):
@ -13,19 +32,18 @@ def process_graph(data_graph: Dict[str, Any]):
Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate,then run the graph and return the result and thought.
"""
nodes = data_graph["nodes"]
# Add input variables
# ? Is this necessary?
nodes = payload.extract_input_variables(nodes)
# Nodes, edges and root node
edges = data_graph["edges"]
graph = Graph(nodes, edges)
langchain_object = graph.build()
# Load langchain object
computed_hash, langchain_object = load_langchain_object(data_graph)
message = data_graph["message"]
# Process json
# Generate result and thought
result, thought = get_result_and_thought_using_graph(langchain_object, message)
# Save langchain_object to cache
# We have to save it here because if the
# memory is updated we need to keep the new values
save_cache(computed_hash, langchain_object)
return {
"result": result,
"thought": re.sub(
@ -48,9 +66,8 @@ def get_result_and_thought_using_graph(loaded_langchain, message: str):
)
thought = output_buffer.getvalue()
except Exception as e:
result = f"Error: {str(e)}"
thought = ""
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought

View file

@ -1,170 +0,0 @@
from typing import Any, Dict # noqa: F401
from langchain import agents, chains, prompts
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langflow.custom import customs
from langflow.interface.custom_lists import (
llm_type_to_cls_dict,
memory_type_to_cls_dict,
)
from langflow.interface.listing import CUSTOM_TOOLS, ALL_TOOLS_NAMES
from langflow.template.template import Field, Template
from langflow.utils import util
def get_signature(name: str, object_type: str):
"""Get the signature of an object."""
return {
"chains": get_chain_signature,
"agents": get_agent_signature,
"prompts": get_prompt_signature,
"llms": get_llm_signature,
# "memories": get_memory_signature,
"tools": get_tool_signature,
}.get(object_type, lambda name: f"Invalid type: {name}")(name)
def get_chain_signature(name: str):
"""Get the chain type by signature."""
try:
return util.build_template_from_function(
name, chains.loading.type_to_loader_dict, add_function=True
)
except ValueError as exc:
raise ValueError("Chain not found") from exc
def get_agent_signature(name: str):
"""Get the signature of an agent."""
try:
return util.build_template_from_class(
name, agents.loading.AGENT_TO_CLASS, add_function=True
)
except ValueError as exc:
raise ValueError("Agent not found") from exc
def get_prompt_signature(name: str):
"""Get the signature of a prompt."""
try:
if name in customs.get_custom_nodes("prompts").keys():
return customs.get_custom_nodes("prompts")[name]
return util.build_template_from_function(
name, prompts.loading.type_to_loader_dict
)
except ValueError as exc:
raise ValueError("Prompt not found") from exc
def get_llm_signature(name: str):
"""Get the signature of an llm."""
try:
return util.build_template_from_class(name, llm_type_to_cls_dict)
except ValueError as exc:
raise ValueError("LLM not found") from exc
def get_memory_signature(name: str):
"""Get the signature of a memory."""
try:
return util.build_template_from_class(name, memory_type_to_cls_dict)
except ValueError as exc:
raise ValueError("Memory not found") from exc
def get_tool_signature(name: str):
"""Get the signature of a tool."""
NODE_INPUTS = ["llm", "func"]
base_classes = ["Tool"]
all_tools = {}
for tool in ALL_TOOLS_NAMES:
if tool_params := util.get_tool_params(util.get_tool_by_name(tool)):
tool_name = tool_params.get("name") or str(tool)
all_tools[tool_name] = {"type": tool, "params": tool_params}
# Raise error if name is not in tools
if name not in all_tools.keys():
raise ValueError("Tool not found")
type_dict = {
"str": Field(
field_type="str",
required=True,
is_list=False,
show=True,
placeholder="",
value="",
),
"llm": Field(field_type="BaseLLM", required=True, is_list=False, show=True),
"func": Field(
field_type="function",
required=True,
is_list=False,
show=True,
multiline=True,
),
"code": Field(
field_type="str",
required=True,
is_list=False,
show=True,
value="",
multiline=True,
),
}
tool_type: str = all_tools[name]["type"] # type: ignore
if tool_type in _BASE_TOOLS:
params = []
elif tool_type in _LLM_TOOLS:
params = ["llm"]
elif tool_type in _EXTRA_LLM_TOOLS:
_, extra_keys = _EXTRA_LLM_TOOLS[tool_type]
params = ["llm"] + extra_keys
elif tool_type in _EXTRA_OPTIONAL_TOOLS:
_, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
params = extra_keys
elif tool_type == "Tool":
params = ["name", "description", "func"]
elif tool_type in CUSTOM_TOOLS:
# Get custom tool params
params = all_tools[name]["params"] # type: ignore
base_classes = ["function"]
if node := customs.get_custom_nodes("tools").get(tool_type):
return node
else:
params = []
# Copy the field and add the name
fields = []
for param in params:
if param in NODE_INPUTS:
field = type_dict[param].copy()
else:
field = type_dict["str"].copy()
field.name = param
if param == "aiosession":
field.show = False
field.required = False
fields.append(field)
template = Template(fields=fields, type_name=tool_type)
tool_params = util.get_tool_params(util.get_tool_by_name(tool_type))
if tool_params is None:
tool_params = {}
return {
"template": util.format_dict(template.to_dict()),
**tool_params,
"base_classes": base_classes,
}

View file

@ -0,0 +1,65 @@
from typing import Callable, Dict, List
from langchain.agents import agent_toolkits
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class, import_module
from langflow.settings import settings
from langflow.utils.util import build_template_from_class
class ToolkitCreator(LangChainTypeCreator):
    """Creator exposing langchain agent toolkits.

    Lists the toolkit classes from ``agent_toolkits.__all__`` (filtered by
    settings) and maps each toolkit name to the helper(s) that can turn an
    instantiated toolkit into an agent.
    """

    type_name: str = "toolkits"
    all_types: List[str] = agent_toolkits.__all__

    # Maps a toolkit class name to the names of its loader helpers.  An empty
    # list means the toolkit is used directly, with no create function.
    create_functions: Dict = {
        "JsonToolkit": [],
        "SQLDatabaseToolkit": [],
        "OpenAPIToolkit": ["create_openapi_agent"],
        "VectorStoreToolkit": [
            "create_vectorstore_agent",
            "create_vectorstore_router_agent",
            "VectorStoreInfo",
        ],
        "ZapierToolkit": [],
        "PandasToolkit": ["create_pandas_dataframe_agent"],
        "CSVToolkit": ["create_csv_agent"],
    }

    @property
    def type_to_loader_dict(self) -> Dict:
        """Lazily build and cache the toolkit-name -> class mapping."""
        if self.type_dict is None:
            self.type_dict = {
                toolkit_name: import_class(
                    f"langchain.agents.agent_toolkits.{toolkit_name}"
                )
                # if toolkit_name is not lower case it is a class
                for toolkit_name in agent_toolkits.__all__
                if not toolkit_name.islower() and toolkit_name in settings.toolkits
            }
        return self.type_dict

    def get_signature(self, name: str) -> Dict | None:
        """Return the template signature for the toolkit *name*.

        Raises ValueError when the toolkit is unknown.
        """
        try:
            return build_template_from_class(name, self.type_to_loader_dict)
        except ValueError as exc:
            # Bug fix: the message previously said "Prompt not found"
            # (copy-pasted from PromptCreator).
            raise ValueError("Toolkit not found") from exc

    def to_list(self) -> List[str]:
        """Names of every available toolkit."""
        return list(self.type_to_loader_dict.keys())

    def get_create_function(self, name: str) -> Callable:
        """Import and return the create function registered for *name*.

        Only the first entry of the registered list is imported; the
        remaining entries (e.g. ``VectorStoreInfo``) are presumably
        auxiliary names — TODO confirm.  Raises ValueError when *name*
        has no create function.
        """
        if loader_name := self.create_functions.get(name, None):
            # import loader
            return import_module(
                f"from langchain.agents.agent_toolkits import {loader_name[0]}"
            )
        else:
            raise ValueError("Loader not found")

    def has_create_function(self, name: str) -> bool:
        """Whether *name* has a non-empty create-function list."""
        return bool(self.create_functions.get(name, None))


toolkits_creator = ToolkitCreator()

View file

@ -0,0 +1,3 @@
from langflow.interface.tools.base import ToolCreator
__all__ = ["ToolCreator"]

View file

@ -1,10 +1,5 @@
from langflow.custom import customs
from langflow.interface.listing import ALL_TOOLS_NAMES, CUSTOM_TOOLS
from langflow.template.template import Field, Template
from langflow.utils import util
from langflow.settings import settings
from langflow.interface.base import LangChainTypeCreator
from typing import Dict, List
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
@ -12,22 +7,74 @@ from langchain.agents.load_tools import (
_LLM_TOOLS,
)
from langflow.custom import customs
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.tools.constants import (
ALL_TOOLS_NAMES,
CUSTOM_TOOLS,
FILE_TOOLS,
)
from langflow.interface.tools.util import (
get_tool_by_name,
get_tool_params,
get_tools_dict,
)
from langflow.settings import settings
from langflow.template.base import Template, TemplateField
from langflow.utils import util
# Template fields for the parameters a tool node can take; "str" doubles as
# the fallback for any parameter without a dedicated entry.
TOOL_INPUTS = {
    # Plain string parameter (generic fallback).
    "str": TemplateField(
        field_type="str",
        required=True,
        is_list=False,
        show=True,
        placeholder="",
        value="",
    ),
    # LLM instance required by llm-backed tools.
    "llm": TemplateField(field_type="BaseLLM", required=True, is_list=False, show=True),
    # Python callable body, edited as multiline text.
    "func": TemplateField(
        field_type="function",
        required=True,
        is_list=False,
        show=True,
        multiline=True,
    ),
    # Raw code string, edited as multiline text.
    "code": TemplateField(
        field_type="str",
        required=True,
        is_list=False,
        show=True,
        value="",
        multiline=True,
    ),
    # File-backed parameter (e.g. a JSON spec loaded from disk).
    "dict_": TemplateField(
        field_type="file",
        required=True,
        is_list=False,
        show=True,
        value="",
    ),
}
class ToolCreator(LangChainTypeCreator):
type_name: str = "tools"
tools_dict: Dict | None = None
@property
def type_to_loader_dict(self) -> Dict:
return ALL_TOOLS_NAMES
if self.tools_dict is None:
self.tools_dict = get_tools_dict()
return self.tools_dict
def get_signature(self, name: str) -> Dict | None:
"""Get the signature of a tool."""
NODE_INPUTS = ["llm", "func"]
base_classes = ["Tool"]
all_tools = {}
for tool in ALL_TOOLS_NAMES:
if tool_params := util.get_tool_params(util.get_tool_by_name(tool)):
for tool in self.type_to_loader_dict.keys():
if tool_params := get_tool_params(get_tool_by_name(tool)):
tool_name = tool_params.get("name") or str(tool)
all_tools[tool_name] = {"type": tool, "params": tool_params}
@ -35,33 +82,6 @@ class ToolCreator(LangChainTypeCreator):
if name not in all_tools.keys():
raise ValueError("Tool not found")
type_dict = {
"str": Field(
field_type="str",
required=True,
is_list=False,
show=True,
placeholder="",
value="",
),
"llm": Field(field_type="BaseLLM", required=True, is_list=False, show=True),
"func": Field(
field_type="function",
required=True,
is_list=False,
show=True,
multiline=True,
),
"code": Field(
field_type="str",
required=True,
is_list=False,
show=True,
value="",
multiline=True,
),
}
tool_type: str = all_tools[name]["type"] # type: ignore
if tool_type in _BASE_TOOLS:
@ -82,6 +102,9 @@ class ToolCreator(LangChainTypeCreator):
base_classes = ["function"]
if node := customs.get_custom_nodes("tools").get(tool_type):
return node
elif tool_type in FILE_TOOLS:
params = all_tools[name]["params"] # type: ignore
base_classes += [name]
else:
params = []
@ -89,10 +112,7 @@ class ToolCreator(LangChainTypeCreator):
# Copy the field and add the name
fields = []
for param in params:
if param in NODE_INPUTS:
field = type_dict[param].copy()
else:
field = type_dict["str"].copy()
field = TOOL_INPUTS.get(param, TOOL_INPUTS["str"]).copy()
field.name = param
if param == "aiosession":
field.show = False
@ -101,9 +121,7 @@ class ToolCreator(LangChainTypeCreator):
template = Template(fields=fields, type_name=tool_type)
tool_params = util.get_tool_params(util.get_tool_by_name(tool_type))
if tool_params is None:
tool_params = {}
tool_params = all_tools[name]["params"]
return {
"template": util.format_dict(template.to_dict()),
**tool_params,
@ -116,7 +134,11 @@ class ToolCreator(LangChainTypeCreator):
tools = []
for tool in ALL_TOOLS_NAMES:
tool_params = util.get_tool_params(util.get_tool_by_name(tool))
tool_params = get_tool_params(get_tool_by_name(tool))
if tool_params and not tool_params.get("name"):
tool_params["name"] = tool
if tool_params and (
tool_params.get("name") in settings.tools
or (tool_params.get("name") and settings.dev)
@ -126,3 +148,6 @@ class ToolCreator(LangChainTypeCreator):
# Add Tool
custom_tools = customs.get_custom_nodes("tools")
return tools + list(custom_tools.keys())
tool_creator = ToolCreator()

View file

@ -0,0 +1,11 @@
from langchain.agents import Tool
from langchain.agents.load_tools import get_all_tool_names
from langchain.tools.json.tool import JsonSpec
from langflow.interface.custom.types import PythonFunction
# Tools whose input is a file-backed spec.
FILE_TOOLS = {"JsonSpec": JsonSpec}
# Langflow-specific tools that are not part of langchain's tool registry.
CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
# Every tool name known to the interface: langchain's registry plus the
# custom and file-backed tools above.
ALL_TOOLS_NAMES = set(
    get_all_tool_names() + list(CUSTOM_TOOLS.keys()) + list(FILE_TOOLS.keys())
)

View file

@ -0,0 +1,124 @@
import ast
import inspect
from typing import Dict, Union
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.agents.tools import Tool
from langflow.interface.tools.constants import CUSTOM_TOOLS, FILE_TOOLS
def get_tools_dict():
    """Build one mapping of every known tool name to its loader or class.

    Merges langchain's registries (extra registries store ``(loader, keys)``
    tuples, so only the loader is kept) with the custom and file tools;
    later sources win on name collisions.
    """
    merged = {}
    merged.update(_BASE_TOOLS)
    merged.update(_LLM_TOOLS)
    merged.update({name: spec[0] for name, spec in _EXTRA_LLM_TOOLS.items()})
    merged.update({name: spec[0] for name, spec in _EXTRA_OPTIONAL_TOOLS.items()})
    merged.update(CUSTOM_TOOLS)
    merged.update(FILE_TOOLS)
    return merged
def get_tool_by_name(name: str):
    """Return the tool registered under *name*; raise ValueError if unknown."""
    registry = get_tools_dict()
    if name in registry:
        return registry[name]
    raise ValueError(f"{name} not found.")
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
    """Extract the ``name`` and ``description`` of the Tool returned by *func*.

    Parses the source of *func* and inspects its return statements:

    * ``return Tool(name=..., description=...)`` -> read the literal keyword
      values (keywords other than name/description are ignored).
    * ``return Tool(name, func, description)`` -> read the first and third
      positional arguments.
    * ``return SomethingElse(...)`` -> evaluate the expression and read the
      ``name``/``description`` attributes of the result.

    Returns None when no usable return statement is found.
    """
    tree = ast.parse(inspect.getsource(func))
    # Walk the tree looking for the first return statement we can interpret.
    for node in ast.walk(tree):
        if not isinstance(node, ast.Return):
            continue
        tool = node.value
        if not isinstance(tool, ast.Call):
            continue
        if isinstance(tool.func, ast.Name) and tool.func.id == "Tool":
            if tool.keywords:
                tool_params = {}
                for keyword in tool.keywords:
                    if keyword.arg == "name":
                        tool_params["name"] = ast.literal_eval(keyword.value)
                    elif keyword.arg == "description":
                        tool_params["description"] = ast.literal_eval(keyword.value)
                return tool_params
            # Positional form is Tool(name, func, description); guard against
            # calls with fewer arguments (previously raised IndexError).
            if len(tool.args) < 3:
                return None
            return {
                "name": ast.literal_eval(tool.args[0]),
                "description": ast.literal_eval(tool.args[2]),
            }
        # Not a Tool(...) call: evaluate the returned expression and read its
        # attributes.  Best-effort — any failure (including missing
        # name/description attributes, which previously escaped as
        # AttributeError) yields None.
        # NOTE(review): eval of source text — acceptable only because the
        # source comes from our own tool registry, never user input.
        try:
            class_obj = eval(compile(ast.Expression(tool), "<string>", "eval"))
            return {
                "name": getattr(class_obj, "name"),
                "description": getattr(class_obj, "description"),
            }
        except Exception:
            return None
    # No return statement was found
    return None
def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
    """Extract constructor/attribute parameters for a class-based tool.

    Walks the AST of *cls* and collects:
    * the argument names of ``__init__`` (``name`` is pre-filled with the
      class name, ``self`` is skipped, literal annotations become values,
      anything else defaults to ``""``);
    * annotated class attributes, for classes other than ``Tool``.

    NOTE(review): despite the annotation this always returns a dict (possibly
    empty), never None.
    """
    tree = ast.parse(inspect.getsource(cls))
    tool_params = {}
    # Iterate over the statements in the abstract syntax tree
    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            # Find the class definition and look for methods
            for stmt in node.body:
                if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__":
                    # There is no assignment statements in the __init__ method
                    # So we need to get the params from the function definition
                    for arg in stmt.args.args:
                        if arg.arg == "name":
                            # It should be the name of the class
                            tool_params[arg.arg] = cls.__name__
                        elif arg.arg == "self":
                            continue
                        # If there is not default value, set it to an empty string
                        else:
                            try:
                                # literal_eval accepts AST nodes; a non-literal
                                # annotation raises ValueError -> fall back to ""
                                annotation = ast.literal_eval(arg.annotation)  # type: ignore
                                tool_params[arg.arg] = annotation
                            except ValueError:
                                tool_params[arg.arg] = ""
                    # Get the attribute name and the annotation
                elif cls != Tool and isinstance(stmt, ast.AnnAssign):
                    # Get the attribute name and the annotation
                    tool_params[stmt.target.id] = ""  # type: ignore
    return tool_params
def get_tool_params(tool, **kwargs) -> Dict:
    """Return the parameter dict for *tool*, a function or a class.

    Raises ValueError for anything that is neither.
    """
    if inspect.isfunction(tool):
        params = get_func_tool_params(tool, **kwargs)
    elif inspect.isclass(tool):
        # Classes are introspected via __init__ and annotated attributes.
        params = get_class_tool_params(tool, **kwargs)
    else:
        raise ValueError("Tool must be a function or class.")
    return params or {}

View file

@ -1,15 +1,14 @@
from langflow.interface.agents import AgentCreator
from langflow.interface.listing import list_type
from langflow.interface.llms import LLMCreator
from langflow.interface.memories import MemoryCreator
from langflow.interface.prompts import PromptCreator
from langflow.interface.signature import get_signature
from langflow.interface.embeddings import EmbeddingCreator
from langflow.interface.vectorstore import VectorstoreCreator
from langflow.interface.documentloaders import DocumentLoaderCreator
from langchain import chains
from langflow.interface.chains import ChainCreator
from langflow.interface.tools import ToolCreator
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.documentLoaders.base import documentloader_creator
from langflow.interface.embeddings.base import embedding_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.memories.base import memory_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.vectorStore.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
def get_type_list():
@ -26,15 +25,6 @@ def get_type_list():
def build_langchain_types_dict():
"""Build a dictionary of all langchain types"""
chain_creator = ChainCreator()
agent_creator = AgentCreator()
prompt_creator = PromptCreator()
tool_creator = ToolCreator()
llm_creator = LLMCreator()
memory_creator = MemoryCreator()
embedding_creator = EmbeddingCreator()
vectorstore_creator = VectorstoreCreator()
documentloader_creator = DocumentLoaderCreator()
all_types = {}
@ -45,6 +35,8 @@ def build_langchain_types_dict():
llm_creator,
memory_creator,
tool_creator,
toolkits_creator,
wrapper_creator,
embedding_creator,
vectorstore_creator,
documentloader_creator,

View file

@ -1,8 +1,9 @@
from typing import Dict, List
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import vectorstores_type_to_cls_dict
from langflow.settings import settings
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
from typing import Dict, List
class VectorstoreCreator(LangChainTypeCreator):
@ -25,3 +26,6 @@ class VectorstoreCreator(LangChainTypeCreator):
for vectorstore in self.type_to_loader_dict.keys()
if vectorstore in settings.vectorstores or settings.dev
]
vectorstore_creator = VectorstoreCreator()

View file

@ -0,0 +1,30 @@
from typing import Dict, List
from langchain import requests
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.util import build_template_from_class
class WrapperCreator(LangChainTypeCreator):
    """Creator that exposes LangChain wrapper classes as langflow node types."""

    type_name: str = "wrappers"

    @property
    def type_to_loader_dict(self) -> Dict:
        # Build the name -> class mapping lazily on first access and memoize it.
        if self.type_dict is None:
            supported_wrappers = [requests.RequestsWrapper]
            mapping = {}
            for wrapper_cls in supported_wrappers:
                mapping[wrapper_cls.__name__] = wrapper_cls
            self.type_dict = mapping
        return self.type_dict

    def get_signature(self, name: str) -> Dict | None:
        """Return the frontend template signature for the wrapper *name*."""
        try:
            return build_template_from_class(name, self.type_to_loader_dict)
        except ValueError as exc:
            raise ValueError("Wrapper not found") from exc

    def to_list(self) -> List[str]:
        """List the names of all registered wrappers."""
        return [wrapper_name for wrapper_name in self.type_to_loader_dict]


wrapper_creator = WrapperCreator()

View file

@ -2,8 +2,6 @@ from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langflow.api.endpoints import router as endpoints_router
from langflow.api.list_endpoints import router as list_router
from langflow.api.signature import router as signatures_router
def create_app():
@ -23,8 +21,6 @@ def create_app():
)
app.include_router(endpoints_router)
app.include_router(list_router)
app.include_router(signatures_router)
return app

View file

@ -1,32 +1,47 @@
import os
from typing import List, Optional
from typing import List
import yaml
from pydantic import BaseSettings, Field, root_validator
from pydantic import BaseSettings, root_validator
class Settings(BaseSettings):
chains: Optional[List[str]] = Field(...)
agents: Optional[List[str]] = Field(...)
prompts: Optional[List[str]] = Field(...)
llms: Optional[List[str]] = Field(...)
tools: Optional[List[str]] = Field(...)
memories: Optional[List[str]] = Field(...)
embeddings: Optional[List[str]] = Field(...)
vectorstores: Optional[List[str]] = Field(...)
documentloaders: Optional[List[str]] = Field(...)
dev: bool = Field(...)
chains: List[str] = []
agents: List[str] = []
prompts: List[str] = []
llms: List[str] = []
tools: List[str] = []
memories: List[str] = []
embeddings: List[str] = []
vectorstores: List[str] = []
documentloaders: List[str] = []
wrappers: List[str] = []
toolkits: List[str] = []
dev: bool = False
class Config:
validate_assignment = True
extra = "ignore"
@root_validator
@root_validator(allow_reuse=True)
def validate_lists(cls, values):
for key, value in values.items():
if key != "dev" and not value:
values[key] = []
return values
def update_from_yaml(self, file_path: str):
    """Reload all component allow-lists from *file_path*, replacing current values.

    Missing keys fall back to empty lists (``dev`` falls back to False),
    mirroring the field defaults.
    """
    new_settings = load_settings_from_yaml(file_path)
    self.chains = new_settings.chains or []
    self.agents = new_settings.agents or []
    self.prompts = new_settings.prompts or []
    self.llms = new_settings.llms or []
    self.tools = new_settings.tools or []
    self.memories = new_settings.memories or []
    # Bug fix: these three categories were previously not refreshed here,
    # so they kept stale values after an update_from_yaml() call.
    self.embeddings = new_settings.embeddings or []
    self.vectorstores = new_settings.vectorstores or []
    self.documentloaders = new_settings.documentloaders or []
    self.wrappers = new_settings.wrappers or []
    self.toolkits = new_settings.toolkits or []
    self.dev = new_settings.dev or False
def save_settings_to_yaml(settings: Settings, file_path: str):
with open(file_path, "w") as f:
@ -44,9 +59,8 @@ def load_settings_from_yaml(file_path: str) -> Settings:
with open(file_path, "r") as f:
settings_dict = yaml.safe_load(f)
a = Settings.parse_obj(settings_dict)
return a
return Settings(**settings_dict)
settings = load_settings_from_yaml("config.yaml")

View file

@ -0,0 +1,159 @@
from abc import ABC
from typing import Any, Dict, Optional, Union
from pydantic import BaseModel
from langflow.utils import constants
class TemplateFieldCreator(BaseModel, ABC):
    """Describes one input field of a node template shown in the frontend.

    Holds both the field's static declaration (type, required, default value)
    and display metadata (show, multiline, password, options) that the UI
    uses to choose a widget.
    """

    field_type: str = "str"  # widget type: "str", "int", "file", "code", "Tool", ...
    required: bool = False
    placeholder: str = ""
    is_list: bool = False  # serialized as "list" in to_dict()
    show: bool = True  # whether the frontend renders this field
    multiline: bool = False
    value: Any = None  # current/default value sent to the frontend
    suffixes: list[str] = []  # allowed file suffixes when field_type == "file"
    fileTypes: list[str] = []  # camelCase alias the frontend reads directly
    file_types: list[str] = []  # snake_case source; renamed to fileTypes in to_dict()
    content: Union[str, None] = None  # uploaded file content when field_type == "file"
    password: bool = False  # mask the input in the UI
    options: list[str] = []  # dropdown options (used with is_list)
    # _name will be used to store the name of the field
    # in the template
    name: str = ""

    def to_dict(self):
        """Serialize to the dict shape the frontend expects.

        Drops None/empty-list values and renames field_type -> "type",
        is_list -> "list", file_types -> "fileTypes".
        """
        result = self.dict()
        # Remove key if it is None
        for key in list(result.keys()):
            if result[key] is None or result[key] == []:
                del result[key]
        result["type"] = result.pop("field_type")
        result["list"] = result.pop("is_list")

        if result.get("file_types"):
            result["fileTypes"] = result.pop("file_types")

        if self.field_type == "file":
            # File content must be present even when empty/None.
            result["content"] = self.content
        return result

    def process_field(
        self, key: str, value: Dict[str, Any], name: Optional[str] = None
    ) -> None:
        """Derive display metadata from a raw signature entry.

        Args:
            key: The field's name in the template.
            value: Raw signature info; must contain a "type" string and may
                contain a "default".
            name: Optional node/class name, used for per-node special cases
                (OpenAI / OpenAIChat model_name options).
        """
        _type = value["type"]

        # Remove 'Optional' wrapper
        if "Optional" in _type:
            _type = _type.replace("Optional[", "")[:-1]

        # Check for list type
        if "List" in _type:
            _type = _type.replace("List[", "")[:-1]
            self.is_list = True
        else:
            self.is_list = False

        # Replace 'Mapping' with 'dict'
        if "Mapping" in _type:
            _type = _type.replace("Mapping", "dict")

        # Change type from str to Tool
        self.field_type = "Tool" if key in ["allowed_tools"] else _type
        self.field_type = "int" if key in ["max_value_length"] else self.field_type

        # Show or not field: required fields (except input_variables),
        # a fixed allow-list of keys, and anything containing "api_key".
        self.show = bool(
            (self.required and key not in ["input_variables"])
            or key
            in [
                "allowed_tools",
                "memory",
                "prefix",
                "examples",
                "temperature",
                "model_name",
                "headers",
                "max_value_length",
            ]
            or "api_key" in key
        )

        # Add password field (mask anything that looks like a credential)
        self.password = any(
            text in key.lower() for text in ["password", "token", "api", "key"]
        )

        # Add multline
        self.multiline = key in [
            "suffix",
            "prefix",
            "template",
            "examples",
            "code",
            "headers",
        ]

        # Replace dict type with str
        if "dict" in self.field_type.lower():
            self.field_type = "code"

        if key == "dict_":
            # Serialized dicts are uploaded as JSON/YAML files instead.
            self.field_type = "file"
            self.suffixes = [".json", ".yaml", ".yml"]
            self.file_types = ["json", "yaml", "yml"]

        # Replace default value with actual value
        if "default" in value:
            self.value = value["default"]

        if key == "headers":
            self.value = """{'Authorization':
            'Bearer <token>'}"""

        # Add options to openai
        if name == "OpenAI" and key == "model_name":
            self.options = constants.OPENAI_MODELS
            self.is_list = True
        elif name == "OpenAIChat" and key == "model_name":
            self.options = constants.CHAT_OPENAI_MODELS
            self.is_list = True
class TemplateField(TemplateFieldCreator):
    # Concrete field type; all behavior lives in TemplateFieldCreator.
    pass
class Template(BaseModel):
    """A named collection of template fields for one node type."""

    type_name: str
    fields: list[TemplateField]

    def process_fields(self, name: Optional[str] = None) -> None:
        """Refresh each field's display metadata from its serialized form."""
        for current_field in self.fields:
            current_field.process_field(
                current_field.name, current_field.to_dict(), name
            )

    def to_dict(self):
        """Serialize all fields keyed by field name, plus the "_type" marker."""
        self.process_fields(self.type_name)
        serialized = {}
        for current_field in self.fields:
            serialized[current_field.name] = current_field.to_dict()
        serialized["_type"] = self.type_name  # type: ignore
        return serialized
class FrontendNode(BaseModel):
    """Node payload sent to the frontend: a template plus its metadata."""

    template: Template
    description: str
    base_classes: list
    name: str = ""

    def to_dict(self):
        """Serialize as ``{name: {template, description, base_classes}}``."""
        payload = {
            "template": self.template.to_dict(),
            "description": self.description,
            "base_classes": self.base_classes,
        }
        return {self.name: payload}

View file

View file

@ -1,5 +1,6 @@
from langflow.template.template import Field, FrontendNode, Template
from langchain.agents.mrkl import prompt
from langflow.template.base import FrontendNode, Template, TemplateField
from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION
@ -8,7 +9,7 @@ class ZeroShotPromptNode(FrontendNode):
template: Template = Template(
type_name="zero_shot",
fields=[
Field(
TemplateField(
field_type="str",
required=False,
placeholder="",
@ -18,7 +19,7 @@ class ZeroShotPromptNode(FrontendNode):
value=prompt.PREFIX,
name="prefix",
),
Field(
TemplateField(
field_type="str",
required=True,
placeholder="",
@ -28,7 +29,7 @@ class ZeroShotPromptNode(FrontendNode):
value=prompt.SUFFIX,
name="suffix",
),
Field(
TemplateField(
field_type="str",
required=False,
placeholder="",
@ -52,7 +53,7 @@ class PythonFunctionNode(FrontendNode):
template: Template = Template(
type_name="python_function",
fields=[
Field(
TemplateField(
field_type="code",
required=True,
placeholder="",
@ -75,7 +76,7 @@ class ToolNode(FrontendNode):
template: Template = Template(
type_name="tool",
fields=[
Field(
TemplateField(
field_type="str",
required=True,
placeholder="",
@ -85,7 +86,7 @@ class ToolNode(FrontendNode):
value="",
name="name",
),
Field(
TemplateField(
field_type="str",
required=True,
placeholder="",
@ -95,7 +96,7 @@ class ToolNode(FrontendNode):
value="",
name="description",
),
Field(
TemplateField(
field_type="str",
required=True,
placeholder="",
@ -112,3 +113,58 @@ class ToolNode(FrontendNode):
def to_dict(self):
return super().to_dict()
class JsonAgentNode(FrontendNode):
    """Frontend node for building a JSON agent from a toolkit and an LLM."""

    name: str = "JsonAgent"
    template: Template = Template(
        type_name="json_agent",
        fields=[
            TemplateField(
                field_type="BaseToolkit",
                required=True,
                show=True,
                name="toolkit",
            ),
            TemplateField(
                field_type="BaseLanguageModel",
                required=True,
                show=True,
                name="llm",
            ),
        ],
    )
    description: str = """Construct a json agent from an LLM and tools."""
    base_classes: list[str] = ["AgentExecutor"]

    # NOTE: the former to_dict override only delegated to super().to_dict(),
    # so it was removed; FrontendNode.to_dict is inherited unchanged.
class CSVAgentNode(FrontendNode):
    """Frontend node for building a CSV agent from a CSV file and an LLM."""

    name: str = "CSVAgent"
    template: Template = Template(
        type_name="csv_agent",
        fields=[
            TemplateField(
                field_type="file",
                required=True,
                show=True,
                name="path",
                value="",
                suffixes=[".csv"],
                fileTypes=["csv"],
            ),
            TemplateField(
                field_type="BaseLanguageModel",
                required=True,
                show=True,
                name="llm",
            ),
        ],
    )
    # Bug fix: the description previously said "json agent" (copy-paste from
    # JsonAgentNode) although this node builds a CSV agent.
    description: str = """Construct a CSV agent from a CSV file and tools."""
    base_classes: list[str] = ["AgentExecutor"]

    # NOTE: the former to_dict override only delegated to super().to_dict(),
    # so it was removed; FrontendNode.to_dict is inherited unchanged.

View file

@ -1,51 +0,0 @@
from typing import Any
from pydantic import BaseModel
class Field(BaseModel):
    """Legacy template-field model (superseded by TemplateField)."""

    field_type: str = "str"
    required: bool = False
    placeholder: str = ""
    is_list: bool = False
    show: bool = True
    multiline: bool = False
    value: Any = None
    # _name will be used to store the name of the field
    # in the template
    name: str = ""

    def to_dict(self):
        """Serialize, dropping None values and renaming to frontend keys."""
        serialized = self.dict()
        # Remove key if it is None
        none_keys = [key for key, val in serialized.items() if val is None]
        for key in none_keys:
            del serialized[key]
        serialized["type"] = serialized.pop("field_type")
        serialized["list"] = serialized.pop("is_list")
        return serialized
class Template(BaseModel):
    """Legacy template: a named list of fields."""

    type_name: str
    fields: list[Field]

    def to_dict(self):
        """Serialize fields keyed by name, plus the "_type" marker."""
        serialized = {}
        for item in self.fields:
            serialized[item.name] = item.to_dict()
        serialized["_type"] = self.type_name  # type: ignore
        return serialized
class FrontendNode(BaseModel):
    """Legacy frontend-node payload (superseded by langflow.template.base)."""

    template: Template
    description: str
    base_classes: list
    name: str = ""

    def to_dict(self):
        """Serialize as ``{name: {template, description, base_classes}}``."""
        node_payload = {
            "template": self.template.to_dict(),
            "description": self.description,
            "base_classes": self.base_classes,
        }
        return {self.name: node_payload}

View file

@ -1,23 +1,54 @@
import ast
import importlib
import inspect
import re
from typing import Dict, Optional, Union
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.agents.tools import Tool
from typing import Dict, Optional
from langflow.utils import constants
def build_template_from_parameters(
    name: str, type_to_loader_dict: Dict, add_function: bool = False
):
    """Build a frontend template dict from a factory function's annotations.

    Args:
        name: ``__name__`` of the factory function to look up.
        type_to_loader_dict: Mapping of type key -> factory function.
        add_function: When True, append "function" to the base classes.

    Raises:
        ValueError: If no function named *name* is registered.
    """
    # Retrieve the function that matches the provided name
    func = None
    for _, v in type_to_loader_dict.items():
        if v.__name__ == name:
            func = v
            break
    if func is None:
        raise ValueError(f"{name} not found")

    # Process parameters
    parameters = func.__annotations__
    variables = {}
    for param_name, param_type in parameters.items():
        if param_name in ["return", "kwargs"]:
            continue
        variables[param_name] = {
            "type": param_type.__name__,
            # NOTE(review): __repr_args__ is a pydantic-field API; plain
            # annotation types do not define it — confirm the registered
            # loaders really expose pydantic fields here.
            "default": parameters[param_name].__repr_args__()[0][1],
            "placeholder": "",
        }

    # Get the base classes of the return type
    return_type = parameters.get("return")
    base_classes = get_base_classes(return_type) if return_type else []
    if add_function:
        base_classes.append("function")

    # Get the function's docstring
    docs = inspect.getdoc(func) or ""

    return {
        "template": format_dict(variables, name),
        # Bug fix: ``docs`` is a plain string from inspect.getdoc, so the
        # previous ``docs["Description"]`` raised TypeError at runtime.
        "description": docs,
        "base_classes": base_classes,
    }
def build_template_from_function(
name: str, type_to_loader_dict: Dict, add_function: bool = False
):
@ -37,7 +68,7 @@ def build_template_from_function(
variables = {"_type": _type}
for class_field_items, value in _class.__fields__.items():
if class_field_items in ["callback_manager", "requests_wrapper"]:
if class_field_items in ["callback_manager"]:
continue
variables[class_field_items] = {}
for name_, value_ in value.__repr_args__():
@ -67,7 +98,7 @@ def build_template_from_function(
return {
"template": format_dict(variables, name),
"description": docs["Description"],
"base_classes": get_base_classes(_class),
"base_classes": base_classes,
}
@ -128,17 +159,23 @@ def get_base_classes(cls):
"""Get the base classes of a class.
These are used to determine the output of the nodes.
"""
bases = cls.__bases__
if not bases:
return []
else:
if bases := cls.__bases__:
result = []
for base in bases:
if any(type in base.__module__ for type in ["pydantic", "abc"]):
continue
result.append(base.__name__)
result.extend(get_base_classes(base))
return result
base_classes = get_base_classes(base)
# check if the base_classes are in the result
# if not, add them
for base_class in base_classes:
if base_class not in result:
result.append(base_class)
else:
result = [cls.__name__]
if not result:
result = [cls.__name__]
return list(set(result + [cls.__name__]))
def get_default_factory(module: str, function: str):
@ -150,118 +187,6 @@ def get_default_factory(module: str, function: str):
return None
def get_tools_dict():
    """Get the tools dictionary."""
    # Imported locally to avoid a circular import with the listing module.
    from langflow.interface.listing import CUSTOM_TOOLS

    merged = dict(_BASE_TOOLS)
    merged.update(_LLM_TOOLS)
    merged.update((key, spec[0]) for key, spec in _EXTRA_LLM_TOOLS.items())
    merged.update((key, spec[0]) for key, spec in _EXTRA_OPTIONAL_TOOLS.items())
    merged.update(CUSTOM_TOOLS)
    return merged
def get_tool_by_name(name: str):
    """Get a tool from the tools dictionary."""
    available = get_tools_dict()
    if name in available:
        return available[name]
    raise ValueError(f"{name} not found.")
def get_tool_params(tool, **kwargs) -> Union[Dict, None]:
    """Resolve a tool's parameters by dispatching on its kind.

    Functions are parsed via their first return statement; classes via
    their __init__ / annotated attributes.
    """
    if inspect.isclass(tool):
        # Get the parameters necessary to instantiate the class
        return get_class_tool_params(tool, **kwargs)
    if inspect.isfunction(tool):
        return get_func_tool_params(tool, **kwargs)
    raise ValueError("Tool must be a function or class.")
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
    """Extract a Tool's name/description from a factory function's source.

    Parses the function source into an AST and inspects its first return
    statement. Returns {"name": ..., "description": ...}, or None when no
    return statement is found or the returned object cannot be evaluated.
    """
    tree = ast.parse(inspect.getsource(func))

    # Iterate over the statements in the abstract syntax tree
    for node in ast.walk(tree):
        # Find the first return statement
        if isinstance(node, ast.Return):
            tool = node.value
            if isinstance(tool, ast.Call):
                if isinstance(tool.func, ast.Name) and tool.func.id == "Tool":
                    if tool.keywords:
                        # Keyword form: Tool(name=..., description=...)
                        tool_params = {}
                        for keyword in tool.keywords:
                            if keyword.arg == "name":
                                tool_params["name"] = ast.literal_eval(keyword.value)
                            elif keyword.arg == "description":
                                tool_params["description"] = ast.literal_eval(
                                    keyword.value
                                )

                        return tool_params
                    # Positional form: Tool(name, func, description)
                    return {
                        "name": ast.literal_eval(tool.args[0]),
                        "description": ast.literal_eval(tool.args[2]),
                    }

                #
                else:
                    # get the class object from the return statement
                    # HACK: evaluates reconstructed source — only acceptable
                    # because the source comes from our own registered tool
                    # factories, never from untrusted input.
                    try:
                        class_obj = eval(
                            compile(ast.Expression(tool), "<string>", "eval")
                        )
                    except Exception:
                        return None

                    return {
                        "name": getattr(class_obj, "name"),
                        "description": getattr(class_obj, "description"),
                    }

    # Return None if no return statement was found
    return None
def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
    """Extract constructor/attribute parameters from a Tool class's source.

    Walks the class AST: __init__ positional args become keys (with literal
    annotations as values where possible), and annotated class attributes
    are added with empty-string placeholders.
    """
    tree = ast.parse(inspect.getsource(cls))
    tool_params = {}
    # Iterate over the statements in the abstract syntax tree
    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            # Find the class definition and look for methods
            for stmt in node.body:
                if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__":
                    # There is no assignment statements in the __init__ method
                    # So we need to get the params from the function definition
                    for arg in stmt.args.args:
                        if arg.arg == "name":
                            # It should be the name of the class
                            tool_params[arg.arg] = cls.__name__
                        elif arg.arg == "self":
                            continue
                        # If there is not default value, set it to an empty string
                        else:
                            try:
                                annotation = ast.literal_eval(arg.annotation)  # type: ignore
                                tool_params[arg.arg] = annotation
                            except ValueError:
                                tool_params[arg.arg] = ""
                # Get the attribute name and the annotation
                elif cls != Tool and isinstance(stmt, ast.AnnAssign):
                    # Get the attribute name and the annotation
                    tool_params[stmt.target.id] = ""  # type: ignore

    return tool_params
def get_class_doc(class_name):
"""
Extracts information from the docstring of a given class.
@ -354,6 +279,8 @@ def format_dict(d, name: Optional[str] = None):
# Change type from str to Tool
value["type"] = "Tool" if key in ["allowed_tools"] else _type
value["type"] = "int" if key in ["max_value_length"] else value["type"]
# Show or not field
value["show"] = bool(
(value["required"] and key not in ["input_variables"])
@ -365,6 +292,8 @@ def format_dict(d, name: Optional[str] = None):
"examples",
"temperature",
"model_name",
"headers",
"max_value_length",
]
or "api_key" in key
)
@ -375,18 +304,41 @@ def format_dict(d, name: Optional[str] = None):
)
# Add multline
value["multiline"] = key in ["suffix", "prefix", "template", "examples", "code"]
value["multiline"] = key in [
"suffix",
"prefix",
"template",
"examples",
"code",
"headers",
]
# Replace dict type with str
if "dict" in value["type"].lower():
value["type"] = "code"
if key == "dict_":
value["type"] = "file"
value["suffixes"] = [".json", ".yaml", ".yml"]
value["fileTypes"] = ["json", "yaml", "yml"]
# Replace default value with actual value
if "default" in value:
value["value"] = value["default"]
value.pop("default")
if key == "headers":
value[
"value"
] = """{'Authorization':
'Bearer <token>'}"""
# Add options to openai
if name == "OpenAI" and key == "model_name":
value["options"] = constants.OPENAI_MODELS
value["list"] = True
elif name == "OpenAIChat" and key == "model_name":
value["options"] = constants.CHAT_OPENAI_MODELS
value["list"] = True
return d

Binary file not shown.

After

Width:  |  Height:  |  Size: 102 KiB

View file

@ -4,7 +4,8 @@
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LangFLow</title>
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<title>LangFlow</title>
</head>
<body id='body' style="width: 100%; height:100%">
<noscript>You need to enable JavaScript to run this app.</noscript>

View file

@ -11,6 +11,9 @@ import { ParameterComponentType } from "../../../../types/components";
import FloatComponent from "../../../../components/floatComponent";
import Dropdown from "../../../../components/dropdownComponent";
import CodeAreaComponent from "../../../../components/codeAreaComponent";
import InputFileComponent from "../../../../components/inputFileComponent";
import { TabsContext } from "../../../../contexts/tabsContext";
import IntComponent from "../../../../components/intComponent";
export default function ParameterComponent({
left,
@ -43,6 +46,7 @@ export default function ParameterComponent({
const { reactFlowInstance } = useContext(typesContext);
let disabled =
reactFlowInstance?.getEdges().some((e) => e.targetHandle === id) ?? false;
const { save } = useContext(TabsContext);
return (
<div
@ -50,11 +54,17 @@ export default function ParameterComponent({
className="w-full flex flex-wrap justify-between items-center bg-gray-50 dark:bg-gray-800 dark:text-white mt-1 px-5 py-2"
>
<>
<div className="text-sm truncate">
<div className={"text-sm truncate w-full " + (left ? "" : "text-end")}>
{title}
<span className="text-red-600">{required ? " *" : ""}</span>
</div>
{left && (type === "str" || type === "bool" || type === "float"||type=="code") ? (
{left &&
(type === "str" ||
type === "bool" ||
type === "float" ||
type === "code" ||
type === "file" ||
type === "int") ? (
<></>
) : (
<Tooltip title={tooltipTitle + (required ? " (required)" : "")}>
@ -92,6 +102,7 @@ export default function ParameterComponent({
}
onChange={(t: string[]) => {
data.node.template[name].value = t;
save();
}}
/>
) : data.node.template[name].multiline ? (
@ -100,15 +111,17 @@ export default function ParameterComponent({
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
save();
}}
/>
) : (
<InputComponent
disabled={disabled}
password={data.node.template[name].password ?? true}
password={data.node.template[name].password ?? false}
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
)}
@ -121,6 +134,7 @@ export default function ParameterComponent({
setEnabled={(t) => {
data.node.template[name].value = t;
setEnabled(t);
save();
}}
/>
</div>
@ -130,6 +144,7 @@ export default function ParameterComponent({
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
) : left === true &&
@ -146,6 +161,30 @@ export default function ParameterComponent({
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
save();
}}
/>
) : left === true && type === "file" ? (
<InputFileComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
}}
fileTypes={data.node.template[name].fileTypes}
suffixes={data.node.template[name].suffixes}
onFileChange={(t: string) => {
data.node.template[name].content = t;
save();
}}
></InputFileComponent>
) : left === true && type === "int" ? (
<IntComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
) : (

View file

@ -71,7 +71,7 @@ export default function GenericNode({
<div key={idx}>
{idx === 0 ? (
<div className="px-5 py-2 mt-2 dark:text-white text-center">
Inputs:
Inputs
</div>
) : (
<></>
@ -101,7 +101,7 @@ export default function GenericNode({
</div>
))}
<div className="px-5 py-2 mt-2 dark:text-white text-center">
Output:
Output
</div>
<ParameterComponent
data={data}
@ -109,7 +109,7 @@ export default function GenericNode({
title={data.type}
tooltipTitle={`Type: ${data.node.base_classes.join(" | ")}`}
id={[data.type, data.id, ...data.node.base_classes].join("|")}
type={"str"}
type={data.node.base_classes.join("|")}
left={false}
/>
</>

View file

@ -103,7 +103,7 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
setLockChat(false);
})
.catch((error) => {
setErrorData({ title: error.message ?? "unknow error" });
setErrorData({ title: error.message ?? "Unknown Error", list: [error.response.data.detail]});
setLockChat(false);
});
} else {

View file

@ -23,7 +23,7 @@ export default function CodeAreaComponent({
<div className="w-full flex items-center gap-3">
<span
className={
"truncate block max-w-full text-gray-500 px-3 py-2 rounded-md border border-gray-300 dark:border-gray-700 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
"truncate block w-full text-gray-500 px-3 py-2 rounded-md border border-gray-300 dark:border-gray-700 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
(disabled ? " bg-gray-200" : "")
}
>

View file

@ -0,0 +1,85 @@
import { DocumentMagnifyingGlassIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { alertContext } from "../../contexts/alertContext";
import { FileComponentType } from "../../types/components";
export default function InputFileComponent({
  value,
  onChange,
  disabled,
  suffixes,
  fileTypes,
  onFileChange,
}: FileComponentType) {
  // Displayed file name; local state so the field can be cleared.
  const [myValue, setMyValue] = useState(value);
  const { setErrorData } = useContext(alertContext);

  // Clear both the file name and the file content when the field is disabled.
  useEffect(() => {
    if (disabled) {
      setMyValue("");
      onChange("");
      onFileChange("");
    }
  }, [disabled, onChange]);

  // Forward the data-URL of the selected file to the parent.
  function attachFile(fileReadEvent: ProgressEvent<FileReader>) {
    fileReadEvent.preventDefault();
    const file = fileReadEvent.target.result;
    onFileChange(file as string);
  }

  // Accept only files whose name ends with one of the allowed suffixes.
  function checkFileType(fileName: string): boolean {
    for (let index = 0; index < suffixes.length; index++) {
      if (fileName.endsWith(suffixes[index])) {
        return true;
      }
    }
    return false;
  }

  const handleButtonClick = () => {
    // Create a hidden file input on the fly and open the picker.
    const input = document.createElement("input");
    input.type = "file";
    input.accept = suffixes.join(",");
    input.style.display = "none";
    input.multiple = false;
    input.onchange = (e: Event) => {
      const file = (e.target as HTMLInputElement).files?.[0];
      const fileData = new FileReader();
      fileData.onload = attachFile;
      if (file && checkFileType(file.name)) {
        fileData.readAsDataURL(file);
        setMyValue(file.name);
        onChange(file.name);
      } else {
        // Bug fix: message previously read "Only files this files are allowed".
        setErrorData({
          title:
            "Please select a valid file. Only these file types are allowed:",
          list: fileTypes,
        });
      }
    };
    input.click();
  };

  return (
    <div
      className={
        disabled ? "pointer-events-none cursor-not-allowed w-full" : "w-full"
      }
    >
      <div className="w-full flex items-center gap-3">
        <span
          className={
            "truncate block w-full text-gray-500 px-3 py-2 rounded-md border border-gray-300 dark:border-gray-700 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
            (disabled ? " bg-gray-200" : "")
          }
        >
          {myValue !== "" ? myValue : "No file"}
        </span>
        <button onClick={handleButtonClick}>
          <DocumentMagnifyingGlassIcon className="w-8 h-8 hover:text-blue-600" />
        </button>
      </div>
    </div>
  );
}

View file

@ -0,0 +1,38 @@
import { useEffect, useState } from "react";
import { FloatComponentType } from "../../types/components";
export default function IntComponent({
  value,
  onChange,
  disabled,
}: FloatComponentType) {
  const [myValue, setMyValue] = useState(value ?? "");

  // Reset the value when the field becomes disabled.
  useEffect(() => {
    if (disabled) {
      setMyValue("");
      onChange("");
    }
  }, [disabled, onChange]);

  return (
    <div
      className={
        disabled ? "pointer-events-none cursor-not-allowed w-full" : "w-full"
      }
    >
      <input
        onKeyDown={(event) => {
          // Allow only editing/navigation keys, digits and a leading minus.
          if (
            event.key !== "Backspace" &&
            event.key !== "Enter" &&
            event.key !== "Delete" &&
            event.key !== "ArrowLeft" &&
            event.key !== "ArrowRight" &&
            !/^[-]?\d*$/.test(event.key)
          ) {
            event.preventDefault();
          }
        }}
        type="number"
        value={myValue}
        className={
          "block w-full form-input dark:bg-gray-900 arrow-hide dark:border-gray-600 rounded-md border-gray-300 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
          (disabled ? " bg-gray-200 dark:bg-gray-700" : "")
        }
        placeholder="Type an integer number"
        onChange={(e) => {
          setMyValue(e.target.value);
          onChange(e.target.value);
        }}
      />
    </div>
  );
}

View file

@ -5,6 +5,7 @@ import { normalCaseToSnakeCase } from "../utils";
import { alertContext } from "./alertContext";
const TabsContextInitialValue: TabsContextType = {
save:()=>{},
tabIndex: 0,
setTabIndex: (index: number) => {},
flows: [],
@ -16,7 +17,7 @@ const TabsContextInitialValue: TabsContextType = {
uploadFlow: () => {},
lockChat: false,
setLockChat:(prevState:boolean)=>{},
hardReset:()=>{}
hardReset:()=>{},
};
export const TabsContext = createContext<TabsContextType>(
@ -35,15 +36,20 @@ export function TabsProvider({ children }: { children: ReactNode }) {
newNodeId.current = newNodeId.current + 1;
return newNodeId.current;
}
function save(){
if (flows.length !== 0)
window.localStorage.setItem(
"tabsData",
JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current })
);
}
useEffect(() => {
//save tabs locally
if (flows.length !== 0)
window.localStorage.setItem(
"tabsData",
JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current })
);
save()
}, [flows, id, tabIndex, newNodeId]);
useEffect(() => {
//get tabs locally saved
let cookie = window.localStorage.getItem("tabsData");
@ -177,6 +183,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
return (
<TabsContext.Provider
value={{
save,
hardReset,
lockChat,
setLockChat,

View file

@ -39,6 +39,15 @@ export type TextAreaComponentType = {
value: string;
};
export type FileComponentType = {
disabled: boolean;
onChange: (value: string[] | string) => void;
value: string;
suffixes:Array<string>;
fileTypes:Array<string>;
onFileChange:(value: string) => void;
};
export type DisclosureComponentType = {
children: ReactNode;
button: {

View file

@ -1,6 +1,7 @@
import { FlowType } from "../flow";
export type TabsContextType = {
save:()=>void;
tabIndex: number;
setTabIndex: (index: number) => void;
flows: Array<FlowType>;

View file

@ -5,8 +5,10 @@ import {
LightBulbIcon,
CommandLineIcon,
WrenchScrewdriverIcon,
WrenchIcon,
ComputerDesktopIcon,
Bars3CenterLeftIcon,
GiftIcon,
PaperClipIcon,
QuestionMarkCircleIcon,
} from "@heroicons/react/24/outline";
@ -88,6 +90,8 @@ export const nodeNames:{[char: string]: string} = {
advanced: "Advanced",
chat: "Chat",
docloaders:"Document Loader",
toolkits:"Toolkits",
wrappers:"Wrappers",
unknown:"Unknown"
};
@ -97,10 +101,12 @@ export const nodeIcons:{[char: string]: React.ForwardRefExoticComponent<React.SV
memories: CpuChipIcon,
llms: LightBulbIcon,
prompts: CommandLineIcon,
tools: WrenchScrewdriverIcon,
tools: WrenchIcon,
advanced: ComputerDesktopIcon,
chat: Bars3CenterLeftIcon,
docloaders:Bars3CenterLeftIcon,
toolkits:WrenchScrewdriverIcon,
wrappers:GiftIcon,
unknown:QuestionMarkCircleIcon
};

View file

@ -1,4 +1,5 @@
from pathlib import Path
import pytest
from fastapi.testclient import TestClient
@ -10,6 +11,9 @@ def pytest_configure():
pytest.COMPLEX_EXAMPLE_PATH = (
Path(__file__).parent.absolute() / "data" / "complex_example.json"
)
pytest.OPENAPI_EXAMPLE_PATH = (
Path(__file__).parent.absolute() / "data" / "Openapi.json"
)
pytest.CODE_WITH_SYNTAX_ERROR = """
def get_text():

445
tests/data/Openapi.json Normal file

File diff suppressed because one or more lines are too long

63
tests/test_cache.py Normal file
View file

@ -0,0 +1,63 @@
import json
import tempfile
from pathlib import Path
import pytest
from langflow.cache.utils import PREFIX, compute_hash
from langflow.interface.run import load_langchain_object
def get_graph(_type="basic"):
    """Get a graph from a json file.

    Args:
        _type: One of "basic", "complex" or "openapi", selecting which
            example fixture path to load.

    Raises:
        ValueError: If *_type* is not a known graph type.
    """
    if _type == "basic":
        path = pytest.BASIC_EXAMPLE_PATH
    elif _type == "complex":
        path = pytest.COMPLEX_EXAMPLE_PATH
    elif _type == "openapi":
        path = pytest.OPENAPI_EXAMPLE_PATH
    else:
        # Bug fix: an unknown _type previously fell through and raised
        # UnboundLocalError on `path`; fail with a clear message instead.
        raise ValueError(f"Unknown graph type: {_type}")
    with open(path, "r") as f:
        flow_graph = json.load(f)
    return flow_graph["data"]
@pytest.fixture
def basic_data_graph():
    """Fixture: graph data from the basic example flow."""
    return get_graph()
@pytest.fixture
def complex_data_graph():
    """Fixture: graph data from the complex example flow."""
    return get_graph("complex")
@pytest.fixture
def openapi_data_graph():
    """Fixture: graph data from the OpenAPI example flow."""
    return get_graph("openapi")
def langchain_objects_are_equal(obj1, obj2):
    """Compare two langchain objects by their string representations."""
    left_repr = str(obj1)
    right_repr = str(obj2)
    return left_repr == right_repr
def test_cache_creation(basic_data_graph):
    """Building a langchain object writes a dill cache file, named after the
    graph's hash, into the system temp directory."""
    # Compute hash for the input data_graph
    computed_hash = compute_hash(basic_data_graph)

    # Call process_graph function to build and cache the langchain_object
    _ = load_langchain_object(basic_data_graph)

    # Check if the cache file exists
    cache_file = Path(tempfile.gettempdir()) / f"{PREFIX}_{computed_hash}.dill"
    assert cache_file.exists()
def test_cache_reuse(basic_data_graph):
    """Loading the same graph twice yields equal langchain objects
    (the second load should be served from the cache)."""
    # Call process_graph function to build and cache the langchain_object
    result1 = load_langchain_object(basic_data_graph)

    # Call process_graph function again to use the cached langchain_object
    result2 = load_langchain_object(basic_data_graph)

    # Compare the results to ensure the same langchain_object was used
    assert langchain_objects_are_equal(result1, result2)

50
tests/test_creators.py Normal file
View file

@ -0,0 +1,50 @@
from typing import Dict, List
import pytest
from langflow.interface.agents.base import AgentCreator
from langflow.interface.base import LangChainTypeCreator
@pytest.fixture
def sample_lang_chain_type_creator() -> LangChainTypeCreator:
    """Fixture: a minimal concrete LangChainTypeCreator for exercising to_dict()."""

    class SampleLangChainTypeCreator(LangChainTypeCreator):
        type_name: str = "test_type"

        # NOTE(review): declared without @property, unlike the real creators —
        # confirm to_dict() tolerates a plain method here.
        def type_to_loader_dict(self) -> Dict:  # type: ignore
            return {"test_type": "TestClass"}

        def to_list(self) -> List[str]:
            return ["node1", "node2"]

        def get_signature(self, name: str) -> Dict:
            # Fixed signature regardless of `name`, sufficient for the test.
            return {
                "template": {"test_field": {"type": "str"}},
                "description": "test description",
                "base_classes": ["base_class1", "base_class2"],
            }

    return SampleLangChainTypeCreator()
@pytest.fixture
def sample_agent_creator() -> AgentCreator:
    """Fixture: a real AgentCreator instance."""
    return AgentCreator()
def test_lang_chain_type_creator_to_dict(
    sample_lang_chain_type_creator: LangChainTypeCreator,
):
    """to_dict() exposes every listed node with template/description/base_classes."""
    type_dict = sample_lang_chain_type_creator.to_dict()
    assert len(type_dict) == 1
    assert "test_type" in type_dict
    assert "node1" in type_dict["test_type"]
    assert "node2" in type_dict["test_type"]
    assert "template" in type_dict["test_type"]["node1"]
    assert "description" in type_dict["test_type"]["node1"]
    assert "base_classes" in type_dict["test_type"]["node1"]
def test_agent_creator_type_to_loader_dict(sample_agent_creator: AgentCreator):
    """The agent registry is non-empty and contains the JSON agent."""
    type_to_loader_dict = sample_agent_creator.type_to_loader_dict
    assert len(type_to_loader_dict) > 0
    # Bug fix: the original `assert "JsonAgent"` asserted a non-empty string
    # literal, which is always true. Assert registry membership instead.
    assert "JsonAgent" in type_to_loader_dict

View file

@ -1,7 +1,7 @@
# Test this:
from langflow.interface.custom_types import PythonFunction
from langflow.utils import constants
import pytest
from langflow.interface.custom.types import PythonFunction
from langflow.utils import constants
def test_python_function():

View file

@ -1,5 +1,5 @@
from langflow.interface.listing import CUSTOM_TOOLS
from fastapi.testclient import TestClient
from langflow.interface.tools.constants import CUSTOM_TOOLS
def test_get_all(client: TestClient):

View file

@ -0,0 +1,57 @@
import pytest
from langflow.template.base import FrontendNode, Template, TemplateField
@pytest.fixture
def sample_template_field() -> TemplateField:
    """A plain string TemplateField named 'test_field'."""
    return TemplateField(name="test_field", field_type="str")
@pytest.fixture
def sample_template(sample_template_field: TemplateField) -> Template:
    """A Template wrapping the single sample field."""
    return Template(type_name="test_template", fields=[sample_template_field])
@pytest.fixture
def sample_frontend_node(sample_template: Template) -> FrontendNode:
    """A FrontendNode built on top of the sample template."""
    return FrontendNode(
        name="test_frontend_node",
        template=sample_template,
        description="test description",
        base_classes=["base_class1", "base_class2"],
    )
def test_template_field_defaults(sample_template_field: TemplateField):
    """A freshly built field keeps every default except the explicit ctor args."""
    field = sample_template_field
    # Explicitly supplied at construction time.
    assert field.name == "test_field"
    assert field.field_type == "str"
    # Everything else falls back to the model defaults.
    assert field.required is False
    assert field.placeholder == ""
    assert field.is_list is False
    assert field.show is True
    assert field.multiline is False
    assert field.value is None
    assert field.suffixes == []
    assert field.file_types == []
    assert field.content is None
    assert field.password is False
def test_template_to_dict(
    sample_template: Template, sample_template_field: TemplateField
):
    """to_dict() emits the _type marker plus one entry per field."""
    dumped = sample_template.to_dict()
    assert dumped["_type"] == "test_template"
    # _type plus the single field.
    assert len(dumped) == 2
    assert "test_field" in dumped
    for key in ("type", "required"):
        assert key in dumped["test_field"]
def test_frontend_node_to_dict(sample_frontend_node: FrontendNode):
    """to_dict() nests the node payload under its name."""
    dumped = sample_frontend_node.to_dict()
    assert len(dumped) == 1
    assert "test_frontend_node" in dumped
    payload = dumped["test_frontend_node"]
    for key in ("description", "template", "base_classes"):
        assert key in payload

View file

@ -1,15 +1,35 @@
import json
from langflow.graph.graph import Edge, Graph, Node
import pytest
from langflow.utils.payload import build_json, get_root_node
from langchain.agents import AgentExecutor
from langflow.graph import Edge, Graph, Node
from langflow.graph.nodes import (
AgentNode,
ChainNode,
FileToolNode,
LLMNode,
PromptNode,
ToolkitNode,
ToolNode,
WrapperNode,
)
from langflow.utils.payload import build_json, get_root_node
# Test cases for the graph module
# now we have three types of graph:
# BASIC_EXAMPLE_PATH, COMPLEX_EXAMPLE_PATH, OPENAPI_EXAMPLE_PATH
def get_graph(basic=True):
def get_graph(_type="basic"):
"""Get a graph from a json file"""
path = pytest.BASIC_EXAMPLE_PATH if basic else pytest.COMPLEX_EXAMPLE_PATH
if _type == "basic":
path = pytest.BASIC_EXAMPLE_PATH
elif _type == "complex":
path = pytest.COMPLEX_EXAMPLE_PATH
elif _type == "openapi":
path = pytest.OPENAPI_EXAMPLE_PATH
with open(path, "r") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
@ -18,26 +38,94 @@ def get_graph(basic=True):
return Graph(nodes, edges)
def test_get_nodes_with_target():
@pytest.fixture
def basic_graph():
    """Graph loaded from the basic example flow."""
    return get_graph()
@pytest.fixture
def complex_graph():
    """Graph loaded from the complex example flow."""
    return get_graph("complex")
@pytest.fixture
def openapi_graph():
    """Graph loaded from the OpenAPI example flow."""
    return get_graph("openapi")
def get_node_by_type(graph, node_type):
    """Return the first node in *graph* that is an instance of *node_type*, or None."""
    for candidate in graph.nodes:
        if isinstance(candidate, node_type):
            return candidate
    return None
def test_graph_structure(basic_graph):
    """The basic graph is well-formed: typed nodes, edges that point into it."""
    assert isinstance(basic_graph, Graph)
    # Non-empty node and edge sets.
    assert len(basic_graph.nodes) > 0
    assert len(basic_graph.edges) > 0
    assert all(isinstance(node, Node) for node in basic_graph.nodes)
    for edge in basic_graph.edges:
        assert isinstance(edge, Edge)
        # Both endpoints must belong to this graph.
        assert edge.source in basic_graph.nodes
        assert edge.target in basic_graph.nodes
def test_circular_dependencies(basic_graph):
    """No node can reach itself by repeatedly following get_nodes_with_target."""
    assert isinstance(basic_graph, Graph)

    def reaches_cycle(current, seen):
        # Depth-first walk; revisiting a node within this path means a cycle.
        seen.add(current)
        for nxt in basic_graph.get_nodes_with_target(current):
            if nxt in seen:
                return True
            if reaches_cycle(nxt, seen.copy()):
                return True
        return False

    for start in basic_graph.nodes:
        assert not reaches_cycle(start, set())
def test_invalid_node_types():
    """Constructing a Graph from a node with an invalid template type must raise."""
    bad_nodes = [
        {
            "id": "1",
            "data": {
                "node": {
                    "base_classes": ["BaseClass"],
                    "template": {
                        "_type": "InvalidNodeType",
                    },
                },
            },
        },
    ]
    with pytest.raises(Exception):
        Graph(bad_nodes, [])
def test_get_nodes_with_target(basic_graph):
"""Test getting connected nodes"""
graph = get_graph()
assert isinstance(graph, Graph)
assert isinstance(basic_graph, Graph)
# Get root node
root = get_root_node(graph)
root = get_root_node(basic_graph)
assert root is not None
connected_nodes = graph.get_nodes_with_target(root)
connected_nodes = basic_graph.get_nodes_with_target(root)
assert connected_nodes is not None
def test_get_node_neighbors_basic():
def test_get_node_neighbors_basic(basic_graph):
"""Test getting node neighbors"""
graph = get_graph(basic=True)
assert isinstance(graph, Graph)
assert isinstance(basic_graph, Graph)
# Get root node
root = get_root_node(graph)
root = get_root_node(basic_graph)
assert root is not None
neighbors = graph.get_node_neighbors(root)
neighbors = basic_graph.get_node_neighbors(root)
assert neighbors is not None
assert isinstance(neighbors, dict)
# Root Node is an Agent, it requires an LLMChain and tools
@ -56,7 +144,7 @@ def test_get_node_neighbors_basic():
for neighbor, val in neighbors.items()
if "Chain" in neighbor.data["type"] and val
)
chain_neighbors = graph.get_node_neighbors(chain)
chain_neighbors = basic_graph.get_node_neighbors(chain)
assert chain_neighbors is not None
assert isinstance(chain_neighbors, dict)
# Check if there is a LLM in the chain's neighbors
@ -73,15 +161,13 @@ def test_get_node_neighbors_basic():
)
def test_get_node_neighbors_complex():
def test_get_node_neighbors_complex(complex_graph):
"""Test getting node neighbors"""
graph = get_graph(basic=False)
assert isinstance(graph, Graph)
assert isinstance(complex_graph, Graph)
# Get root node
root = get_root_node(graph)
root = get_root_node(complex_graph)
assert root is not None
neighbors = graph.get_nodes_with_target(root)
neighbors = complex_graph.get_nodes_with_target(root)
assert neighbors is not None
# Neighbors should be a list of nodes
assert isinstance(neighbors, list)
@ -92,7 +178,7 @@ def test_get_node_neighbors_complex():
assert any("Tool" in neighbor.data["type"] for neighbor in neighbors)
# Now on to the Chain's neighbors
chain = next(neighbor for neighbor in neighbors if "Chain" in neighbor.data["type"])
chain_neighbors = graph.get_nodes_with_target(chain)
chain_neighbors = complex_graph.get_nodes_with_target(chain)
assert chain_neighbors is not None
# Check if there is a LLM in the chain's neighbors
assert any("OpenAI" in neighbor.data["type"] for neighbor in chain_neighbors)
@ -100,7 +186,7 @@ def test_get_node_neighbors_complex():
assert any("Prompt" in neighbor.data["type"] for neighbor in chain_neighbors)
# Now on to the Tool's neighbors
tool = next(neighbor for neighbor in neighbors if "Tool" in neighbor.data["type"])
tool_neighbors = graph.get_nodes_with_target(tool)
tool_neighbors = complex_graph.get_nodes_with_target(tool)
assert tool_neighbors is not None
# Check if there is an Agent in the tool's neighbors
assert any("Agent" in neighbor.data["type"] for neighbor in tool_neighbors)
@ -108,7 +194,7 @@ def test_get_node_neighbors_complex():
agent = next(
neighbor for neighbor in tool_neighbors if "Agent" in neighbor.data["type"]
)
agent_neighbors = graph.get_nodes_with_target(agent)
agent_neighbors = complex_graph.get_nodes_with_target(agent)
assert agent_neighbors is not None
# Check if there is a Tool in the agent's neighbors
assert any("Tool" in neighbor.data["type"] for neighbor in agent_neighbors)
@ -116,62 +202,57 @@ def test_get_node_neighbors_complex():
tool = next(
neighbor for neighbor in agent_neighbors if "Tool" in neighbor.data["type"]
)
tool_neighbors = graph.get_nodes_with_target(tool)
tool_neighbors = complex_graph.get_nodes_with_target(tool)
assert tool_neighbors is not None
# Check if there is a PythonFunction in the tool's neighbors
assert any("PythonFunction" in neighbor.data["type"] for neighbor in tool_neighbors)
def test_get_node():
def test_get_node(basic_graph):
"""Test getting a single node"""
graph = get_graph()
node_id = graph.nodes[0].id
node = graph.get_node(node_id)
node_id = basic_graph.nodes[0].id
node = basic_graph.get_node(node_id)
assert isinstance(node, Node)
assert node.id == node_id
def test_build_nodes():
def test_build_nodes(basic_graph):
"""Test building nodes"""
graph = get_graph()
assert len(graph.nodes) == len(graph._nodes)
for node in graph.nodes:
assert len(basic_graph.nodes) == len(basic_graph._nodes)
for node in basic_graph.nodes:
assert isinstance(node, Node)
def test_build_edges():
def test_build_edges(basic_graph):
"""Test building edges"""
graph = get_graph()
assert len(graph.edges) == len(graph._edges)
for edge in graph.edges:
assert len(basic_graph.edges) == len(basic_graph._edges)
for edge in basic_graph.edges:
assert isinstance(edge, Edge)
assert isinstance(edge.source, Node)
assert isinstance(edge.target, Node)
def test_get_root_node():
def test_get_root_node(basic_graph, complex_graph):
"""Test getting root node"""
graph = get_graph(basic=True)
assert isinstance(graph, Graph)
root = get_root_node(graph)
assert isinstance(basic_graph, Graph)
root = get_root_node(basic_graph)
assert root is not None
assert isinstance(root, Node)
assert root.data["type"] == "ZeroShotAgent"
# For complex example, the root node is a ZeroShotAgent too
graph = get_graph(basic=False)
assert isinstance(graph, Graph)
root = get_root_node(graph)
assert isinstance(complex_graph, Graph)
root = get_root_node(complex_graph)
assert root is not None
assert isinstance(root, Node)
assert root.data["type"] == "ZeroShotAgent"
def test_build_json():
def test_build_json(basic_graph):
"""Test building JSON from graph"""
graph = get_graph()
assert isinstance(graph, Graph)
root = get_root_node(graph)
json_data = build_json(root, graph)
assert isinstance(basic_graph, Graph)
root = get_root_node(basic_graph)
json_data = build_json(root, basic_graph)
assert isinstance(json_data, dict)
assert json_data["_type"] == "zero-shot-react-description"
assert isinstance(json_data["llm_chain"], dict)
@ -187,38 +268,37 @@ def test_build_json():
assert all(isinstance(val, str) for val in json_data["return_values"])
def test_validate_edges():
def test_validate_edges(basic_graph):
"""Test validating edges"""
graph = get_graph()
assert isinstance(graph, Graph)
assert isinstance(basic_graph, Graph)
# all edges should be valid
assert all(edge.valid for edge in graph.edges)
assert all(edge.valid for edge in basic_graph.edges)
def test_matched_type():
def test_matched_type(basic_graph):
"""Test matched type attribute in Edge"""
graph = get_graph()
assert isinstance(graph, Graph)
assert isinstance(basic_graph, Graph)
# all edges should be valid
assert all(edge.valid for edge in graph.edges)
assert all(edge.valid for edge in basic_graph.edges)
# all edges should have a matched_type attribute
assert all(hasattr(edge, "matched_type") for edge in graph.edges)
assert all(hasattr(edge, "matched_type") for edge in basic_graph.edges)
# The matched_type attribute should be in the source_types attr
assert all(edge.matched_type in edge.source_types for edge in graph.edges)
assert all(edge.matched_type in edge.source_types for edge in basic_graph.edges)
def test_build_params():
def test_build_params(basic_graph):
"""Test building params"""
graph = get_graph()
assert isinstance(graph, Graph)
assert isinstance(basic_graph, Graph)
# all edges should be valid
assert all(edge.valid for edge in graph.edges)
assert all(edge.valid for edge in basic_graph.edges)
# all edges should have a matched_type attribute
assert all(hasattr(edge, "matched_type") for edge in graph.edges)
assert all(hasattr(edge, "matched_type") for edge in basic_graph.edges)
# The matched_type attribute should be in the source_types attr
assert all(edge.matched_type in edge.source_types for edge in graph.edges)
assert all(edge.matched_type in edge.source_types for edge in basic_graph.edges)
# Get the root node
root = get_root_node(graph)
root = get_root_node(basic_graph)
# Root node is a ZeroShotAgent
# which requires an llm_chain, allowed_tools and return_values
assert isinstance(root.params, dict)
@ -260,7 +340,7 @@ def test_build_params():
assert isinstance(llm_node.params["model_name"], str)
def test_build():
def test_build(basic_graph, complex_graph):
"""Test Node's build method"""
# def build(self):
# # The params dict is used to build the module
@ -283,18 +363,81 @@ def test_build():
# # and instantiate it with the params
# # and return the instance
# return LANGCHAIN_TYPES_DICT[self.node_type](**self.params)
graph = get_graph()
assert isinstance(graph, Graph)
assert isinstance(basic_graph, Graph)
# Now we test the build method
# Build the Agent
agent = graph.build()
agent = basic_graph.build()
# The agent should be a AgentExecutor
assert isinstance(agent, AgentExecutor)
# Now we test the complex example
graph = get_graph(basic=False)
assert isinstance(graph, Graph)
assert isinstance(complex_graph, Graph)
# Now we test the build method
agent = graph.build()
agent = complex_graph.build()
# The agent should be a AgentExecutor
assert isinstance(agent, AgentExecutor)
def test_agent_node_build(basic_graph):
    """An AgentNode exists in the basic graph and builds successfully."""
    node = get_node_by_type(basic_graph, AgentNode)
    assert node is not None
    # build() must yield something; AgentNode-specific checks can go here.
    assert node.build() is not None
def test_tool_node_build(basic_graph):
    """A ToolNode exists in the basic graph and builds successfully."""
    node = get_node_by_type(basic_graph, ToolNode)
    assert node is not None
    # build() must yield something; ToolNode-specific checks can go here.
    assert node.build() is not None
def test_chain_node_build(complex_graph):
    """A ChainNode exists in the complex graph and builds successfully."""
    node = get_node_by_type(complex_graph, ChainNode)
    assert node is not None
    # build() must yield something; ChainNode-specific checks can go here.
    assert node.build() is not None
def test_prompt_node_build(complex_graph):
    """A PromptNode exists in the complex graph and builds successfully."""
    node = get_node_by_type(complex_graph, PromptNode)
    assert node is not None
    # build() must yield something; PromptNode-specific checks can go here.
    assert node.build() is not None
def test_llm_node_build(basic_graph):
    """An LLMNode exists in the basic graph and builds successfully."""
    node = get_node_by_type(basic_graph, LLMNode)
    assert node is not None
    # build() must yield something; LLMNode-specific checks can go here.
    assert node.build() is not None
def test_toolkit_node_build(openapi_graph):
    """A ToolkitNode exists in the OpenAPI graph and builds successfully."""
    node = get_node_by_type(openapi_graph, ToolkitNode)
    assert node is not None
    # build() must yield something; ToolkitNode-specific checks can go here.
    assert node.build() is not None
def test_file_tool_node_build(openapi_graph):
    """A FileToolNode exists in the OpenAPI graph and builds successfully."""
    node = get_node_by_type(openapi_graph, FileToolNode)
    assert node is not None
    # build() must yield something; FileToolNode-specific checks can go here.
    assert node.build() is not None
def test_wrapper_node_build(openapi_graph):
    """A WrapperNode exists in the OpenAPI graph and builds successfully."""
    node = get_node_by_type(openapi_graph, WrapperNode)
    assert node is not None
    # build() must yield something; WrapperNode-specific checks can go here.
    assert node.build() is not None

View file

@ -1,10 +1,10 @@
import json
from langflow.graph.graph import Graph
import pytest
from langflow import load_flow_from_json
from langflow.utils.payload import get_root_node
import pytest
from langchain.agents import AgentExecutor
from langflow import load_flow_from_json
from langflow.graph import Graph
from langflow.utils.payload import get_root_node
def test_load_flow_from_json():

291
tests/test_template.py Normal file
View file

@ -0,0 +1,291 @@
import importlib
from typing import Dict, List, Optional
import pytest
from langflow.utils.constants import CHAT_OPENAI_MODELS, OPENAI_MODELS
from langflow.utils.util import (
build_template_from_class,
build_template_from_function,
format_dict,
get_base_classes,
get_class_doc,
get_default_factory,
)
from pydantic import BaseModel
# Dummy classes for testing purposes
class Parent(BaseModel):
    """Parent Class"""
    # NOTE: the docstring text above is asserted verbatim by test_get_class_doc
    # and test_build_template_from_class — do not reword it.
    parent_field: str
class Child(Parent):
    """Child Class"""
    # NOTE: the docstring text above is asserted verbatim by test_get_class_doc
    # — do not reword it.
    child_field: int
class ExampleClass1(BaseModel):
    """Example class 1."""
    # NOTE(review): this __init__ does not call super().__init__(), so pydantic
    # validation is bypassed and `data` is not a declared field — assigning
    # self.data may fail if this class is ever actually instantiated; confirm
    # the loaders below are only introspected, not called.
    def __init__(self, data: Optional[List[int]] = None):
        self.data = data or [1, 2, 3]
class ExampleClass2(BaseModel):
    """Example class 2."""
    # NOTE(review): as with ExampleClass1, __init__ skips super().__init__()
    # and assigns an undeclared attribute — likely only used for signature
    # introspection, never instantiated; confirm.
    def __init__(self, data: Optional[Dict[str, int]] = None):
        self.data = data or {"a": 1, "b": 2, "c": 3}
def example_loader_1() -> ExampleClass1:
    """Example loader function 1."""
    # The return annotation is presumably what build_template_from_function
    # matches the requested class name against — confirm in langflow.utils.util.
    return ExampleClass1()
def example_loader_2() -> ExampleClass2:
    """Example loader function 2."""
    # Second loader so the type_to_loader_dict in the test has two entries.
    return ExampleClass2()
def test_build_template_from_function():
    """build_template_from_function resolves a loader by its return-class name."""
    loaders = {
        "example1": example_loader_1,
        "example2": example_loader_2,
    }

    # A known class name yields the three standard template sections.
    template = build_template_from_function("ExampleClass1", loaders)
    for key in ("template", "description", "base_classes"):
        assert key in template

    # add_function=True injects "function" into the base classes.
    with_function = build_template_from_function(
        "ExampleClass1", loaders, add_function=True
    )
    assert "function" in with_function["base_classes"]

    # An unknown name raises a descriptive ValueError.
    with pytest.raises(ValueError, match=r".* not found"):
        build_template_from_function("NonExistent", loaders)
# Test build_template_from_class
def test_build_template_from_class():
    """build_template_from_class resolves a class and reports its ancestry."""
    registry: Dict[str, type] = {"parent": Parent, "child": Child}

    result = build_template_from_class("Child", registry)
    for key in ("template", "description", "base_classes"):
        assert key in result
    # Both the class itself and its ancestor show up as base classes.
    assert "Child" in result["base_classes"]
    assert "Parent" in result["base_classes"]
    # Description comes from the class docstring.
    assert result["description"] == "Child Class"

    # Unknown class name raises with the exact message.
    with pytest.raises(ValueError, match="InvalidClass not found."):
        build_template_from_class("InvalidClass", registry)
# Test format_dict
def test_format_dict():
    """format_dict normalizes raw field specs into frontend field dicts.

    Each numbered case feeds a minimal template dict through format_dict and
    compares against the expected normalized form. The original spelled out the
    full expected dict for every case; _expected() centralizes the defaults so
    each case states only its deltas, making the intent of each case obvious.
    """

    def _expected(**overrides):
        # Baseline keys every formatted field carries; cases override deltas.
        field = {
            "type": "str",
            "required": False,
            "list": False,
            "show": False,
            "password": False,
            "multiline": False,
        }
        field.update(overrides)
        return field

    # 1: Optional[...] is unwrapped to the inner type.
    assert format_dict(
        {"field1": {"type": "Optional[str]", "required": False}}
    ) == {"field1": _expected()}

    # 2: List[...] is unwrapped and flagged as a list.
    assert format_dict(
        {"field1": {"type": "List[str]", "required": False}}
    ) == {"field1": _expected(list=True)}

    # 3: Mapping is replaced with dict which is replaced with code.
    assert format_dict(
        {"field1": {"type": "Mapping[str, int]", "required": False}}
    ) == {"field1": _expected(type="code")}

    # 4: a provided default becomes the field's value.
    assert format_dict(
        {"field1": {"type": "str", "required": False, "default": "test"}}
    ) == {"field1": _expected(value="test")}

    # 5: api_key-style fields are shown and masked as passwords.
    assert format_dict(
        {
            "field1": {"type": "str", "required": False},
            "api_key": {"type": "str", "required": False},
        }
    ) == {
        "field1": _expected(),
        "api_key": _expected(show=True, password=True),
    }

    # 6: prefix fields are shown and rendered multiline.
    assert format_dict(
        {
            "field1": {"type": "str", "required": False},
            "prefix": {"type": "str", "required": False},
        }
    ) == {
        "field1": _expected(),
        "prefix": _expected(show=True, multiline=True),
    }

    # 7: model_name gets a class-specific options list (OpenAI / OpenAIChat).
    model_name_input = {"model_name": {"type": "str", "required": False}}
    assert format_dict(model_name_input, "OpenAI") == {
        "model_name": _expected(list=True, show=True, options=OPENAI_MODELS)
    }
    assert format_dict(model_name_input, "OpenAIChat") == {
        "model_name": _expected(list=True, show=True, options=CHAT_OPENAI_MODELS)
    }

    # 8: Dict[...] is likewise replaced with code.
    assert format_dict(
        {"field1": {"type": "Dict[str, int]", "required": False}}
    ) == {"field1": _expected(type="code")}
# Test get_base_classes
def test_get_base_classes():
    """get_base_classes reports the class itself plus its ancestors."""
    parent_bases = get_base_classes(Parent)
    child_bases = get_base_classes(Child)
    assert "Parent" in parent_bases
    # Child reports both itself and its parent.
    assert "Child" in child_bases
    assert "Parent" in child_bases
# Test get_default_factory
def test_get_default_factory():
    """get_default_factory resolves a '<function name>' repr within a module."""
    target_module = "langflow.utils.util"

    def dummy_function():
        return "default_value"

    # Inject the function into the target module so the repr can be resolved.
    setattr(importlib.import_module(target_module), "dummy_function", dummy_function)
    resolved = get_default_factory(target_module, "<function dummy_function>")
    assert resolved == "default_value"
# Test get_class_doc
def test_get_class_doc():
    """get_class_doc extracts the one-line docstring as the Description."""
    for cls, expected in ((Parent, "Parent Class"), (Child, "Child Class")):
        assert get_class_doc(cls)["Description"] == expected

View file

@ -1,12 +1,13 @@
from unittest import mock
import pytest
from langflow.utils.validate import (
create_function,
execute_function,
extract_function_name,
validate_code,
execute_function,
)
import pytest
from requests.exceptions import MissingSchema
from unittest import mock
def test_validate_code():