Merge branch 'toolkits' into inputFile
This commit is contained in:
commit
d8f8364b9e
17 changed files with 576 additions and 337 deletions
111
poetry.lock
generated
111
poetry.lock
generated
|
|
@ -239,37 +239,37 @@ lxml = ["lxml"]
|
|||
|
||||
[[package]]
|
||||
name = "black"
|
||||
version = "23.1.0"
|
||||
version = "23.3.0"
|
||||
description = "The uncompromising code formatter."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
|
||||
{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
|
||||
{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
|
||||
{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
|
||||
{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
|
||||
{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
|
||||
{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
|
||||
{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
|
||||
{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
|
||||
{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
|
||||
{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
|
||||
{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
|
||||
{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
|
||||
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
|
||||
{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
|
||||
{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
|
||||
{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
|
||||
{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
|
||||
{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
|
||||
{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
|
||||
{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
|
||||
{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
|
||||
{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
|
||||
{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
|
||||
{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
|
||||
{file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
|
||||
{file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
|
||||
{file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
|
||||
{file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
|
||||
{file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
|
||||
{file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
|
||||
{file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
|
||||
{file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
|
||||
{file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
|
||||
{file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
|
||||
{file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
|
||||
{file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
|
||||
{file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
|
||||
{file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
|
||||
{file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
|
||||
{file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
|
||||
{file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
|
||||
{file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
|
||||
{file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
|
||||
{file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
|
||||
{file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
|
||||
{file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
|
||||
{file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
|
||||
{file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
|
||||
{file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -715,6 +715,18 @@ files = [
|
|||
{file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "geojson"
|
||||
version = "2.5.0"
|
||||
description = "Python bindings and utilities for GeoJSON"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "geojson-2.5.0-py2.py3-none-any.whl", hash = "sha256:ccbd13368dd728f4e4f13ffe6aaf725b6e802c692ba0dde628be475040c534ba"},
|
||||
{file = "geojson-2.5.0.tar.gz", hash = "sha256:6e4bb7ace4226a45d9c8c8b1348b3fc43540658359f93c3f7e03efa9f15f658a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-core"
|
||||
version = "2.11.0"
|
||||
|
|
@ -1183,14 +1195,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
|
|||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.0.113"
|
||||
version = "0.0.125"
|
||||
description = "Building applications with LLMs through composability"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.8.1,<4.0"
|
||||
files = [
|
||||
{file = "langchain-0.0.113-py3-none-any.whl", hash = "sha256:9e146d116fd3b9b2210c8c447cabfa20ef27c26ea3f2bc986eab97d1dad0aab6"},
|
||||
{file = "langchain-0.0.113.tar.gz", hash = "sha256:a494fe02bc63da4bcda7da8d7f4a346522fbc87f0a4955b72519ec2ed86bf906"},
|
||||
{file = "langchain-0.0.125-py3-none-any.whl", hash = "sha256:678cf9d6b0d2b48fab574b5e6faa3bf6e9d249847f3956cf0970c7d48724ec43"},
|
||||
{file = "langchain-0.0.125.tar.gz", hash = "sha256:af54d190bd0ae8cab633c1b6a652c76aae685d6eb27ff39d3f9b24d27ba9f1af"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -1198,13 +1210,14 @@ aiohttp = ">=3.8.3,<4.0.0"
|
|||
dataclasses-json = ">=0.5.7,<0.6.0"
|
||||
numpy = ">=1,<2"
|
||||
pydantic = ">=1,<2"
|
||||
PyYAML = ">=6,<7"
|
||||
pyowm = ">=3.3.0,<4.0.0"
|
||||
PyYAML = ">=5.4.1"
|
||||
requests = ">=2,<3"
|
||||
SQLAlchemy = ">=1,<2"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.2,<0.3.0)", "beautifulsoup4 (>=4,<5)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
|
||||
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.2,<0.3.0)", "beautifulsoup4 (>=4,<5)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
|
||||
llms = ["anthropic (>=0.2.2,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1809,6 +1822,26 @@ files = [
|
|||
[package.extras]
|
||||
plugins = ["importlib-metadata"]
|
||||
|
||||
[[package]]
|
||||
name = "pyowm"
|
||||
version = "3.3.0"
|
||||
description = "A Python wrapper around OpenWeatherMap web APIs"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pyowm-3.3.0-py3-none-any.whl", hash = "sha256:86463108e7613171531ba306040b43c972b3fc0b0acf73b12c50910cdd2107ab"},
|
||||
{file = "pyowm-3.3.0.tar.gz", hash = "sha256:8196f77c91eac680676ed5ee484aae8a165408055e3e2b28025cbf60b8681e03"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
geojson = ">=2.3.0,<3"
|
||||
PySocks = ">=1.7.1,<2"
|
||||
requests = [
|
||||
{version = ">=2.20.0,<3"},
|
||||
{version = "*", extras = ["socks"]},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "3.0.9"
|
||||
|
|
@ -1824,6 +1857,19 @@ files = [
|
|||
[package.extras]
|
||||
diagrams = ["jinja2", "railroad-diagrams"]
|
||||
|
||||
[[package]]
|
||||
name = "pysocks"
|
||||
version = "1.7.1"
|
||||
description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
{file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"},
|
||||
{file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"},
|
||||
{file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "7.2.2"
|
||||
|
|
@ -2043,6 +2089,7 @@ files = [
|
|||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""}
|
||||
urllib3 = ">=1.21.1,<1.27"
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -2635,4 +2682,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.9"
|
||||
content-hash = "18b858c93c242f3b53e9f77284904aa0eabb4c955f905cfe5fb227a6785bfabc"
|
||||
content-hash = "2c201e79c486802be55495286b288ea79caa4bec2dc74a8eca90030116b6f8a6"
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ google-search-results = "^2.4.1"
|
|||
google-api-python-client = "^2.79.0"
|
||||
typer = "^0.7.0"
|
||||
gunicorn = "^20.1.0"
|
||||
langchain = "^0.0.113"
|
||||
langchain = "^0.0.125"
|
||||
openai = "^0.27.2"
|
||||
types-pyyaml = "^6.0.12.8"
|
||||
|
||||
|
|
|
|||
|
|
@ -22,8 +22,11 @@ tools:
|
|||
- Serper Search
|
||||
- Tool
|
||||
- PythonFunction
|
||||
- JsonSpec
|
||||
|
||||
memories:
|
||||
# - ConversationBufferMemory
|
||||
|
||||
|
||||
|
||||
dev: false
|
||||
|
|
|
|||
|
|
@ -0,0 +1,4 @@
|
|||
from langflow.graph.graph import Graph
|
||||
from langflow.graph.base import Node, Edge
|
||||
|
||||
__all__ = ["Graph", "Node", "Edge"]
|
||||
213
src/backend/langflow/graph/base.py
Normal file
213
src/backend/langflow/graph/base.py
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
# Description: Graph class for building a graph of nodes and edges
|
||||
# Insights:
|
||||
# - Defer prompts building to the last moment or when they have all the tools
|
||||
# - Build each inner agent first, then build the outer agent
|
||||
|
||||
from copy import deepcopy
|
||||
import types
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
from langflow.utils import payload
|
||||
from langflow.interface.listing import ALL_TYPES_DICT, ALL_TOOLS_NAMES, TOOLS_DICT
|
||||
from langflow.interface import loading
|
||||
|
||||
|
||||
class Node:
|
||||
def __init__(self, data: Dict):
|
||||
self.id: str = data["id"]
|
||||
self._data = data
|
||||
self.edges: List[Edge] = []
|
||||
self._parse_data()
|
||||
self._built_object = None
|
||||
self._built = False
|
||||
|
||||
def _parse_data(self) -> None:
|
||||
self.data = self._data["data"]
|
||||
self.output = self.data["node"]["base_classes"]
|
||||
template_dicts = {
|
||||
key: value
|
||||
for key, value in self.data["node"]["template"].items()
|
||||
if isinstance(value, dict)
|
||||
}
|
||||
|
||||
self.required_inputs = [
|
||||
template_dicts[key]["type"]
|
||||
for key, value in template_dicts.items()
|
||||
if value["required"]
|
||||
]
|
||||
self.optional_inputs = [
|
||||
template_dicts[key]["type"]
|
||||
for key, value in template_dicts.items()
|
||||
if not value["required"]
|
||||
]
|
||||
|
||||
template_dict = self.data["node"]["template"]
|
||||
self.node_type = (
|
||||
self.data["type"] if "Tool" not in self.output else template_dict["_type"]
|
||||
)
|
||||
|
||||
def _build_params(self):
|
||||
# Some params are required, some are optional
|
||||
# but most importantly, some params are python base classes
|
||||
# like str and others are LangChain objects like LLMChain, BasePromptTemplate
|
||||
# so we need to be able to distinguish between the two
|
||||
|
||||
# The dicts with "type" == "str" are the ones that are python base classes
|
||||
# and most likely have a "value" key
|
||||
|
||||
# So for each key besides "_type" in the template dict, we have a dict
|
||||
# with a "type" key. If the type is not "str", then we need to get the
|
||||
# edge that connects to that node and get the Node with the required data
|
||||
# and use that as the value for the param
|
||||
# If the type is "str", then we need to get the value of the "value" key
|
||||
# and use that as the value for the param
|
||||
template_dict = {
|
||||
key: value
|
||||
for key, value in self.data["node"]["template"].items()
|
||||
if isinstance(value, dict)
|
||||
}
|
||||
params = {}
|
||||
for key, value in template_dict.items():
|
||||
if key == "_type":
|
||||
continue
|
||||
# If the type is not transformable to a python base class
|
||||
# then we need to get the edge that connects to this node
|
||||
if value["type"] not in ["str", "bool", "code"]:
|
||||
# Get the edge that connects to this node
|
||||
edge = next(
|
||||
(
|
||||
edge
|
||||
for edge in self.edges
|
||||
if edge.target == self and edge.matched_type in value["type"]
|
||||
),
|
||||
None,
|
||||
)
|
||||
# Get the output of the node that the edge connects to
|
||||
# if the value['list'] is True, then there will be more
|
||||
# than one time setting to params[key]
|
||||
# so we need to append to a list if it exists
|
||||
# or create a new list if it doesn't
|
||||
if edge is None and value["required"]:
|
||||
# break line
|
||||
raise ValueError(
|
||||
f"Required input {key} for module {self.node_type} not found"
|
||||
)
|
||||
if value["list"]:
|
||||
if key in params:
|
||||
params[key].append(edge.source)
|
||||
else:
|
||||
params[key] = [edge.source]
|
||||
elif value["required"] or edge is not None:
|
||||
params[key] = edge.source
|
||||
elif value["required"] or value.get("value"):
|
||||
params[key] = value["value"]
|
||||
|
||||
# Add _type to params
|
||||
self.params = params
|
||||
|
||||
def _build(self):
|
||||
# The params dict is used to build the module
|
||||
# it contains values and keys that point to nodes which
|
||||
# have their own params dict
|
||||
# When build is called, we iterate through the params dict
|
||||
# and if the value is a node, we call build on that node
|
||||
# and use the output of that build as the value for the param
|
||||
# if the value is not a node, then we use the value as the param
|
||||
# and continue
|
||||
# Another aspect is that the node_type is the class that we need to import
|
||||
# and instantiate with these built params
|
||||
|
||||
# Build each node in the params dict
|
||||
for key, value in self.params.items():
|
||||
# Check if Node or list of Nodes
|
||||
if isinstance(value, Node):
|
||||
result = value.build()
|
||||
# If the key is "func", then we need to use the run method
|
||||
if key == "func" and not isinstance(result, types.FunctionType):
|
||||
# func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
|
||||
# so we need to check if there is an attribute called run
|
||||
if hasattr(result, "run"):
|
||||
result = result.run # type: ignore
|
||||
elif hasattr(result, "get_function"):
|
||||
result = result.get_function() # type: ignore
|
||||
self.params[key] = result
|
||||
elif isinstance(value, list) and all(
|
||||
isinstance(node, Node) for node in value
|
||||
):
|
||||
self.params[key] = [node.build() for node in value] # type: ignore
|
||||
|
||||
# Get the class from LANGCHAIN_TYPES_DICT
|
||||
# and instantiate it with the params
|
||||
# and return the instance
|
||||
for base_type, value in ALL_TYPES_DICT.items():
|
||||
if base_type == "tools":
|
||||
value = TOOLS_DICT
|
||||
|
||||
if self.node_type in value:
|
||||
self._built_object = loading.instantiate_class(
|
||||
node_type=self.node_type,
|
||||
base_type=base_type,
|
||||
params=self.params,
|
||||
)
|
||||
break
|
||||
|
||||
if self._built_object is None:
|
||||
raise ValueError(f"Node type {self.node_type} not found")
|
||||
|
||||
self._built = True
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
if not self._built or force:
|
||||
self._build()
|
||||
return deepcopy(self._built_object)
|
||||
|
||||
def add_edge(self, edge: "Edge") -> None:
|
||||
self.edges.append(edge)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"Node(id={self.id}, data={self.data})"
|
||||
|
||||
def __eq__(self, __o: object) -> bool:
|
||||
return self.id == __o.id if isinstance(__o, Node) else False
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return id(self)
|
||||
|
||||
|
||||
class Edge:
|
||||
def __init__(self, source: "Node", target: "Node"):
|
||||
self.source: "Node" = source
|
||||
self.target: "Node" = target
|
||||
self.validate_edge()
|
||||
|
||||
def validate_edge(self) -> None:
|
||||
# Validate that the outputs of the source node are valid inputs
|
||||
# for the target node
|
||||
self.source_types = self.source.output
|
||||
self.target_reqs = self.target.required_inputs + self.target.optional_inputs
|
||||
# Both lists contain strings and sometimes a string contains the value we are
|
||||
# looking for e.g. comgin_out=["Chain"] and target_reqs=["LLMChain"]
|
||||
# so we need to check if any of the strings in source_types is in target_reqs
|
||||
self.valid = any(
|
||||
output in target_req
|
||||
for output in self.source_types
|
||||
for target_req in self.target_reqs
|
||||
)
|
||||
# Get what type of input the target node is expecting
|
||||
|
||||
self.matched_type = next(
|
||||
(
|
||||
output
|
||||
for output in self.source_types
|
||||
for target_req in self.target_reqs
|
||||
if output in target_req
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
|
||||
f", matched_type={self.matched_type})"
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -1,296 +1,17 @@
|
|||
# Description: Graph class for building a graph of nodes and edges
|
||||
# Insights:
|
||||
# - Defer prompts building to the last moment or when they have all the tools
|
||||
# - Build each inner agent first, then build the outer agent
|
||||
|
||||
from copy import deepcopy
|
||||
import types
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
from typing import Dict, List, Union
|
||||
from langflow.utils import payload
|
||||
from langflow.interface.listing import ALL_TYPES_DICT, ALL_TOOLS_NAMES, TOOLS_DICT
|
||||
from langflow.interface import loading
|
||||
from langflow.interface.listing import ALL_TOOLS_NAMES
|
||||
|
||||
|
||||
class Node:
|
||||
def __init__(self, data: Dict):
|
||||
self.id: str = data["id"]
|
||||
self._data = data
|
||||
self.edges: List[Edge] = []
|
||||
self._parse_data()
|
||||
self._built_object = None
|
||||
self._built = False
|
||||
|
||||
def _parse_data(self) -> None:
|
||||
self.data = self._data["data"]
|
||||
self.output = self.data["node"]["base_classes"]
|
||||
template_dicts = {
|
||||
key: value
|
||||
for key, value in self.data["node"]["template"].items()
|
||||
if isinstance(value, dict)
|
||||
}
|
||||
|
||||
self.required_inputs = [
|
||||
template_dicts[key]["type"]
|
||||
for key, value in template_dicts.items()
|
||||
if value["required"]
|
||||
]
|
||||
self.optional_inputs = [
|
||||
template_dicts[key]["type"]
|
||||
for key, value in template_dicts.items()
|
||||
if not value["required"]
|
||||
]
|
||||
|
||||
template_dict = self.data["node"]["template"]
|
||||
self.node_type = (
|
||||
self.data["type"] if "Tool" not in self.output else template_dict["_type"]
|
||||
)
|
||||
|
||||
def _build_params(self):
|
||||
# Some params are required, some are optional
|
||||
# but most importantly, some params are python base classes
|
||||
# like str and others are LangChain objects like LLMChain, BasePromptTemplate
|
||||
# so we need to be able to distinguish between the two
|
||||
|
||||
# The dicts with "type" == "str" are the ones that are python base classes
|
||||
# and most likely have a "value" key
|
||||
|
||||
# So for each key besides "_type" in the template dict, we have a dict
|
||||
# with a "type" key. If the type is not "str", then we need to get the
|
||||
# edge that connects to that node and get the Node with the required data
|
||||
# and use that as the value for the param
|
||||
# If the type is "str", then we need to get the value of the "value" key
|
||||
# and use that as the value for the param
|
||||
template_dict = {
|
||||
key: value
|
||||
for key, value in self.data["node"]["template"].items()
|
||||
if isinstance(value, dict)
|
||||
}
|
||||
params = {}
|
||||
for key, value in template_dict.items():
|
||||
if key == "_type":
|
||||
continue
|
||||
# If the type is not transformable to a python base class
|
||||
# then we need to get the edge that connects to this node
|
||||
if value["type"] not in ["str", "bool", "code"]:
|
||||
# Get the edge that connects to this node
|
||||
edge = next(
|
||||
(
|
||||
edge
|
||||
for edge in self.edges
|
||||
if edge.target == self and edge.matched_type in value["type"]
|
||||
),
|
||||
None,
|
||||
)
|
||||
# Get the output of the node that the edge connects to
|
||||
# if the value['list'] is True, then there will be more
|
||||
# than one time setting to params[key]
|
||||
# so we need to append to a list if it exists
|
||||
# or create a new list if it doesn't
|
||||
if edge is None and value["required"]:
|
||||
# break line
|
||||
raise ValueError(
|
||||
f"Required input {key} for module {self.node_type} not found"
|
||||
)
|
||||
if value["list"]:
|
||||
if key in params:
|
||||
params[key].append(edge.source)
|
||||
else:
|
||||
params[key] = [edge.source]
|
||||
elif value["required"] or edge is not None:
|
||||
params[key] = edge.source
|
||||
elif value["required"] or value.get("value"):
|
||||
params[key] = value["value"]
|
||||
|
||||
# Add _type to params
|
||||
self.params = params
|
||||
|
||||
def _build(self):
|
||||
# The params dict is used to build the module
|
||||
# it contains values and keys that point to nodes which
|
||||
# have their own params dict
|
||||
# When build is called, we iterate through the params dict
|
||||
# and if the value is a node, we call build on that node
|
||||
# and use the output of that build as the value for the param
|
||||
# if the value is not a node, then we use the value as the param
|
||||
# and continue
|
||||
# Another aspect is that the node_type is the class that we need to import
|
||||
# and instantiate with these built params
|
||||
|
||||
# Build each node in the params dict
|
||||
for key, value in self.params.items():
|
||||
# Check if Node or list of Nodes
|
||||
if isinstance(value, Node):
|
||||
result = value.build()
|
||||
# If the key is "func", then we need to use the run method
|
||||
if key == "func" and not isinstance(result, types.FunctionType):
|
||||
# func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
|
||||
# so we need to check if there is an attribute called run
|
||||
if hasattr(result, "run"):
|
||||
result = result.run # type: ignore
|
||||
elif hasattr(result, "get_function"):
|
||||
result = result.get_function() # type: ignore
|
||||
self.params[key] = result
|
||||
elif isinstance(value, list) and all(
|
||||
isinstance(node, Node) for node in value
|
||||
):
|
||||
self.params[key] = [node.build() for node in value] # type: ignore
|
||||
|
||||
# Get the class from LANGCHAIN_TYPES_DICT
|
||||
# and instantiate it with the params
|
||||
# and return the instance
|
||||
for base_type, value in ALL_TYPES_DICT.items():
|
||||
if base_type == "tools":
|
||||
value = TOOLS_DICT
|
||||
|
||||
if self.node_type in value:
|
||||
self._built_object = loading.instantiate_class(
|
||||
node_type=self.node_type,
|
||||
base_type=base_type,
|
||||
params=self.params,
|
||||
)
|
||||
break
|
||||
|
||||
if self._built_object is None:
|
||||
raise ValueError(f"Node type {self.node_type} not found")
|
||||
|
||||
self._built = True
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
if not self._built or force:
|
||||
self._build()
|
||||
return deepcopy(self._built_object)
|
||||
|
||||
def add_edge(self, edge: "Edge") -> None:
|
||||
self.edges.append(edge)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"Node(id={self.id}, data={self.data})"
|
||||
|
||||
def __eq__(self, __o: object) -> bool:
|
||||
return self.id == __o.id if isinstance(__o, Node) else False
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return id(self)
|
||||
|
||||
|
||||
class AgentNode(Node):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data)
|
||||
self.tools: List[ToolNode] = []
|
||||
self.chains: List[ChainNode] = []
|
||||
|
||||
def _set_tools_and_chains(self) -> None:
|
||||
for edge in self.edges:
|
||||
source_node = edge.source
|
||||
if isinstance(source_node, ToolNode):
|
||||
self.tools.append(source_node)
|
||||
elif isinstance(source_node, ChainNode):
|
||||
self.chains.append(source_node)
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
if not self._built or force:
|
||||
self._set_tools_and_chains()
|
||||
# First, build the tools
|
||||
for tool_node in self.tools:
|
||||
tool_node.build()
|
||||
|
||||
# Next, build the chains and the rest
|
||||
for chain_node in self.chains:
|
||||
chain_node.build(tools=self.tools)
|
||||
|
||||
self._build()
|
||||
return deepcopy(self._built_object)
|
||||
|
||||
|
||||
class Edge:
    """Directed connection between two nodes, validated on construction."""

    def __init__(self, source: "Node", target: "Node"):
        self.source: "Node" = source
        self.target: "Node" = target
        self.validate_edge()

    def validate_edge(self) -> None:
        """Check that one of the source's output types satisfies an input
        the target requires (or optionally accepts)."""
        self.source_types = self.source.output
        self.target_reqs = self.target.required_inputs + self.target.optional_inputs
        # Both lists hold strings, and a requirement may merely *contain*
        # the output type (e.g. output "Chain" satisfies requirement
        # "LLMChain"), so matching is a substring test, not equality.
        self.valid = any(
            out in req
            for out in self.source_types
            for req in self.target_reqs
        )
        # Remember the first output type that satisfied a requirement (if
        # any) — this is the input type the target effectively receives.
        self.matched_type = next(
            (
                out
                for out in self.source_types
                for req in self.target_reqs
                if out in req
            ),
            None,
        )

    def __repr__(self) -> str:
        return (
            f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
            f", matched_type={self.matched_type})"
        )
|
||||
|
||||
|
||||
class ToolNode(Node):
    """Node wrapping a tool; builds it once and hands out deep copies."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(self, force: bool = False) -> Any:
        """Build the tool (skipped if already built, unless forced)."""
        if force or not self._built:
            self._build()
        return deepcopy(self._built_object)
|
||||
|
||||
|
||||
class PromptNode(Node):
    """Node wrapping a prompt template."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the prompt; a ZeroShotPrompt additionally needs its tools
        built and injected as a parameter before construction."""
        if force or not self._built:
            if self.node_type == "ZeroShotPrompt":
                built_tools = [] if tools is None else [t.build() for t in tools]
                self.params["tools"] = built_tools
            self._build()
        return deepcopy(self._built_object)
|
||||
|
||||
|
||||
class ChainNode(Node):
    """Node wrapping a chain."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the chain, first building any PromptNode parameter it has
        (forwarding the tools, which a ZeroShotPrompt may need)."""
        if force or not self._built:
            for key, value in self.params.items():
                if isinstance(value, PromptNode):
                    self.params[key] = value.build(tools=tools, force=force)
            self._build()
        return deepcopy(self._built_object)
|
||||
from langflow.graph.base import Node, Edge
|
||||
from langflow.graph.nodes import (
|
||||
AgentNode,
|
||||
ChainNode,
|
||||
PromptNode,
|
||||
ToolkitNode,
|
||||
ToolNode,
|
||||
)
|
||||
|
||||
|
||||
class Graph:
|
||||
|
|
@ -373,6 +94,8 @@ class Graph:
|
|||
nodes.append(ChainNode(node))
|
||||
elif "tool" in node_type.lower() or node_lc_type in ALL_TOOLS_NAMES:
|
||||
nodes.append(ToolNode(node))
|
||||
elif "toolkit" in node_type.lower():
|
||||
nodes.append(ToolkitNode(node))
|
||||
else:
|
||||
nodes.append(Node(node))
|
||||
return nodes
|
||||
|
|
|
|||
99
src/backend/langflow/graph/nodes.py
Normal file
99
src/backend/langflow/graph/nodes.py
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
from copy import deepcopy
|
||||
import types
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
|
||||
from langflow.interface.listing import ALL_TYPES_DICT, TOOLS_DICT
|
||||
from langflow.interface import loading
|
||||
from langflow.graph.base import Node
|
||||
|
||||
|
||||
class AgentNode(Node):
    """Node wrapping an agent, which aggregates tool and chain nodes."""

    def __init__(self, data: Dict):
        super().__init__(data)
        # Incoming tool/chain nodes; populated by _set_tools_and_chains.
        self.tools: List[ToolNode] = []
        self.chains: List[ChainNode] = []

    def _set_tools_and_chains(self) -> None:
        """Partition the source nodes of incoming edges into tools and chains."""
        for edge in self.edges:
            node = edge.source
            if isinstance(node, ToolNode):
                self.tools.append(node)
            elif isinstance(node, ChainNode):
                self.chains.append(node)

    def build(self, force: bool = False) -> Any:
        """Build the agent and return a deep copy of the built object.

        Tools are built first, then chains (which may consume the tools),
        then the agent itself.
        """
        if force or not self._built:
            self._set_tools_and_chains()
            for tool in self.tools:
                tool.build()
            for chain in self.chains:
                chain.build(tools=self.tools)
            self._build()
        return deepcopy(self._built_object)
|
||||
|
||||
|
||||
class ToolNode(Node):
    """Node wrapping a tool; builds it once and hands out deep copies."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(self, force: bool = False) -> Any:
        """Build the tool (skipped if already built, unless forced)."""
        if force or not self._built:
            self._build()
        return deepcopy(self._built_object)
|
||||
|
||||
|
||||
class PromptNode(Node):
    """Node wrapping a prompt template."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the prompt; a ZeroShotPrompt additionally needs its tools
        built and injected as a parameter before construction."""
        if force or not self._built:
            if self.node_type == "ZeroShotPrompt":
                built_tools = [] if tools is None else [t.build() for t in tools]
                self.params["tools"] = built_tools
            self._build()
        return deepcopy(self._built_object)
|
||||
|
||||
|
||||
class ChainNode(Node):
    """Node wrapping a chain."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        """Build the chain, first building any PromptNode parameter it has
        (forwarding the tools, which a ZeroShotPrompt may need)."""
        if force or not self._built:
            for key, value in self.params.items():
                if isinstance(value, PromptNode):
                    self.params[key] = value.build(tools=tools, force=force)
            self._build()
        return deepcopy(self._built_object)
|
||||
|
||||
|
||||
class ToolkitNode(Node):
    """Node wrapping an agent toolkit; builds once, returns deep copies."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(self, force: bool = False) -> Any:
        """Build the toolkit (skipped if already built, unless forced)."""
        if force or not self._built:
            self._build()
        return deepcopy(self._built_object)
|
||||
|
|
@ -1,8 +1,11 @@
|
|||
## LLM
|
||||
from typing import Any
|
||||
|
||||
from langchain import llms
|
||||
from langchain import llms, requests
|
||||
from langchain.llms.openai import OpenAIChat
|
||||
from langchain.agents import agent_toolkits
|
||||
from langflow.interface.importing.utils import import_class
|
||||
|
||||
|
||||
llm_type_to_cls_dict = llms.type_to_cls_dict
|
||||
llm_type_to_cls_dict["openai-chat"] = OpenAIChat
|
||||
|
|
@ -41,3 +44,21 @@ memory_type_to_cls_dict: dict[str, Any] = {
|
|||
|
||||
# chain_type_to_cls_dict = type_to_loader_dict
|
||||
# chain_type_to_cls_dict["conversation_chain"] = ConversationChain
|
||||
|
||||
toolkit_type_to_loader_dict: dict[str, Any] = {
|
||||
toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
|
||||
# if toolkit_name is lower case it is a loader
|
||||
for toolkit_name in agent_toolkits.__all__
|
||||
if toolkit_name.islower()
|
||||
}
|
||||
|
||||
toolkit_type_to_cls_dict: dict[str, Any] = {
|
||||
toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
|
||||
# if toolkit_name is not lower case it is a class
|
||||
for toolkit_name in agent_toolkits.__all__
|
||||
if not toolkit_name.islower()
|
||||
}
|
||||
|
||||
wrapper_type_to_cls_dict: dict[str, Any] = {
|
||||
wrapper.__name__: wrapper for wrapper in [requests.RequestsWrapper]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
from langchain import agents, chains, prompts
|
||||
|
||||
from langchain.agents import agent_toolkits
|
||||
from langchain import requests
|
||||
from langflow.custom import customs
|
||||
from langflow.interface.custom_lists import (
|
||||
llm_type_to_cls_dict,
|
||||
|
|
@ -10,11 +11,14 @@ from langflow.utils import util
|
|||
from langchain.agents.load_tools import get_all_tool_names
|
||||
from langchain.agents import Tool
|
||||
from langflow.interface.custom_types import PythonFunction
|
||||
from langchain.tools.json.tool import JsonSpec
|
||||
|
||||
|
||||
OTHER_TOOLS = {"JsonSpec": JsonSpec}
|
||||
CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
|
||||
TOOLS_DICT = util.get_tools_dict()
|
||||
ALL_TOOLS_NAMES = set(get_all_tool_names() + list(CUSTOM_TOOLS.keys()))
|
||||
ALL_TOOLS_NAMES = set(
|
||||
get_all_tool_names() + list(CUSTOM_TOOLS.keys()) + list(OTHER_TOOLS.keys())
|
||||
)
|
||||
|
||||
|
||||
def get_type_dict():
|
||||
|
|
@ -25,6 +29,8 @@ def get_type_dict():
|
|||
"llms": list_llms,
|
||||
"tools": list_tools,
|
||||
"memories": list_memories,
|
||||
"toolkits": list_toolkis,
|
||||
"wrappers": list_wrappers,
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -33,6 +39,11 @@ def list_type(object_type: str):
|
|||
return get_type_dict().get(object_type, lambda: None)()
|
||||
|
||||
|
||||
def list_wrappers():
    """List all wrapper types"""
    # Only the langchain RequestsWrapper is exposed at the moment.
    return [requests.RequestsWrapper.__name__]
|
||||
|
||||
|
||||
def list_agents():
|
||||
"""List all agent types"""
|
||||
return [
|
||||
|
|
@ -42,6 +53,11 @@ def list_agents():
|
|||
]
|
||||
|
||||
|
||||
def list_toolkis():
    """List all toolkit types"""
    # NOTE(review): function name is misspelled ("toolkis") but is referenced
    # elsewhere under that name, so it is kept for compatibility.
    return agent_toolkits.__all__
|
||||
|
||||
|
||||
def list_prompts():
|
||||
"""List all prompt types"""
|
||||
custom_prompts = customs.get_custom_nodes("prompts")
|
||||
|
|
@ -60,6 +76,10 @@ def list_tools():
|
|||
|
||||
for tool in ALL_TOOLS_NAMES:
|
||||
tool_params = util.get_tool_params(util.get_tool_by_name(tool))
|
||||
|
||||
if "name" not in tool_params:
|
||||
tool_params["name"] = tool
|
||||
|
||||
if tool_params and (
|
||||
tool_params.get("name") in settings.tools
|
||||
or (tool_params.get("name") and settings.dev)
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
|
|||
|
||||
def load_flow_from_json(path: str):
|
||||
# This is done to avoid circular imports
|
||||
from langflow.graph.graph import Graph
|
||||
from langflow.graph import Graph
|
||||
|
||||
"""Load flow from json file"""
|
||||
with open(path, "r") as f:
|
||||
|
|
|
|||
|
|
@ -12,7 +12,11 @@ from langflow.custom import customs
|
|||
from langflow.interface.custom_lists import (
|
||||
llm_type_to_cls_dict,
|
||||
memory_type_to_cls_dict,
|
||||
toolkit_type_to_cls_dict,
|
||||
toolkit_type_to_loader_dict,
|
||||
wrapper_type_to_cls_dict,
|
||||
)
|
||||
|
||||
from langflow.interface.listing import CUSTOM_TOOLS, ALL_TOOLS_NAMES
|
||||
from langflow.template.template import Field, Template
|
||||
from langflow.utils import util
|
||||
|
|
@ -21,15 +25,44 @@ from langflow.utils import util
|
|||
def get_signature(name: str, object_type: str):
    """Get the signature of an object."""
    # Dispatch on the object category; unknown categories fall back to an
    # error-message string rather than raising.
    dispatch = {
        "toolkits": get_toolkit_signature,
        "chains": get_chain_signature,
        "agents": get_agent_signature,
        "prompts": get_prompt_signature,
        "llms": get_llm_signature,
        # "memories": get_memory_signature,
        "tools": get_tool_signature,
        "wrappers": get_wrapper_signature,
    }
    handler = dispatch.get(object_type, lambda name: f"Invalid type: {name}")
    return handler(name)
|
||||
|
||||
|
||||
def get_toolkit_signature(name: str):
    """Get the signature of a toolkit."""
    try:
        if not name.islower():
            # Capitalised names are toolkit classes.
            return util.build_template_from_class(
                name, toolkit_type_to_cls_dict, add_function=True
            )
        # Lower-case names are loader functions; building their template is
        # not implemented yet, so they produce no signature (None).
        # return util.build_template_from_function(
        #     name, toolkit_type_to_loader_dict, add_function=True
        # )
        return None
    except ValueError as exc:
        raise ValueError("Toolkit not found") from exc
|
||||
|
||||
|
||||
def get_wrapper_signature(name: str):
    """Get the signature of a wrapper."""
    try:
        return util.build_template_from_class(name, wrapper_type_to_cls_dict)
    except ValueError as exc:
        raise ValueError("Wrapper not found") from exc
|
||||
|
||||
|
||||
def get_chain_signature(name: str):
|
||||
"""Get the chain type by signature."""
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -33,4 +33,18 @@ def build_langchain_types_dict():
|
|||
for memory in list_type("memories")
|
||||
},
|
||||
"tools": {tool: get_signature(tool, "tools") for tool in list_type("tools")},
|
||||
"toolkits": get_toolkits(),
|
||||
"wrappers": {
|
||||
wrapper: get_signature(wrapper, "wrappers")
|
||||
for wrapper in list_type("wrappers")
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def get_toolkits():
    """Get a list of all toolkits"""
    # Loader-style toolkits currently yield no signature (get_signature
    # returns None for them), so only keep truthy signatures.
    return {
        toolkit: sig
        for toolkit in list_type("toolkits")
        if (sig := get_signature(toolkit, "toolkits"))
    }
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import importlib
|
|||
import inspect
|
||||
import re
|
||||
from typing import Dict, Optional, Union
|
||||
|
||||
from langchain.agents.load_tools import (
|
||||
_BASE_TOOLS,
|
||||
_EXTRA_LLM_TOOLS,
|
||||
|
|
@ -18,6 +17,49 @@ from langchain.agents.tools import Tool
|
|||
from langflow.utils import constants
|
||||
|
||||
|
||||
def build_template_from_parameters(
    name: str, type_to_loader_dict: Dict, add_function: bool = False
):
    """Build a frontend template dict from a loader function's parameters.

    Looks up the function whose ``__name__`` equals ``name`` in
    ``type_to_loader_dict`` and converts each annotated parameter into a
    template variable.

    Args:
        name: Name of the loader function to describe.
        type_to_loader_dict: Mapping whose values are candidate functions.
        add_function: When True, append "function" to the base classes.

    Returns:
        Dict with "template", "description" and "base_classes" keys.

    Raises:
        ValueError: If no function named ``name`` is found.
    """
    # Retrieve the function that matches the provided name.
    func = next(
        (v for v in type_to_loader_dict.values() if v.__name__ == name),
        None,
    )
    if func is None:
        raise ValueError(f"{name} not found")

    # Turn each annotated parameter into a template variable, skipping the
    # return annotation and the catch-all **kwargs.
    parameters = func.__annotations__
    variables = {}
    for param_name, param_type in parameters.items():
        if param_name in ("return", "kwargs"):
            continue

        # __repr_args__ is only present on pydantic-style annotation objects
        # (assumption carried over from the original code — TODO confirm);
        # plain types would raise AttributeError, so fall back to None.
        repr_args = getattr(param_type, "__repr_args__", None)
        variables[param_name] = {
            "type": param_type.__name__,
            "default": repr_args()[0][1] if repr_args is not None else None,
            "placeholder": "",
        }

    # Derive base classes from the return annotation, if any.
    return_type = parameters.get("return")
    base_classes = get_base_classes(return_type) if return_type else []
    if add_function:
        base_classes.append("function")

    # inspect.getdoc returns a plain string (or None); the original code
    # subscripted it with docs["Description"], which always raises TypeError
    # on a str — use the docstring directly instead.
    docs = inspect.getdoc(func) or ""

    return {
        "template": format_dict(variables, name),
        "description": docs,
        "base_classes": base_classes,
    }
|
||||
|
||||
|
||||
def build_template_from_function(
|
||||
name: str, type_to_loader_dict: Dict, add_function: bool = False
|
||||
):
|
||||
|
|
@ -37,7 +79,7 @@ def build_template_from_function(
|
|||
|
||||
variables = {"_type": _type}
|
||||
for class_field_items, value in _class.__fields__.items():
|
||||
if class_field_items in ["callback_manager", "requests_wrapper"]:
|
||||
if class_field_items in ["callback_manager"]:
|
||||
continue
|
||||
variables[class_field_items] = {}
|
||||
for name_, value_ in value.__repr_args__():
|
||||
|
|
@ -150,7 +192,7 @@ def get_default_factory(module: str, function: str):
|
|||
|
||||
def get_tools_dict():
|
||||
"""Get the tools dictionary."""
|
||||
from langflow.interface.listing import CUSTOM_TOOLS
|
||||
from langflow.interface.listing import CUSTOM_TOOLS, OTHER_TOOLS
|
||||
|
||||
tools = {
|
||||
**_BASE_TOOLS,
|
||||
|
|
@ -158,6 +200,7 @@ def get_tools_dict():
|
|||
**{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()},
|
||||
**{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
|
||||
**CUSTOM_TOOLS,
|
||||
**OTHER_TOOLS,
|
||||
}
|
||||
return tools
|
||||
|
||||
|
|
@ -170,15 +213,15 @@ def get_tool_by_name(name: str):
|
|||
return tools[name]
|
||||
|
||||
|
||||
def get_tool_params(tool, **kwargs) -> Union[Dict, None]:
|
||||
def get_tool_params(tool, **kwargs) -> Dict:
|
||||
# Parse the function code into an abstract syntax tree
|
||||
# Define if it is a function or a class
|
||||
if inspect.isfunction(tool):
|
||||
return get_func_tool_params(tool, **kwargs)
|
||||
return get_func_tool_params(tool, **kwargs) or {}
|
||||
elif inspect.isclass(tool):
|
||||
# Get the parameters necessary to
|
||||
# instantiate the class
|
||||
return get_class_tool_params(tool, **kwargs)
|
||||
return get_class_tool_params(tool, **kwargs) or {}
|
||||
else:
|
||||
raise ValueError("Tool must be a function or class.")
|
||||
|
||||
|
|
@ -373,7 +416,20 @@ def format_dict(d, name: Optional[str] = None):
|
|||
)
|
||||
|
||||
# Add multline
|
||||
value["multiline"] = key in ["suffix", "prefix", "template", "examples", "code"]
|
||||
value["multiline"] = key in [
|
||||
"suffix",
|
||||
"prefix",
|
||||
"template",
|
||||
"examples",
|
||||
"code",
|
||||
"headers",
|
||||
]
|
||||
|
||||
# Replace dict type with str
|
||||
if "dict" in value["type"].lower():
|
||||
value["type"] = "str"
|
||||
|
||||
value["file"] = key in ["dict_"]
|
||||
|
||||
# Replace default value with actual value
|
||||
if "default" in value:
|
||||
|
|
|
|||
|
|
@ -5,8 +5,10 @@ import {
|
|||
LightBulbIcon,
|
||||
CommandLineIcon,
|
||||
WrenchScrewdriverIcon,
|
||||
WrenchIcon,
|
||||
ComputerDesktopIcon,
|
||||
Bars3CenterLeftIcon,
|
||||
GiftIcon,
|
||||
PaperClipIcon,
|
||||
QuestionMarkCircleIcon,
|
||||
} from "@heroicons/react/24/outline";
|
||||
|
|
@ -88,6 +90,8 @@ export const nodeNames:{[char: string]: string} = {
|
|||
advanced: "Advanced",
|
||||
chat: "Chat",
|
||||
docloaders:"Document Loader",
|
||||
toolkits:"Toolkits",
|
||||
wrappers:"Wrappers",
|
||||
unknown:"Unknown"
|
||||
};
|
||||
|
||||
|
|
@ -97,10 +101,12 @@ export const nodeIcons:{[char: string]: React.ForwardRefExoticComponent<React.SV
|
|||
memories: CpuChipIcon,
|
||||
llms: LightBulbIcon,
|
||||
prompts: CommandLineIcon,
|
||||
tools: WrenchScrewdriverIcon,
|
||||
tools: WrenchIcon,
|
||||
advanced: ComputerDesktopIcon,
|
||||
chat: Bars3CenterLeftIcon,
|
||||
docloaders:Bars3CenterLeftIcon,
|
||||
toolkits:WrenchScrewdriverIcon,
|
||||
wrappers:GiftIcon,
|
||||
unknown:QuestionMarkCircleIcon
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import json
|
||||
from langflow.graph.graph import Edge, Graph, Node
|
||||
from langflow.graph import Edge, Node, Graph
|
||||
import pytest
|
||||
from langflow.utils.payload import build_json, get_root_node
|
||||
from langchain.agents import AgentExecutor
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import json
|
||||
from langflow.graph.graph import Graph
|
||||
from langflow.graph import Graph
|
||||
import pytest
|
||||
|
||||
from langflow import load_flow_from_json
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue