Merge pull request #79 from logspace-ai/45-implement-agents-as-tools

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-03-28 22:08:38 -03:00 committed by GitHub
commit 9e48ddf780
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
47 changed files with 3267 additions and 433 deletions

View file

@ -6,7 +6,7 @@ on:
pull_request:
env:
POETRY_VERSION: "1.3.1"
POETRY_VERSION: "1.4.0"
jobs:
build:

View file

@ -10,7 +10,7 @@ on:
- "pyproject.toml"
env:
POETRY_VERSION: "1.3.1"
POETRY_VERSION: "1.4.0"
jobs:
if_release:

33
.github/workflows/test.yml vendored Normal file
View file

@ -0,0 +1,33 @@
name: test
on:
push:
branches: [main]
pull_request:
branches: [dev]
env:
POETRY_VERSION: "1.4.0"
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.10"
- "3.11"
steps:
- uses: actions/checkout@v3
- name: Install poetry
run: pipx install poetry==$POETRY_VERSION
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install dependencies
run: poetry install
- name: Run unit tests
run: |
make test

546
poetry.lock generated
View file

@ -220,14 +220,14 @@ files = [
[[package]]
name = "beautifulsoup4"
version = "4.11.2"
version = "4.12.0"
description = "Screen-scraping library"
category = "main"
optional = false
python-versions = ">=3.6.0"
files = [
{file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"},
{file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"},
{file = "beautifulsoup4-4.12.0-py3-none-any.whl", hash = "sha256:2130a5ad7f513200fae61a17abb5e338ca980fa28c439c0571014bc0217e9591"},
{file = "beautifulsoup4-4.12.0.tar.gz", hash = "sha256:c5fceeaec29d09c84970e47c65f2f0efe57872f7cff494c9691a26ec0ff13234"},
]
[package.dependencies]
@ -502,21 +502,23 @@ files = [
[[package]]
name = "comm"
version = "0.1.2"
version = "0.1.3"
description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc."
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"},
{file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"},
{file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"},
{file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"},
]
[package.dependencies]
traitlets = ">=5.3"
[package.extras]
lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"]
test = ["pytest"]
typing = ["mypy (>=0.990)"]
[[package]]
name = "dataclasses-json"
@ -577,6 +579,21 @@ files = [
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "exceptiongroup"
version = "1.1.1"
description = "Backport of PEP 654 (exception groups)"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
{file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "executing"
version = "1.2.0"
@ -723,14 +740,14 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"]
[[package]]
name = "google-api-python-client"
version = "2.81.0"
version = "2.83.0"
description = "Google API Client Library for Python"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-api-python-client-2.81.0.tar.gz", hash = "sha256:8faab0b9b19d3797b455d33320c643253b6761fd0d3f3adb54792ab155d0795a"},
{file = "google_api_python_client-2.81.0-py2.py3-none-any.whl", hash = "sha256:ad6700ae3a76ead8956d7f30935978cea308530e342ad8c1e26a4e40fc05c054"},
{file = "google-api-python-client-2.83.0.tar.gz", hash = "sha256:d07509f1b2d2b2427363b454db996f7a15e1751a48cfcaf28427050560dd51cf"},
{file = "google_api_python_client-2.83.0-py2.py3-none-any.whl", hash = "sha256:afa7fe2a5d77e8f136cdb8f40a120dd6660c2292f791c1b22734dfe786bd1dac"},
]
[package.dependencies]
@ -742,14 +759,14 @@ uritemplate = ">=3.0.1,<5"
[[package]]
name = "google-auth"
version = "2.16.2"
version = "2.17.0"
description = "Google Authentication Library"
category = "main"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*"
files = [
{file = "google-auth-2.16.2.tar.gz", hash = "sha256:07e14f34ec288e3f33e00e2e3cc40c8942aa5d4ceac06256a28cd8e786591420"},
{file = "google_auth-2.16.2-py2.py3-none-any.whl", hash = "sha256:2fef3cf94876d1a0e204afece58bb4d83fb57228aaa366c64045039fda6770a2"},
{file = "google-auth-2.17.0.tar.gz", hash = "sha256:f51d26ebb3e5d723b9a7dbd310b6c88654ef1ad1fc35750d1fdba48ca4d82f52"},
{file = "google_auth-2.17.0-py2.py3-none-any.whl", hash = "sha256:45ba9b4b3e49406de3c5451697820694b2f6ce8a6b75bb187852fdae231dab94"},
]
[package.dependencies]
@ -798,14 +815,14 @@ requests = "*"
[[package]]
name = "googleapis-common-protos"
version = "1.58.0"
version = "1.59.0"
description = "Common protobufs used in Google APIs"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "googleapis-common-protos-1.58.0.tar.gz", hash = "sha256:c727251ec025947d545184ba17e3578840fc3a24a0516a020479edab660457df"},
{file = "googleapis_common_protos-1.58.0-py2.py3-none-any.whl", hash = "sha256:ca3befcd4580dab6ad49356b46bf165bb68ff4b32389f028f1abd7c10ab9519a"},
{file = "googleapis-common-protos-1.59.0.tar.gz", hash = "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44"},
{file = "googleapis_common_protos-1.59.0-py2.py3-none-any.whl", hash = "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f"},
]
[package.dependencies]
@ -921,21 +938,67 @@ files = [
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
[[package]]
name = "httpcore"
version = "0.16.3"
description = "A minimal low-level HTTP client."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"},
{file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"},
]
[package.dependencies]
anyio = ">=3.0,<5.0"
certifi = "*"
h11 = ">=0.13,<0.15"
sniffio = ">=1.0.0,<2.0.0"
[package.extras]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "httplib2"
version = "0.21.0"
version = "0.22.0"
description = "A comprehensive HTTP client library."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "httplib2-0.21.0-py3-none-any.whl", hash = "sha256:987c8bb3eb82d3fa60c68699510a692aa2ad9c4bd4f123e51dfb1488c14cdd01"},
{file = "httplib2-0.21.0.tar.gz", hash = "sha256:fc144f091c7286b82bec71bdbd9b27323ba709cc612568d3000893bfd9cb4b34"},
{file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"},
{file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"},
]
[package.dependencies]
pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""}
[[package]]
name = "httpx"
version = "0.23.3"
description = "The next generation HTTP client."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"},
{file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"},
]
[package.dependencies]
certifi = "*"
httpcore = ">=0.15.0,<0.17.0"
rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
sniffio = "*"
[package.extras]
brotli = ["brotli", "brotlicffi"]
cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "idna"
version = "3.4"
@ -968,16 +1031,28 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker
perf = ["ipython"]
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "ipykernel"
version = "6.21.3"
version = "6.22.0"
description = "IPython Kernel for Jupyter"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipykernel-6.21.3-py3-none-any.whl", hash = "sha256:24ebd9715e317c185e37156ab3a87382410185230dde7aeffce389d6c7d4428a"},
{file = "ipykernel-6.21.3.tar.gz", hash = "sha256:c8ff581905d70e7299bc1473a2f7c113bec1744fb3746d58e5b4b93bd8ee7001"},
{file = "ipykernel-6.22.0-py3-none-any.whl", hash = "sha256:1ae6047c1277508933078163721bbb479c3e7292778a04b4bacf0874550977d6"},
{file = "ipykernel-6.22.0.tar.gz", hash = "sha256:302558b81f1bc22dc259fb2a0c5c7cf2f4c0bdb21b50484348f7bafe7fb71421"},
]
[package.dependencies]
@ -1063,14 +1138,14 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
[[package]]
name = "jupyter-client"
version = "8.0.3"
version = "8.1.0"
description = "Jupyter protocol implementation and client libraries"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "jupyter_client-8.0.3-py3-none-any.whl", hash = "sha256:be48ac6bd659cbbddb7a674cf06b3b8afbf53f228253cf58bde604c03bd487b0"},
{file = "jupyter_client-8.0.3.tar.gz", hash = "sha256:ed65498bea6d876ef9d8da3e0db3dd33c5d129f5b2645f56ae03993782966bd0"},
{file = "jupyter_client-8.1.0-py3-none-any.whl", hash = "sha256:d5b8e739d7816944be50f81121a109788a3d92732ecf1ad1e4dadebc948818fe"},
{file = "jupyter_client-8.1.0.tar.gz", hash = "sha256:3fbab64100a0dcac7701b1e0f1a4412f1ccb45546ff2ad9bc4fcbe4e19804811"},
]
[package.dependencies]
@ -1132,6 +1207,31 @@ tenacity = ">=8.1.0,<9.0.0"
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.2,<0.3.0)", "beautifulsoup4 (>=4,<5)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
llms = ["anthropic (>=0.2.2,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"]
[[package]]
name = "markdown-it-py"
version = "2.2.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
{file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
]
[package.dependencies]
mdurl = ">=0.1,<1.0"
[package.extras]
benchmarking = ["psutil", "pytest", "pytest-benchmark"]
code-style = ["pre-commit (>=3.0,<4.0)"]
compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "marshmallow"
version = "3.19.0"
@ -1183,6 +1283,18 @@ files = [
[package.dependencies]
traitlets = "*"
[[package]]
name = "mdurl"
version = "0.1.2"
description = "Markdown URL utilities"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
[[package]]
name = "multidict"
version = "6.0.4"
@ -1468,19 +1580,35 @@ files = [
[[package]]
name = "platformdirs"
version = "3.1.1"
version = "3.2.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"},
{file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"},
{file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"},
{file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"},
]
[package.extras]
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "pluggy"
version = "1.0.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "prompt-toolkit"
@ -1615,48 +1743,48 @@ files = [
[[package]]
name = "pydantic"
version = "1.10.6"
version = "1.10.7"
description = "Data validation and settings management using python type hints"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-1.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31"},
{file = "pydantic-1.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160"},
{file = "pydantic-1.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083"},
{file = "pydantic-1.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4"},
{file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084"},
{file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb"},
{file = "pydantic-1.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7"},
{file = "pydantic-1.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b"},
{file = "pydantic-1.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d"},
{file = "pydantic-1.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7"},
{file = "pydantic-1.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d"},
{file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186"},
{file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70"},
{file = "pydantic-1.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4"},
{file = "pydantic-1.10.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65"},
{file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2"},
{file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2"},
{file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a"},
{file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd"},
{file = "pydantic-1.10.6-cp37-cp37m-win_amd64.whl", hash = "sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb"},
{file = "pydantic-1.10.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6"},
{file = "pydantic-1.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77"},
{file = "pydantic-1.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832"},
{file = "pydantic-1.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d"},
{file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c"},
{file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f"},
{file = "pydantic-1.10.6-cp38-cp38-win_amd64.whl", hash = "sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35"},
{file = "pydantic-1.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7"},
{file = "pydantic-1.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d"},
{file = "pydantic-1.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f"},
{file = "pydantic-1.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62"},
{file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc"},
{file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a"},
{file = "pydantic-1.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06"},
{file = "pydantic-1.10.6-py3-none-any.whl", hash = "sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0"},
{file = "pydantic-1.10.6.tar.gz", hash = "sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd"},
{file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"},
{file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"},
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"},
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"},
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"},
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"},
{file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"},
{file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"},
{file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"},
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"},
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"},
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"},
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"},
{file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"},
{file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"},
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"},
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"},
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"},
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"},
{file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"},
{file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"},
{file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"},
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"},
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"},
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"},
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"},
{file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"},
{file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"},
{file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"},
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"},
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"},
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"},
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"},
{file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"},
{file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"},
{file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"},
]
[package.dependencies]
@ -1696,6 +1824,30 @@ files = [
[package.extras]
diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "7.2.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"},
{file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"},
]
[package.dependencies]
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
@ -1713,26 +1865,26 @@ six = ">=1.5"
[[package]]
name = "pywin32"
version = "305"
version = "306"
description = "Python for Window Extensions"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"},
{file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"},
{file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"},
{file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"},
{file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"},
{file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"},
{file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"},
{file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"},
{file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"},
{file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"},
{file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"},
{file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"},
{file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"},
{file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"},
{file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
{file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
{file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
{file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
{file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
{file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
{file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
{file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
{file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
{file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
{file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
{file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
{file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
{file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
]
[[package]]
@ -1787,89 +1939,89 @@ files = [
[[package]]
name = "pyzmq"
version = "25.0.1"
version = "25.0.2"
description = "Python bindings for 0MQ"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "pyzmq-25.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:94f65e13e6df035b0ae90d49adfe7891aa4e7bdeaa65265729fecc04ab3eb0fe"},
{file = "pyzmq-25.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0399450d970990705ce47ed65f5efed3e4627dfc80628c3798100e7b72e023b"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f29709b0431668a967d7ff0394b00a865e7b7dde827ee0a47938b705b7c4aec3"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fee9420b34c0ab426f105926a701a3d73f878fe77f07a1b92e0b78d1e2c795c"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57be375c6bc66b0f685cd298e5c1c3d7ee34a254145b8087aed6e25db372b0f3"},
{file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a3309b2c5a5be3d48c9ade77b340361764449aa22854ac65935b1e6c0cdabe2c"},
{file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7574d24579e83ee8c5d3b14769e7ba895161c43a601e911dd89d449e545e00ad"},
{file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:041d617091258133e602919b28fdce4d3e2f8aedcd1e8b34c599653bc288d59e"},
{file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7897ba8c3fedc6b3023bad676ceb69dbf90c077ff18ae3133ba43db47417cc72"},
{file = "pyzmq-25.0.1-cp310-cp310-win32.whl", hash = "sha256:c462f70dadbd4649e572ca7cd1e7cf3305a8c2afc53b84214c0a7c0c3af8a657"},
{file = "pyzmq-25.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e3a721710992cf0e213bbb7be48fb0f32202e8d01f556c196c870373bb9ad4f4"},
{file = "pyzmq-25.0.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:b0a0fcf56279b9f3acc9b36a83feb7640c51b0db444b6870e4406d002be1d514"},
{file = "pyzmq-25.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95aff52fc847ea5755d2370f86e379ba2ed6eb67a0a6f90f0e8e99c553693b81"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b55366e6c11e1ef7403d072b9867b62cf63eebd31dd038ef65bc8d65572854f6"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64a2bc72bcad705ee42a8fe877478ddadb7e260e806562833d3d814125e28a44"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca66aa24422d7f324acd5cb7fc7df616eb6f0205e059393fb108702e33e90c7"},
{file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:58d5dfec2e2befd09b04c4683b3c984d2203cf6e054d0f9786be3826737ad612"},
{file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3549292d65987e422e2c9f105b1485448381f489d8a6b6b040fc8b8f497bd578"},
{file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5b1ca8b0df50d1ac88857ffe9ebd1347e0a5bb5f6e1d99940fdd7df0ffdefb49"},
{file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1a107e89cdcf799060ba4fa85fd3c942e19df7b24eb2600618b2406cc73c18e"},
{file = "pyzmq-25.0.1-cp311-cp311-win32.whl", hash = "sha256:0f22ba4e9041549a5a3f5a545169dda52fa0aa7b5ef46b336cbe6679c4c3c134"},
{file = "pyzmq-25.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:0644c0d5c73e4bfeee8148f638ab16ad783df1c4d6c2f968552a26a43fb002a1"},
{file = "pyzmq-25.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c5eb4b17d73b1fc208a4faa6b5918983ccc961770aa37741891f61db302dae4e"},
{file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:649dd55948144a108041397f07c1299086ce1c85c2e166831db3a33dac1d0c7f"},
{file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c99fd8d3efc138d6a7fb1e822133f62bb18ffec66dc6d398dcb2ac2ab8eb2cb0"},
{file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d72d69d4bb37c05a446d10bc40b391cf8fb7572654fb73fa69e7d2a395197e65"},
{file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:036dbf8373aed4ccf56d58c561b23601b8f33919ec1093d8c77b37ac1259702d"},
{file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:861c37649c75a2ecfc2034a32b9d5ca744e1e0cddcbf65afbd8027cf7d9755be"},
{file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:92f04d63aecbb71d41f7db5f988167ef429f96d8197fd46684688cdb513e8a2e"},
{file = "pyzmq-25.0.1-cp36-cp36m-win32.whl", hash = "sha256:866a4e918f1f4b2f83e9982b817df257910e3e50e456ffa74f141a10adcd11d1"},
{file = "pyzmq-25.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:ec29c880b82cd38a63810a93b77e13f167e05732049101947772eed9ae805097"},
{file = "pyzmq-25.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0241a334e39aa74e4ba0ae5f9e29521f1b48b8d56bf707f25f322c04eb423e99"},
{file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b7032f55b1ed2cd8c349a89e467dca2338b7765fab82cb64c3504e49adaf51"},
{file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:960f98f562ee6a50ecf283bc62479d00f5ee10e9068a21683b9e961cd87c9261"},
{file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:835da498b71570d56e5526de4d5b36fa10dd9b8a82e2c405f963afeb51ff5bdc"},
{file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21de2ef6099fa8d6a3c2dc15aaca58e9f9ffdcc7b82a246590aa9564815699d9"},
{file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e448a5a294958e915a7e1b664e6fbfcd3814989d381fb068673317f6f3ea3f8"},
{file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40d909bdc8a2d64ad260925154712602ee6a0425ae0b08bce78a19adfdc2f05b"},
{file = "pyzmq-25.0.1-cp37-cp37m-win32.whl", hash = "sha256:6ff37f2b818df25c887fd40bb434569db7ff66b35f5dfff6f40cc476aee92e3f"},
{file = "pyzmq-25.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66ee27a0221771bbaa2cce456e8ca890569c3d18b08b955eb6420c12516537c"},
{file = "pyzmq-25.0.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1003bbae89435eadec03b4fa3bb6516dd1529fb09ae5704284f7400cc77009ba"},
{file = "pyzmq-25.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dde7a65a8bfa88aa1721add504320f8344272542291ce4e7c77993fa32901567"},
{file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:20b6155429d3b57e9e7bd11f1680985ef8b5b0868f1a64073fb8c01326c7c80c"},
{file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e37a764cbf91c1ed9a02e4fede79a414284aca2a0b7d92d82a3c7b82d678ec2d"},
{file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa56a362066b3a853a64d35693a08046f640961efcc0e7643768916403e72e70"},
{file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c4bdf1241886d39d816535d3ef9fc325bbf02470c9fd5f2cb62706eeb834f7f2"},
{file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:446acbac24427ef42bff61a807ddcad8d03df78fb976184a4d7d6f4b1e7d8a67"},
{file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b39847501d229e5fab155d88a565edfb182cdd3f7046f15a7f2df9c77cdc422d"},
{file = "pyzmq-25.0.1-cp38-cp38-win32.whl", hash = "sha256:cba6b81b653d789d76e438c2e77b49f610b23e84b3bb43b99100f08a0a5d637b"},
{file = "pyzmq-25.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:6eca6b90c4fb290efd27582780b5eaf048887a32b2c5fcd6330819192cb07b38"},
{file = "pyzmq-25.0.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:58207a6709e53b723105bac6bb3c6795ee134f7e71351f39c09d52ac235c6b0d"},
{file = "pyzmq-25.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c62084f37682e7ee4064e8310078be4f6f7687bf528ae5761e2ba7216c5b8949"},
{file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9c44e9f04f8ed99c6f2e9e49f29d400d7557dd9e9e3f64e1e8a595aedc4258a2"},
{file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c635d1c40d341835066931a018e378428dfbe0347ed4bb45a6b57f7d8c34196e"},
{file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef93b5574c9ff36b4be376555efd369bd55b99bcc7be72f23bd38102dd9392b"},
{file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44bc81099ab33388f6c061c1b194307d877428cb2b18282d0385584d5c73ed72"},
{file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6d988844ed6caa21b0076b64671e83a136d93c57f1ae5a72b915661af55d313b"},
{file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9d5eb6e88ae8a8734f239ffe1ed90559a426cf5b859b8ee66e0cd43fc5daf5c9"},
{file = "pyzmq-25.0.1-cp39-cp39-win32.whl", hash = "sha256:f6b45db9de4c8adbf5fda58e827a32315d282cfb01e54dc74e7c7ccc0988c010"},
{file = "pyzmq-25.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:47eeb94b78aa442568b85ad28f85bd37a9c3c34d052cbf8ebf8622c45f23a9cd"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ed7475f3adf0c7750d75740b3267947b501a33f4625ceae709fda2e75ec9ed7"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6d09c22ed4d0afcc662d17c2429a03fc1fae7fe7e3bc1f413e744bccfeaabdc3"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:703ec5f5a8369c09d8f3eb626358bdb590a2b1375bcce8b7da01b3a03f8b8668"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aea31cc0d1f6c3fb4685db08b4c771545cf3fed3c4b4c8942c0a4e97042ec8"},
{file = "pyzmq-25.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b1c03b942557bb366fd3dc377a15763d5d688de1328228136c75e50f968333cc"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e8a5ced9d92837f52ccdae6351c627b5012669727bc3eede2dc0f581eca1d0e"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d78f840d88244272fb7252e47522b1179833aff7ec64583bda3d21259c9c2c20"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c3f78fa80780e24d294f9421123cb3bd3b68677953c53da85273a22d1c983298"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f6de4305e02560111a5d4555758faa85d44a5bff70cccff58dbf30c81a079f0"},
{file = "pyzmq-25.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:34a1b1a8ce9b20e01aba71b9279d9b1d4e5980a6a4e42092180e16628a444ca1"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:625759a0112af7c3fb560de5724d749729f00b901f7625d1a3f3fb38897544b1"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cff159b21438c24476a49865f3d5700c9cc5833600661bc0e672decec2ff357"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc47652d990de9ef967c494c526d73920ef064fef0444355a7cebec6fc50542"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44db5162a6881f7d740dec65917f38f9bfbc5ad9a10e06d7d5deebb27eb63939"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f38bf2c60a3f7b87cf5177043eb7a331a4f53bc9305a2452decbd42ad0c98741"},
{file = "pyzmq-25.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1cf4becd15669bc62a41c1b1bb742e22ac25965134e4254cde82a4dc2554b1b"},
{file = "pyzmq-25.0.1.tar.gz", hash = "sha256:44a24f7ce44e70d20e2a4c9ba5af70b4611df7a4b920eed2c8e0bdd5a5af225f"},
{file = "pyzmq-25.0.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ac178e666c097c8d3deb5097b58cd1316092fc43e8ef5b5fdb259b51da7e7315"},
{file = "pyzmq-25.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:659e62e1cbb063151c52f5b01a38e1df6b54feccfa3e2509d44c35ca6d7962ee"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8280ada89010735a12b968ec3ea9a468ac2e04fddcc1cede59cb7f5178783b9c"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b5eeb5278a8a636bb0abdd9ff5076bcbb836cd2302565df53ff1fa7d106d54"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a2e5fe42dfe6b73ca120b97ac9f34bfa8414feb15e00e37415dbd51cf227ef6"},
{file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:827bf60e749e78acb408a6c5af6688efbc9993e44ecc792b036ec2f4b4acf485"},
{file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b504ae43d37e282301da586529e2ded8b36d4ee2cd5e6db4386724ddeaa6bbc"},
{file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb1f69a0a2a2b1aae8412979dd6293cc6bcddd4439bf07e4758d864ddb112354"},
{file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b9c9cc965cdf28381e36da525dcb89fc1571d9c54800fdcd73e3f73a2fc29bd"},
{file = "pyzmq-25.0.2-cp310-cp310-win32.whl", hash = "sha256:24abbfdbb75ac5039205e72d6c75f10fc39d925f2df8ff21ebc74179488ebfca"},
{file = "pyzmq-25.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a821a506822fac55d2df2085a52530f68ab15ceed12d63539adc32bd4410f6e"},
{file = "pyzmq-25.0.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:9af0bb0277e92f41af35e991c242c9c71920169d6aa53ade7e444f338f4c8128"},
{file = "pyzmq-25.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:54a96cf77684a3a537b76acfa7237b1e79a8f8d14e7f00e0171a94b346c5293e"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88649b19ede1cab03b96b66c364cbbf17c953615cdbc844f7f6e5f14c5e5261c"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:715cff7644a80a7795953c11b067a75f16eb9fc695a5a53316891ebee7f3c9d5"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b3f0f066b4f1d17383aae509bacf833ccaf591184a1f3c7a1661c085063ae"},
{file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d488c5c8630f7e782e800869f82744c3aca4aca62c63232e5d8c490d3d66956a"},
{file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:38d9f78d69bcdeec0c11e0feb3bc70f36f9b8c44fc06e5d06d91dc0a21b453c7"},
{file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3059a6a534c910e1d5d068df42f60d434f79e6cc6285aa469b384fa921f78cf8"},
{file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6526d097b75192f228c09d48420854d53dfbc7abbb41b0e26f363ccb26fbc177"},
{file = "pyzmq-25.0.2-cp311-cp311-win32.whl", hash = "sha256:5c5fbb229e40a89a2fe73d0c1181916f31e30f253cb2d6d91bea7927c2e18413"},
{file = "pyzmq-25.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed15e3a2c3c2398e6ae5ce86d6a31b452dfd6ad4cd5d312596b30929c4b6e182"},
{file = "pyzmq-25.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:032f5c8483c85bf9c9ca0593a11c7c749d734ce68d435e38c3f72e759b98b3c9"},
{file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:374b55516393bfd4d7a7daa6c3b36d6dd6a31ff9d2adad0838cd6a203125e714"},
{file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08bfcc21b5997a9be4fefa405341320d8e7f19b4d684fb9c0580255c5bd6d695"},
{file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1a843d26a8da1b752c74bc019c7b20e6791ee813cd6877449e6a1415589d22ff"},
{file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b48616a09d7df9dbae2f45a0256eee7b794b903ddc6d8657a9948669b345f220"},
{file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d4427b4a136e3b7f85516c76dd2e0756c22eec4026afb76ca1397152b0ca8145"},
{file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:26b0358e8933990502f4513c991c9935b6c06af01787a36d133b7c39b1df37fa"},
{file = "pyzmq-25.0.2-cp36-cp36m-win32.whl", hash = "sha256:c8fedc3ccd62c6b77dfe6f43802057a803a411ee96f14e946f4a76ec4ed0e117"},
{file = "pyzmq-25.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2da6813b7995b6b1d1307329c73d3e3be2fd2d78e19acfc4eff2e27262732388"},
{file = "pyzmq-25.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a35960c8b2f63e4ef67fd6731851030df68e4b617a6715dd11b4b10312d19fef"},
{file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2a0b880ab40aca5a878933376cb6c1ec483fba72f7f34e015c0f675c90b20"},
{file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:85762712b74c7bd18e340c3639d1bf2f23735a998d63f46bb6584d904b5e401d"},
{file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64812f29d6eee565e129ca14b0c785744bfff679a4727137484101b34602d1a7"},
{file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:510d8e55b3a7cd13f8d3e9121edf0a8730b87d925d25298bace29a7e7bc82810"},
{file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b164cc3c8acb3d102e311f2eb6f3c305865ecb377e56adc015cb51f721f1dda6"},
{file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28fdb9224a258134784a9cf009b59265a9dde79582fb750d4e88a6bcbc6fa3dc"},
{file = "pyzmq-25.0.2-cp37-cp37m-win32.whl", hash = "sha256:dd771a440effa1c36d3523bc6ba4e54ff5d2e54b4adcc1e060d8f3ca3721d228"},
{file = "pyzmq-25.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:9bdc40efb679b9dcc39c06d25629e55581e4c4f7870a5e88db4f1c51ce25e20d"},
{file = "pyzmq-25.0.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1f82906a2d8e4ee310f30487b165e7cc8ed09c009e4502da67178b03083c4ce0"},
{file = "pyzmq-25.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:21ec0bf4831988af43c8d66ba3ccd81af2c5e793e1bf6790eb2d50e27b3c570a"},
{file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbce982a17c88d2312ec2cf7673985d444f1beaac6e8189424e0a0e0448dbb3"},
{file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e1d2f2d86fc75ed7f8845a992c5f6f1ab5db99747fb0d78b5e4046d041164d2"},
{file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e92ff20ad5d13266bc999a29ed29a3b5b101c21fdf4b2cf420c09db9fb690e"},
{file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edbbf06cc2719889470a8d2bf5072bb00f423e12de0eb9ffec946c2c9748e149"},
{file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77942243ff4d14d90c11b2afd8ee6c039b45a0be4e53fb6fa7f5e4fd0b59da39"},
{file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab046e9cb902d1f62c9cc0eca055b1d11108bdc271caf7c2171487298f229b56"},
{file = "pyzmq-25.0.2-cp38-cp38-win32.whl", hash = "sha256:ad761cfbe477236802a7ab2c080d268c95e784fe30cafa7e055aacd1ca877eb0"},
{file = "pyzmq-25.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8560756318ec7c4c49d2c341012167e704b5a46d9034905853c3d1ade4f55bee"},
{file = "pyzmq-25.0.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:ab2c056ac503f25a63f6c8c6771373e2a711b98b304614151dfb552d3d6c81f6"},
{file = "pyzmq-25.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cca8524b61c0eaaa3505382dc9b9a3bc8165f1d6c010fdd1452c224225a26689"},
{file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb9f7eae02d3ac42fbedad30006b7407c984a0eb4189a1322241a20944d61e5"},
{file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5eaeae038c68748082137d6896d5c4db7927e9349237ded08ee1bbd94f7361c9"},
{file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a31992a8f8d51663ebf79df0df6a04ffb905063083d682d4380ab8d2c67257c"},
{file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6a979e59d2184a0c8f2ede4b0810cbdd86b64d99d9cc8a023929e40dce7c86cc"},
{file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f124cb73f1aa6654d31b183810febc8505fd0c597afa127c4f40076be4574e0"},
{file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:65c19a63b4a83ae45d62178b70223adeee5f12f3032726b897431b6553aa25af"},
{file = "pyzmq-25.0.2-cp39-cp39-win32.whl", hash = "sha256:83d822e8687621bed87404afc1c03d83fa2ce39733d54c2fd52d8829edb8a7ff"},
{file = "pyzmq-25.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:24683285cc6b7bf18ad37d75b9db0e0fefe58404e7001f1d82bf9e721806daa7"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a4b4261eb8f9ed71f63b9eb0198dd7c934aa3b3972dac586d0ef502ba9ab08b"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:62ec8d979f56c0053a92b2b6a10ff54b9ec8a4f187db2b6ec31ee3dd6d3ca6e2"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:affec1470351178e892121b3414c8ef7803269f207bf9bef85f9a6dd11cde264"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffc71111433bd6ec8607a37b9211f4ef42e3d3b271c6d76c813669834764b248"},
{file = "pyzmq-25.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6fadc60970714d86eff27821f8fb01f8328dd36bebd496b0564a500fe4a9e354"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:269968f2a76c0513490aeb3ba0dc3c77b7c7a11daa894f9d1da88d4a0db09835"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f7c8b8368e84381ae7c57f1f5283b029c888504aaf4949c32e6e6fb256ec9bf0"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25e6873a70ad5aa31e4a7c41e5e8c709296edef4a92313e1cd5fc87bbd1874e2"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b733076ff46e7db5504c5e7284f04a9852c63214c74688bdb6135808531755a3"},
{file = "pyzmq-25.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a6f6ae12478fdc26a6d5fdb21f806b08fa5403cd02fd312e4cb5f72df078f96f"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:67da1c213fbd208906ab3470cfff1ee0048838365135a9bddc7b40b11e6d6c89"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531e36d9fcd66f18de27434a25b51d137eb546931033f392e85674c7a7cea853"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34a6fddd159ff38aa9497b2e342a559f142ab365576284bc8f77cb3ead1f79c5"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b491998ef886662c1f3d49ea2198055a9a536ddf7430b051b21054f2a5831800"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5d496815074e3e3d183fe2c7fcea2109ad67b74084c254481f87b64e04e9a471"},
{file = "pyzmq-25.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:56a94ab1d12af982b55ca96c6853db6ac85505e820d9458ac76364c1998972f4"},
{file = "pyzmq-25.0.2.tar.gz", hash = "sha256:6b8c1bbb70e868dc88801aa532cae6bd4e3b5233784692b786f17ad2962e5149"},
]
[package.dependencies]
@ -1897,6 +2049,43 @@ urllib3 = ">=1.21.1,<1.27"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rfc3986"
version = "1.5.0"
description = "Validating URI References per RFC 3986"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
{file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
]
[package.dependencies]
idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
[package.extras]
idna2008 = ["idna"]
[[package]]
name = "rich"
version = "13.3.3"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
category = "dev"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "rich-13.3.3-py3-none-any.whl", hash = "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333"},
{file = "rich-13.3.3.tar.gz", hash = "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15"},
]
[package.dependencies]
markdown-it-py = ">=2.2.0,<3.0.0"
pygments = ">=2.13.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "rsa"
version = "4.9"
@ -1941,14 +2130,14 @@ files = [
[[package]]
name = "setuptools"
version = "67.6.0"
version = "67.6.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"},
{file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"},
{file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"},
{file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"},
]
[package.extras]
@ -2214,14 +2403,41 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.
[[package]]
name = "types-pyyaml"
version = "6.0.12.8"
version = "6.0.12.9"
description = "Typing stubs for PyYAML"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "types-PyYAML-6.0.12.8.tar.gz", hash = "sha256:19304869a89d49af00be681e7b267414df213f4eb89634c4495fa62e8f942b9f"},
{file = "types_PyYAML-6.0.12.8-py3-none-any.whl", hash = "sha256:5314a4b2580999b2ea06b2e5f9a7763d860d6e09cdf21c0e9561daa9cbd60178"},
{file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"},
{file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"},
]
[[package]]
name = "types-requests"
version = "2.28.11.17"
description = "Typing stubs for requests"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-requests-2.28.11.17.tar.gz", hash = "sha256:0d580652ce903f643f8c3b494dd01d29367ea57cea0c7ad7f65cf3169092edb0"},
{file = "types_requests-2.28.11.17-py3-none-any.whl", hash = "sha256:cc1aba862575019306b2ed134eb1ea994cab1c887a22e18d3383e6dd42e9789b"},
]
[package.dependencies]
types-urllib3 = "<1.27"
[[package]]
name = "types-urllib3"
version = "1.26.25.10"
description = "Typing stubs for urllib3"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-urllib3-1.26.25.10.tar.gz", hash = "sha256:c44881cde9fc8256d05ad6b21f50c4681eb20092552351570ab0a8a0653286d6"},
{file = "types_urllib3-1.26.25.10-py3-none-any.whl", hash = "sha256:12c744609d588340a07e45d333bf870069fc8793bcf96bae7a96d4712a42591d"},
]
[[package]]
@ -2419,4 +2635,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "9acd2b7396be651321ac517873a398d1631a76918fefdb003f7f587f031d9ba1"
content-hash = "18b858c93c242f3b53e9f77284904aa0eabb4c955f905cfe5fb227a6785bfabc"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.0.46"
version = "0.0.50"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -38,6 +38,14 @@ black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"
ruff = "^0.0.254"
httpx = "^0.23.3"
rich = "^13.3.3"
pytest = "^7.2.2"
types-requests = "^2.28.11"
requests = "^2.28.0"
[tool.ruff]
line-length = 120
[build-system]
requires = ["poetry-core"]

View file

@ -0,0 +1,23 @@
from pydantic import BaseModel, validator
class Code(BaseModel):
    """Request body carrying a snippet of Python source code to be validated."""

    # Raw source code string submitted by the client.
    code: str

    @validator("code")
    def validate_code(cls, v):
        # Pass-through: accepts any string unchanged.
        # NOTE(review): presumably a placeholder for future syntax/length
        # checks — confirm intent before removing.
        return v
# ValidationResponse models the payload shape {"imports": {"errors": []}, "function": {"errors": []}}
class ValidationResponse(BaseModel):
    """Response body reporting code-validation errors.

    Expected shape: {"imports": {"errors": [...]}, "function": {"errors": [...]}}.
    """

    # Import-level validation errors, keyed by "errors".
    imports: dict
    # Function-level validation errors, keyed by "errors".
    function: dict

    @validator("imports")
    def validate_imports(cls, v):
        # Coerce a falsy value (None, {}) to the canonical empty-errors dict.
        return v or {"errors": []}

    @validator("function")
    def validate_function(cls, v):
        # Coerce a falsy value (None, {}) to the canonical empty-errors dict.
        return v or {"errors": []}

View file

@ -1,9 +1,12 @@
from typing import Any, Dict
from fastapi import APIRouter, HTTPException
from langflow.api.base import Code, ValidationResponse
from langflow.interface.run import process_data_graph
from langflow.interface.run import process_graph
from langflow.interface.types import build_langchain_types_dict
from langflow.utils.validate import validate_code
# build router
router = APIRouter()
@ -17,6 +20,18 @@ def get_all():
@router.post("/predict")
def get_load(data: Dict[str, Any]):
    """Process the graph described by ``data`` and return its result.

    Responds with HTTP 500 carrying the underlying error message on failure.
    """
    try:
        return process_graph(data)
    except Exception as e:
        # raise (not return): returning an HTTPException would serialize the
        # exception object as a 200 response body instead of producing an
        # error status for the client.
        raise HTTPException(status_code=500, detail=str(e)) from e
@router.post("/validate", status_code=200, response_model=ValidationResponse)
def post_validate_code(code: Code):
    """Validate the submitted code and report import- and function-level errors.

    Returns a ValidationResponse built from the validator's error dict;
    responds with HTTP 500 on unexpected failure.
    """
    try:
        errors = validate_code(code.code)
        return ValidationResponse(
            imports=errors.get("imports", {}),
            function=errors.get("function", {}),
        )
    except Exception as e:
        # raise so FastAPI emits a real 500; returning the HTTPException object
        # would be run through response_model validation and fail (or be
        # serialized as a 200), masking the actual error.
        raise HTTPException(status_code=500, detail=str(e)) from e

View file

@ -20,6 +20,8 @@ tools:
- PAL-MATH
- Calculator
- Serper Search
- Tool
- PythonFunction
memories:
# - ConversationBufferMemory

View file

@ -1,42 +1,12 @@
from langchain.agents.mrkl import prompt
from langflow.template import nodes
def get_custom_prompts():
"""Get custom prompts."""
CUSTOM_NODES = {
"prompts": {**nodes.ZeroShotPromptNode().to_dict()},
"tools": {**nodes.PythonFunctionNode().to_dict(), **nodes.ToolNode().to_dict()},
}
return {
"ZeroShotPrompt": {
"template": {
"_type": "zero_shot",
"prefix": {
"type": "str",
"required": False,
"placeholder": "",
"list": False,
"show": True,
"multiline": True,
"value": prompt.PREFIX,
},
"suffix": {
"type": "str",
"required": True,
"placeholder": "",
"list": False,
"show": True,
"multiline": True,
"value": prompt.SUFFIX,
},
"format_instructions": {
"type": "str",
"required": False,
"placeholder": "",
"list": False,
"show": True,
"multiline": True,
"value": prompt.FORMAT_INSTRUCTIONS,
},
},
"description": "Prompt template for Zero Shot Agent.",
"base_classes": ["BasePromptTemplate"],
}
}
def get_custom_nodes(node_type: str):
    """Look up the custom node definitions registered under ``node_type``.

    Falls back to an empty dict when the type has no custom nodes.
    """
    empty: dict = {}
    return CUSTOM_NODES.get(node_type, empty)

View file

View file

@ -0,0 +1,387 @@
# Description: Graph class for building a graph of nodes and edges
# Insights:
# - Defer prompts building to the last moment or when they have all the tools
# - Build each inner agent first, then build the outer agent
from copy import deepcopy
import types
from typing import Any, Dict, List, Optional, Union
from langflow.utils import payload
from langflow.interface.listing import ALL_TYPES_DICT, ALL_TOOLS_NAMES, TOOLS_DICT
from langflow.interface import loading
class Node:
def __init__(self, data: Dict):
    """Wrap one raw graph-node dict and parse it into typed attributes."""
    # Unique node id taken from the frontend graph payload.
    self.id: str = data["id"]
    # Keep the raw payload for re-parsing/debugging.
    self._data = data
    # Edges touching this node; populated externally after construction.
    self.edges: List[Edge] = []
    self._parse_data()
    # Lazily-built object plus a flag guarding against rebuilding.
    self._built_object = None
    self._built = False
def _parse_data(self) -> None:
    """Extract output classes, input requirements, and node type from the raw payload."""
    self.data = self._data["data"]
    # Base classes this node can produce ("outputs" for edge matching).
    self.output = self.data["node"]["base_classes"]
    # Only dict-valued template entries describe inputs; scalars like "_type" are skipped.
    template_dicts = {
        key: value
        for key, value in self.data["node"]["template"].items()
        if isinstance(value, dict)
    }
    # Type names of inputs that must be wired for this node to build.
    self.required_inputs = [
        template_dicts[key]["type"]
        for key, value in template_dicts.items()
        if value["required"]
    ]
    # Type names of inputs that may be wired but are not mandatory.
    self.optional_inputs = [
        template_dicts[key]["type"]
        for key, value in template_dicts.items()
        if not value["required"]
    ]
    template_dict = self.data["node"]["template"]
    # Tool-producing nodes take their type from the template's "_type" field;
    # everything else uses the payload's top-level "type".
    self.node_type = (
        self.data["type"] if "Tool" not in self.output else template_dict["_type"]
    )
def _build_params(self):
# Some params are required, some are optional
# but most importantly, some params are python base classes
# like str and others are LangChain objects like LLMChain, BasePromptTemplate
# so we need to be able to distinguish between the two
# The dicts with "type" == "str" are the ones that are python base classes
# and most likely have a "value" key
# So for each key besides "_type" in the template dict, we have a dict
# with a "type" key. If the type is not "str", then we need to get the
# edge that connects to that node and get the Node with the required data
# and use that as the value for the param
# If the type is "str", then we need to get the value of the "value" key
# and use that as the value for the param
template_dict = {
key: value
for key, value in self.data["node"]["template"].items()
if isinstance(value, dict)
}
params = {}
for key, value in template_dict.items():
if key == "_type":
continue
# If the type is not transformable to a python base class
# then we need to get the edge that connects to this node
if value["type"] not in ["str", "bool", "code"]:
# Get the edge that connects to this node
edge = next(
(
edge
for edge in self.edges
if edge.target == self and edge.matched_type in value["type"]
),
None,
)
# Get the output of the node that the edge connects to
# if the value['list'] is True, then there will be more
# than one time setting to params[key]
# so we need to append to a list if it exists
# or create a new list if it doesn't
if edge is None and value["required"]:
# break line
raise ValueError(
f"Required input {key} for module {self.node_type} not found"
)
if value["list"]:
if key in params:
params[key].append(edge.source)
else:
params[key] = [edge.source]
elif value["required"] or edge is not None:
params[key] = edge.source
elif value["required"] or value.get("value"):
params[key] = value["value"]
# Add _type to params
self.params = params
def _build(self):
# The params dict is used to build the module
# it contains values and keys that point to nodes which
# have their own params dict
# When build is called, we iterate through the params dict
# and if the value is a node, we call build on that node
# and use the output of that build as the value for the param
# if the value is not a node, then we use the value as the param
# and continue
# Another aspect is that the node_type is the class that we need to import
# and instantiate with these built params
# Build each node in the params dict
for key, value in self.params.items():
# Check if Node or list of Nodes
if isinstance(value, Node):
result = value.build()
# If the key is "func", then we need to use the run method
if key == "func" and not isinstance(result, types.FunctionType):
# func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
# so we need to check if there is an attribute called run
if hasattr(result, "run"):
result = result.run # type: ignore
elif hasattr(result, "get_function"):
result = result.get_function() # type: ignore
self.params[key] = result
elif isinstance(value, list) and all(
isinstance(node, Node) for node in value
):
self.params[key] = [node.build() for node in value] # type: ignore
# Get the class from LANGCHAIN_TYPES_DICT
# and instantiate it with the params
# and return the instance
for base_type, value in ALL_TYPES_DICT.items():
if base_type == "tools":
value = TOOLS_DICT
if self.node_type in value:
self._built_object = loading.instantiate_class(
node_type=self.node_type,
base_type=base_type,
params=self.params,
)
break
if self._built_object is None:
raise ValueError(f"Node type {self.node_type} not found")
self._built = True
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
return deepcopy(self._built_object)
def add_edge(self, edge: "Edge") -> None:
self.edges.append(edge)
def __repr__(self) -> str:
return f"Node(id={self.id}, data={self.data})"
def __eq__(self, __o: object) -> bool:
return self.id == __o.id if isinstance(__o, Node) else False
def __hash__(self) -> int:
return id(self)
class AgentNode(Node):
    """Graph node for agents.

    An agent's tools and chains must be built before the agent itself, so
    this subclass partitions its incoming connections and builds them in
    that order.
    """

    def __init__(self, data: Dict):
        super().__init__(data)
        self.tools: List[ToolNode] = []
        self.chains: List[ChainNode] = []

    def _set_tools_and_chains(self) -> None:
        """Partition the sources of this node's edges into tools and chains."""
        for edge in self.edges:
            origin = edge.source
            if isinstance(origin, ToolNode):
                self.tools.append(origin)
            elif isinstance(origin, ChainNode):
                self.chains.append(origin)

    def build(self, force: bool = False) -> Any:
        if self._built and not force:
            return deepcopy(self._built_object)
        self._set_tools_and_chains()
        # Tools first: the chains (and their prompts) may reference them.
        for tool_node in self.tools:
            tool_node.build()
        for chain_node in self.chains:
            chain_node.build(tools=self.tools)
        self._build()
        return deepcopy(self._built_object)
class Edge:
    """Directed connection between two nodes, validated on construction."""

    def __init__(self, source: "Node", target: "Node"):
        self.source: "Node" = source
        self.target: "Node" = target
        self.validate_edge()

    def validate_edge(self) -> None:
        """Determine whether any output class of ``source`` satisfies an input of ``target``.

        A match is a substring containment: e.g. source output "Chain"
        satisfies the target requirement "LLMChain". Sets ``matched_type``
        to the first matching output (or None) and ``valid`` accordingly.
        """
        self.source_types = self.source.output
        self.target_reqs = self.target.required_inputs + self.target.optional_inputs
        self.matched_type = None
        for candidate in self.source_types:
            for requirement in self.target_reqs:
                if candidate in requirement:
                    self.matched_type = candidate
                    break
            if self.matched_type is not None:
                break
        self.valid = self.matched_type is not None

    def __repr__(self) -> str:
        return (
            f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
            f", matched_type={self.matched_type})"
        )
class ToolNode(Node):
    """Graph node wrapping a LangChain tool; builds exactly like a plain node."""

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(self, force: bool = False) -> Any:
        if self._built and not force:
            return deepcopy(self._built_object)
        self._build()
        return deepcopy(self._built_object)
class PromptNode(Node):
    """Graph node for prompt templates.

    A ZeroShotPrompt embeds the tools' names/descriptions, so the built
    tools must be injected into its params before building the prompt.
    """

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        if self._built and not force:
            return deepcopy(self._built_object)
        if self.node_type == "ZeroShotPrompt":
            # Build whatever tools we were given; default to none.
            built_tools = [] if tools is None else [t.build() for t in tools]
            self.params["tools"] = built_tools
        self._build()
        return deepcopy(self._built_object)
class ChainNode(Node):
    """Graph node for chains.

    Any PromptNode params are built first, forwarding the agent's tools so
    a ZeroShotPrompt can include them.
    """

    def __init__(self, data: Dict):
        super().__init__(data)

    def build(
        self,
        force: bool = False,
        tools: Optional[Union[List[Node], List[ToolNode]]] = None,
    ) -> Any:
        if self._built and not force:
            return deepcopy(self._built_object)
        for key, value in self.params.items():
            if isinstance(value, PromptNode):
                # Build the prompt param, passing the tools if available.
                self.params[key] = value.build(tools=tools, force=force)
        self._build()
        return deepcopy(self._built_object)
class Graph:
    """Builds Node/Edge objects from the frontend's raw dicts and drives building.

    Construction wires all nodes and edges together and resolves each
    node's params; ``build()`` then builds the LangChain object rooted at
    the graph's root node.
    """

    def __init__(
        self,
        nodes: List[Dict[str, Union[str, Dict[str, Union[str, List[str]]]]]],
        edges: List[Dict[str, str]],
    ) -> None:
        self._nodes = nodes
        self._edges = edges
        self._build_graph()

    def _build_graph(self) -> None:
        """Materialize nodes/edges, register edges on both endpoints, resolve params."""
        self.nodes = self._build_nodes()
        self.edges = self._build_edges()
        # Each edge is registered on BOTH endpoints so every node can see
        # all of its connections.
        for edge in self.edges:
            edge.source.add_edge(edge)
            edge.target.add_edge(edge)
        # Params can only be resolved once every edge exists.
        for node in self.nodes:
            node._build_params()

    def get_node(self, node_id: str) -> Union[None, Node]:
        """Return the node with the given frontend id, or None if absent."""
        return next((node for node in self.nodes if node.id == node_id), None)

    def get_nodes_with_target(self, node: Node) -> List[Node]:
        """Return the source nodes of every edge pointing at ``node``."""
        connected_nodes: List[Node] = [
            edge.source for edge in self.edges if edge.target == node
        ]
        return connected_nodes

    def build(self) -> List[Node]:
        """Build the whole graph starting from its root node and return the result."""
        root_node = payload.get_root_node(self)
        return root_node.build()

    def get_node_neighbors(self, node: Node) -> Dict[Node, int]:
        """Map each neighbor of ``node`` to the number of edges shared with it."""
        neighbors: Dict[Node, int] = {}
        for edge in self.edges:
            if edge.source == node:
                neighbor = edge.target
                if neighbor not in neighbors:
                    neighbors[neighbor] = 0
                neighbors[neighbor] += 1
            elif edge.target == node:
                neighbor = edge.source
                if neighbor not in neighbors:
                    neighbors[neighbor] = 0
                neighbors[neighbor] += 1
        return neighbors

    def _build_edges(self) -> List[Edge]:
        # Edge takes two Node objects, so the nodes must exist before the
        # edges; a dangling endpoint id is a hard error.
        edges: List[Edge] = []
        for edge in self._edges:
            source = self.get_node(edge["source"])
            target = self.get_node(edge["target"])
            if source is None:
                raise ValueError(f"Source node {edge['source']} not found")
            if target is None:
                raise ValueError(f"Target node {edge['target']} not found")
            edges.append(Edge(source, target))
        return edges

    def _build_nodes(self) -> List[Node]:
        """Instantiate the appropriate Node subclass for each raw node dict."""
        nodes: List[Node] = []
        for node in self._nodes:
            node_data = node["data"]
            node_type: str = node_data["type"]  # type: ignore
            node_lc_type: str = node_data["node"]["template"]["_type"]  # type: ignore
            # Dispatch on the frontend type name (and the template _type for
            # tools, which may not contain "tool" in their display name).
            if node_type in {"ZeroShotPrompt", "PromptTemplate"}:
                nodes.append(PromptNode(node))
            elif "agent" in node_type.lower():
                nodes.append(AgentNode(node))
            elif "chain" in node_type.lower():
                nodes.append(ChainNode(node))
            elif "tool" in node_type.lower() or node_lc_type in ALL_TOOLS_NAMES:
                nodes.append(ToolNode(node))
            else:
                nodes.append(Node(node))
        return nodes

    def get_children_by_node_type(self, node: Node, node_type: str) -> List[Node]:
        """Return ``[node]`` when its type or base classes include ``node_type``.

        NOTE(review): despite the name, this inspects only ``node`` itself,
        not its children — confirm the intent against callers.
        """
        children = []
        node_types = [node.data["type"]]
        if "node" in node.data:
            node_types += node.data["node"]["base_classes"]
        if node_type in node_types:
            children.append(node)
        return children

View file

View file

@ -0,0 +1,35 @@
from typing import Callable, Optional
from langflow.utils import validate
from pydantic import BaseModel, validator
class Function(BaseModel):
    """A runnable function described by its Python source code.

    The ``code`` field is validated at construction time; ``get_function``
    compiles it on demand.
    """

    code: str  # python source defining the function
    function: Optional[Callable] = None
    imports: Optional[str] = None

    # The previous no-op __init__ override (which only called super())
    # was removed; pydantic's BaseModel.__init__ is used directly.

    @validator("code")
    def validate_func(cls, v):
        """Reject source that cannot be evaluated as a function.

        eval_function raises on invalid source; the exception is allowed to
        propagate to pydantic unchanged (the old ``try/except e: raise e``
        wrapper was a no-op).
        """
        validate.eval_function(v)
        return v

    def get_function(self):
        """Compile ``code`` and return the named function object."""
        function_name = validate.extract_function_name(self.code)
        return validate.create_function(self.code, function_name)
class PythonFunction(Function):
    """Function whose implementation is supplied as raw Python source code."""

    # Re-declared from Function to make the required field explicit.
    code: str

View file

@ -0,0 +1,7 @@
from langflow.interface.importing.utils import import_by_type # noqa: F401
# This module is used to import any langchain class by name.
ALL = [
"import_by_type",
]

View file

@ -0,0 +1,63 @@
# This module is used to import any langchain class by name.
import importlib
from typing import Any
from langchain import PromptTemplate
from langchain.agents import Agent
from langchain.chains.base import Chain
from langchain.llms.base import BaseLLM
from langchain.tools import BaseTool
from langflow.utils.util import get_tool_by_name
def import_module(module_path: str) -> Any:
    """Return the module object located at the dotted path ``module_path``."""
    module = importlib.import_module(module_path)
    return module
def import_by_type(_type: str, name: str) -> Any:
    """Dispatch to the importer for the given category (``"agents"``, ``"prompts"``, ...)."""
    importers = {
        "agents": import_agent,
        "prompts": import_prompt,
        "llms": import_llm,
        "tools": import_tool,
        "chains": import_chain,
    }
    # Unknown categories raise KeyError, matching the original behavior.
    importer = importers[_type]
    return importer(name)
def import_class(class_path: str) -> Any:
    """Import the attribute named by a dotted path (module path + attribute name)."""
    module_path, class_name = class_path.rsplit(".", 1)
    target_module = import_module(module_path)
    return getattr(target_module, class_name)
def import_prompt(prompt: str) -> PromptTemplate:
    """Import a prompt class by name; ZeroShotPrompt is backed by PromptTemplate."""
    class_name = "PromptTemplate" if prompt == "ZeroShotPrompt" else prompt
    return import_class(f"langchain.prompts.{class_name}")
def import_agent(agent: str) -> Agent:
    """Import an agent class from ``langchain.agents`` by name."""
    return import_class("langchain.agents." + agent)
def import_llm(llm: str) -> BaseLLM:
    """Import an LLM class from ``langchain.llms`` by name."""
    return import_class("langchain.llms." + llm)
def import_tool(tool: str) -> BaseTool:
    """Import tool from tool name.

    Unlike the other importers, tools are resolved through the registry in
    ``langflow.utils.util`` rather than via a dotted-path import.
    """
    return get_tool_by_name(tool)
def import_chain(chain: str) -> Chain:
    """Import a chain class from ``langchain.chains`` by name."""
    return import_class("langchain.chains." + chain)

View file

@ -1,5 +1,4 @@
from langchain import agents, chains, prompts
from langchain.agents.load_tools import get_all_tool_names
from langflow.custom import customs
from langflow.interface.custom_lists import (
@ -8,18 +7,30 @@ from langflow.interface.custom_lists import (
)
from langflow.settings import settings
from langflow.utils import util
from langchain.agents.load_tools import get_all_tool_names
from langchain.agents import Tool
from langflow.interface.custom_types import PythonFunction
def list_type(object_type: str):
"""List all components"""
CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
TOOLS_DICT = util.get_tools_dict()
ALL_TOOLS_NAMES = set(get_all_tool_names() + list(CUSTOM_TOOLS.keys()))
def get_type_dict():
return {
"chains": list_chain_types,
"agents": list_agents,
"prompts": list_prompts,
"llms": list_llms,
"memories": list_memories,
"tools": list_tools,
}.get(object_type, lambda: "Invalid type")()
"memories": list_memories,
}
def list_type(object_type: str):
"""List all components"""
return get_type_dict().get(object_type, lambda: None)()
def list_agents():
@ -33,7 +44,7 @@ def list_agents():
def list_prompts():
"""List all prompt types"""
custom_prompts = customs.get_custom_prompts()
custom_prompts = customs.get_custom_nodes("prompts")
library_prompts = [
prompt.__annotations__["return"].__name__
for prompt in prompts.loading.type_to_loader_dict.values()
@ -47,12 +58,14 @@ def list_tools():
tools = []
for tool in get_all_tool_names():
tool_params = util.get_tool_params(util.get_tools_dict(tool))
if tool_params and tool_params["name"] in settings.tools or settings.dev:
for tool in ALL_TOOLS_NAMES:
tool_params = util.get_tool_params(util.get_tool_by_name(tool))
if tool_params and tool_params.get("name") in settings.tools or settings.dev:
tools.append(tool_params["name"])
return tools
# Add Tool
custom_tools = customs.get_custom_nodes("tools")
return tools + list(custom_tools.keys())
def list_llms():
@ -80,3 +93,15 @@ def list_memories():
for memory in memory_type_to_cls_dict.values()
if memory.__name__ in settings.memories or settings.dev
]
LANGCHAIN_TYPES_DICT = {
k: list_function() for k, list_function in get_type_dict().items()
}
# Now we'll build a dict with Langchain types and ours
ALL_TYPES_DICT = {
**LANGCHAIN_TYPES_DICT,
"Custom": ["Custom Tool", "Python Function"],
}

View file

@ -8,6 +8,12 @@ from langchain.agents.load_tools import (
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.agents import agent as agent_module
from langflow.interface.importing.utils import import_by_type
from langchain.agents import ZeroShotAgent
from langchain.agents.loading import load_agent_from_config
from langchain.agents.tools import Tool
from langchain.callbacks.base import BaseCallbackManager
@ -16,19 +22,43 @@ from langchain.llms.base import BaseLLM
from langchain.llms.loading import load_llm_from_config
from langflow.interface.types import get_type_list
from langflow.utils import payload, util
from langflow.utils import payload, util, validate
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
    """Instantiate the class identified by ``base_type``/``node_type`` with ``params``.

    Agents are wrapped in an AgentExecutor; PythonFunction nodes are
    compiled from their source string; ZeroShotPrompt builds the agent
    prompt; everything else is instantiated directly.
    """
    class_object = import_by_type(_type=base_type, name=node_type)
    if base_type == "agents":
        # Agents need an executor wired up with their tools and chain.
        allowed_tools = params["allowed_tools"]
        llm_chain = params["llm_chain"]
        return load_agent_executor(class_object, allowed_tools, llm_chain)
    elif node_type == "PythonFunction":
        # BUG FIX: this check must precede the generic branch below.
        # Previously it came after "node_type != 'ZeroShotPrompt'", which
        # matched PythonFunction too, making this branch unreachable.
        # params["code"] holds a Python source string; compile it and
        # return the resulting function object.
        function_string = params["code"]
        if isinstance(function_string, str):
            return validate.eval_function(function_string)
        raise ValueError("Function should be a string")
    elif base_type == "tools" or node_type != "ZeroShotPrompt":
        return class_object(**params)
    else:
        # ZeroShotPrompt: build the agent prompt, defaulting tools to [].
        if "tools" not in params:
            params["tools"] = []
        return ZeroShotAgent.create_prompt(**params)
def load_flow_from_json(path: str):
    """Load a flow from a JSON file and build the LangChain object it describes.

    (The docstring previously sat AFTER the import statement, where it was a
    dead string expression, not a docstring — moved to the top.)
    """
    # Imported here to avoid circular imports with langflow.graph.
    # NOTE(review): Graph appears unused in this function body — confirm
    # whether extract_json depends on it before removing the import.
    from langflow.graph.graph import Graph  # noqa: F401

    with open(path, "r") as f:
        flow_graph = json.load(f)
    data_graph = flow_graph["data"]
    extracted_json = extract_json(data_graph)
    return load_langchain_type_from_config(config=extracted_json)
def extract_json(data_graph):
nodes = data_graph["nodes"]
# Substitute ZeroShotPrompt with PromptTemplate
nodes = replace_zero_shot_prompt_with_prompt_template(nodes)
@ -36,8 +66,8 @@ def extract_json(data_graph):
nodes = payload.extract_input_variables(nodes)
# Nodes, edges and root node
edges = data_graph["edges"]
root = payload.get_root_node(nodes, edges)
return payload.build_json(root, nodes, edges)
graph = Graph(nodes, edges)
return graph.build()
def replace_zero_shot_prompt_with_prompt_template(nodes):
@ -92,6 +122,19 @@ def load_agent_executor_from_config(
)
def load_agent_executor(
    agent_class: type[agent_module.Agent], allowed_tools, llm_chain, **kwargs
):
    """Build an AgentExecutor from an agent class, its tools, and its LLM chain.

    The agent itself only receives the tool NAMES; the executor receives the
    tool objects.
    """
    names = [tool.name for tool in allowed_tools]
    initialized_agent = agent_class(allowed_tools=names, llm_chain=llm_chain)
    return AgentExecutor.from_agent_and_tools(
        agent=initialized_agent,
        tools=allowed_tools,
        **kwargs,
    )
def load_tools_from_config(tool_list: list[dict]) -> list:
"""Load tools based on a config list.

View file

@ -4,20 +4,27 @@ import re
from typing import Any, Dict
from langflow.interface import loading
from langflow.utils import payload
from langflow.graph.graph import Graph
def process_data_graph(data_graph: Dict[str, Any]):
def process_graph(data_graph: Dict[str, Any]):
"""
Process data graph by extracting input variables and replacing ZeroShotPrompt
Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate,then run the graph and return the result and thought.
"""
nodes = data_graph["nodes"]
# Add input variables
# ? Is this necessary?
nodes = payload.extract_input_variables(nodes)
# Nodes, edges and root node
edges = data_graph["edges"]
graph = Graph(nodes, edges)
extracted_json = loading.extract_json(data_graph)
langchain_object = graph.build()
message = data_graph["message"]
# Process json
result, thought = get_result_and_thought(extracted_json, message)
result, thought = get_result_and_thought_using_graph(langchain_object, message)
return {
"result": result,
@ -27,6 +34,26 @@ def process_data_graph(data_graph: Dict[str, Any]):
}
def get_result_and_thought_using_graph(loaded_langchain, message: str):
    """Run the built LangChain object on ``message``.

    Returns a ``(result, thought)`` pair, where ``thought`` is everything
    the chain printed to stdout while running (the agent's intermediate
    reasoning). On any exception the error text becomes the result and the
    thought is empty.
    """
    loaded_langchain.verbose = True
    buffer = io.StringIO()
    try:
        with contextlib.redirect_stdout(buffer):
            raw = loaded_langchain(message)
            if isinstance(raw, dict):
                # Chains return a dict keyed by their output keys.
                raw = raw.get(loaded_langchain.output_keys[0])
            thought = buffer.getvalue()
        result = raw
    except Exception as exc:
        result = f"Error: {str(exc)}"
        thought = ""
    return result, thought
def get_result_and_thought(extracted_json: Dict[str, Any], message: str):
"""Get result and thought from extracted json"""
try:
@ -41,6 +68,7 @@ def get_result_and_thought(extracted_json: Dict[str, Any], message: str):
else result
)
thought = output_buffer.getvalue()
except Exception as e:
result = f"Error: {str(e)}"
thought = ""

View file

@ -6,7 +6,6 @@ from langchain.agents.load_tools import (
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
get_all_tool_names,
)
from langflow.custom import customs
@ -14,6 +13,8 @@ from langflow.interface.custom_lists import (
llm_type_to_cls_dict,
memory_type_to_cls_dict,
)
from langflow.interface.listing import CUSTOM_TOOLS, ALL_TOOLS_NAMES
from langflow.template.template import Field, Template
from langflow.utils import util
@ -24,7 +25,7 @@ def get_signature(name: str, object_type: str):
"agents": get_agent_signature,
"prompts": get_prompt_signature,
"llms": get_llm_signature,
"memories": get_memory_signature,
# "memories": get_memory_signature,
"tools": get_tool_signature,
}.get(object_type, lambda name: f"Invalid type: {name}")(name)
@ -33,8 +34,9 @@ def get_chain_signature(name: str):
"""Get the chain type by signature."""
try:
return util.build_template_from_function(
name, chains.loading.type_to_loader_dict
name, chains.loading.type_to_loader_dict, add_function=True
)
except ValueError as exc:
raise ValueError("Chain not found") from exc
@ -42,7 +44,9 @@ def get_chain_signature(name: str):
def get_agent_signature(name: str):
"""Get the signature of an agent."""
try:
return util.build_template_from_class(name, agents.loading.AGENT_TO_CLASS)
return util.build_template_from_class(
name, agents.loading.AGENT_TO_CLASS, add_function=True
)
except ValueError as exc:
raise ValueError("Agent not found") from exc
@ -50,8 +54,8 @@ def get_agent_signature(name: str):
def get_prompt_signature(name: str):
"""Get the signature of a prompt."""
try:
if name in customs.get_custom_prompts().keys():
return customs.get_custom_prompts()[name]
if name in customs.get_custom_nodes("prompts").keys():
return customs.get_custom_nodes("prompts")[name]
return util.build_template_from_function(
name, prompts.loading.type_to_loader_dict
)
@ -78,28 +82,46 @@ def get_memory_signature(name: str):
def get_tool_signature(name: str):
"""Get the signature of a tool."""
NODE_INPUTS = ["llm", "func"]
base_classes = ["Tool"]
all_tools = {}
for tool in get_all_tool_names():
if tool_params := util.get_tool_params(util.get_tools_dict(tool)):
all_tools[tool_params["name"]] = tool
for tool in ALL_TOOLS_NAMES:
if tool_params := util.get_tool_params(util.get_tool_by_name(tool)):
tool_name = tool_params.get("name") or str(tool)
all_tools[tool_name] = {"type": tool, "params": tool_params}
# Raise error if name is not in tools
if name not in all_tools.keys():
raise ValueError("Tool not found")
type_dict = {
"str": {
"type": "str",
"required": True,
"list": False,
"show": True,
"placeholder": "",
"value": "",
},
"llm": {"type": "BaseLLM", "required": True, "list": False, "show": True},
"str": Field(
field_type="str",
required=True,
is_list=False,
show=True,
placeholder="",
value="",
),
"llm": Field(field_type="BaseLLM", required=True, is_list=False, show=True),
"func": Field(
field_type="function",
required=True,
is_list=False,
show=True,
multiline=True,
),
"code": Field(
field_type="str",
required=True,
is_list=False,
show=True,
value="",
multiline=True,
),
}
tool_type = all_tools[name]
tool_type: str = all_tools[name]["type"] # type: ignore
if tool_type in _BASE_TOOLS:
params = []
@ -111,23 +133,38 @@ def get_tool_signature(name: str):
elif tool_type in _EXTRA_OPTIONAL_TOOLS:
_, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
params = extra_keys
elif tool_type == "Tool":
params = ["name", "description", "func"]
elif tool_type in CUSTOM_TOOLS:
# Get custom tool params
params = all_tools[name]["params"] # type: ignore
base_classes = ["function"]
if node := customs.get_custom_nodes("tools").get(tool_type):
return node
else:
params = []
template = {
param: (type_dict[param].copy() if param == "llm" else type_dict["str"].copy())
for param in params
}
# Copy the field and add the name
fields = []
for param in params:
if param in NODE_INPUTS:
field = type_dict[param].copy()
else:
field = type_dict["str"].copy()
field.name = param
if param == "aiosession":
field.show = False
field.required = False
fields.append(field)
# Remove required from aiosession
if "aiosession" in template.keys():
template["aiosession"]["required"] = False
template["aiosession"]["show"] = False
template["_type"] = tool_type # type: ignore
template = Template(fields=fields, type_name=tool_type)
tool_params = util.get_tool_params(util.get_tool_by_name(tool_type))
if tool_params is None:
tool_params = {}
return {
"template": template,
**util.get_tool_params(util.get_tools_dict(tool_type)),
"base_classes": ["Tool"],
"template": util.format_dict(template.to_dict()),
**tool_params,
"base_classes": base_classes,
}

View file

@ -6,7 +6,7 @@ def get_type_list():
"""Get a list of all langchain types"""
all_types = build_langchain_types_dict()
all_types.pop("tools")
# all_types.pop("tools")
for key, value in all_types.items():
all_types[key] = [item["template"]["_type"] for item in value.values()]

View file

@ -0,0 +1,114 @@
from langflow.template.template import Field, FrontendNode, Template
from langchain.agents.mrkl import prompt
from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION
class ZeroShotPromptNode(FrontendNode):
    """Frontend description of the ZeroShotAgent prompt template.

    prefix/suffix/format_instructions default to LangChain's MRKL prompt
    constants. ``to_dict`` is inherited from FrontendNode; the previous
    override only delegated to super() and was removed as redundant.
    """

    name: str = "ZeroShotPrompt"
    template: Template = Template(
        type_name="zero_shot",
        fields=[
            Field(
                field_type="str",
                required=False,
                placeholder="",
                is_list=False,
                show=True,
                multiline=True,
                value=prompt.PREFIX,
                name="prefix",
            ),
            Field(
                field_type="str",
                required=True,
                placeholder="",
                is_list=False,
                show=True,
                multiline=True,
                value=prompt.SUFFIX,
                name="suffix",
            ),
            Field(
                field_type="str",
                required=False,
                placeholder="",
                is_list=False,
                show=True,
                multiline=True,
                value=prompt.FORMAT_INSTRUCTIONS,
                name="format_instructions",
            ),
        ],
    )
    description: str = "Prompt template for Zero Shot Agent."
    base_classes: list[str] = ["BasePromptTemplate"]
class PythonFunctionNode(FrontendNode):
    """Frontend description of an inline Python function node.

    ``to_dict`` is inherited from FrontendNode; the previous override only
    delegated to super() and was removed as redundant.
    """

    name: str = "PythonFunction"
    template: Template = Template(
        type_name="python_function",
        fields=[
            Field(
                field_type="code",
                required=True,
                placeholder="",
                is_list=False,
                show=True,
                value=DEFAULT_PYTHON_FUNCTION,
                name="code",
            )
        ],
    )
    description: str = "Python function to be executed."
    base_classes: list[str] = ["function"]
class ToolNode(FrontendNode):
    """Frontend description of a custom LangChain Tool (name/description/func).

    ``to_dict`` is inherited from FrontendNode; the previous override only
    delegated to super() and was removed as redundant.
    """

    name: str = "Tool"
    template: Template = Template(
        type_name="tool",
        fields=[
            Field(
                field_type="str",
                required=True,
                placeholder="",
                is_list=False,
                show=True,
                multiline=True,
                value="",
                name="name",
            ),
            Field(
                field_type="str",
                required=True,
                placeholder="",
                is_list=False,
                show=True,
                multiline=True,
                value="",
                name="description",
            ),
            Field(
                field_type="str",
                required=True,
                placeholder="",
                is_list=False,
                show=True,
                multiline=True,
                value="",
                name="func",
            ),
        ],
    )
    description: str = "Tool to be used in the flow."
    base_classes: list[str] = ["BaseTool"]

View file

@ -0,0 +1,51 @@
from typing import Any
from pydantic import BaseModel
class Field(BaseModel):
    """A single template field as rendered by the frontend."""

    field_type: str = "str"  # serialized under the key "type"
    required: bool = False
    placeholder: str = ""
    is_list: bool = False  # serialized under the key "list"
    show: bool = True
    multiline: bool = False
    value: Any = None  # dropped from the serialized dict when None
    # Name of the field inside the template mapping.
    name: str = ""

    def to_dict(self):
        """Serialize the field, omitting None entries and renaming frontend keys."""
        serialized = {
            key: val for key, val in self.dict().items() if val is not None
        }
        serialized["type"] = serialized.pop("field_type")
        serialized["list"] = serialized.pop("is_list")
        return serialized
class Template(BaseModel):
    """An ordered collection of fields plus the template's ``_type`` tag."""

    type_name: str
    fields: list[Field]

    def to_dict(self):
        """Serialize as ``{field_name: field_dict, ..., "_type": type_name}``."""
        serialized = {}
        for field in self.fields:
            serialized[field.name] = field.to_dict()
        serialized["_type"] = self.type_name
        return serialized
class FrontendNode(BaseModel):
    """Everything the UI needs to render one node type, keyed by its name."""

    template: Template
    description: str
    base_classes: list
    name: str = ""

    def to_dict(self):
        """Serialize as ``{name: {template, description, base_classes}}``."""
        body = {
            "template": self.template.to_dict(),
            "description": self.description,
            "base_classes": self.base_classes,
        }
        return {self.name: body}

View file

@ -6,3 +6,10 @@ OPENAI_MODELS = [
"text-ada-001",
]
CHAT_OPENAI_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"]
DEFAULT_PYTHON_FUNCTION = """
def python_function(text: str) -> str:
\"\"\"This is a default python function that returns the input text\"\"\"
return text
"""

View file

@ -1,5 +1,6 @@
import contextlib
import re
from typing import Dict
def extract_input_variables(nodes):
@ -27,48 +28,59 @@ def extract_input_variables(nodes):
return nodes
def get_root_node(nodes, edges):
def get_root_node(graph):
"""
Returns the root node of the template.
"""
incoming_edges = {edge["source"] for edge in edges}
return next((node for node in nodes if node["id"] not in incoming_edges), None)
incoming_edges = {edge.source for edge in graph.edges}
return next((node for node in graph.nodes if node not in incoming_edges), None)
def build_json(root, nodes, edges):
"""
Builds a json from the nodes and edges
"""
edge_ids = [edge["source"] for edge in edges if edge["target"] == root["id"]]
local_nodes = [node for node in nodes if node["id"] in edge_ids]
def build_json(root, graph) -> Dict:
if "node" not in root.data:
# If the root node has no "node" key, then it has only one child,
# which is the target of the single outgoing edge
edge = root.edges[0]
local_nodes = [edge.target]
else:
# Otherwise, find all children whose type matches the type
# specified in the template
node_type = root.node_type
local_nodes = graph.get_nodes_with_target(root)
if "node" not in root["data"]:
return build_json(local_nodes[0], nodes, edges)
final_dict = root["data"]["node"]["template"].copy()
if len(local_nodes) == 1:
return build_json(local_nodes[0], graph)
# Build a dictionary from the template
template = root.data["node"]["template"]
final_dict = template.copy()
for key, value in final_dict.items():
if key == "_type":
continue
module_type = value["type"]
node_type = value["type"]
if "value" in value and value["value"] is not None:
# If the value is specified, use it
value = value["value"]
elif "dict" in module_type:
elif "dict" in node_type:
# If the value is a dictionary, create an empty dictionary
value = {}
else:
# Otherwise, recursively build the child nodes
children = []
for c in local_nodes:
module_types = [c["data"]["type"]]
if "node" in c["data"]:
module_types += c["data"]["node"]["base_classes"]
if module_type in module_types:
children.append(c)
for local_node in local_nodes:
node_children = graph.get_children_by_node_type(local_node, node_type)
children.extend(node_children)
if value["required"] and not children:
raise ValueError(f"No child with type {module_type} found")
values = [build_json(child, nodes, edges) for child in children]
value = list(values) if value["list"] else next(iter(values), None)
raise ValueError(f"No child with type {node_type} found")
values = [build_json(child, graph) for child in children]
value = (
list(values)
if value["list"]
else next(iter(values), None) # type: ignore
)
final_dict[key] = value
return final_dict

View file

@ -2,7 +2,7 @@ import ast
import importlib
import inspect
import re
from typing import Dict, Optional
from typing import Dict, Optional, Union
from langchain.agents.load_tools import (
_BASE_TOOLS,
@ -11,10 +11,16 @@ from langchain.agents.load_tools import (
_LLM_TOOLS,
)
from langchain.agents.tools import Tool
from langflow.utils import constants
def build_template_from_function(name: str, type_to_loader_dict: Dict):
def build_template_from_function(
name: str, type_to_loader_dict: Dict, add_function: bool = False
):
classes = [
item.__annotations__["return"].__name__ for item in type_to_loader_dict.values()
]
@ -52,6 +58,11 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict):
if class_field_items in docs["Attributes"]
else ""
)
# Adding function to base classes to allow
# the output to be a function
base_classes = get_base_classes(_class)
if add_function:
base_classes.append("function")
return {
"template": format_dict(variables, name),
@ -60,7 +71,9 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict):
}
def build_template_from_class(name: str, type_to_cls_dict: Dict):
def build_template_from_class(
name: str, type_to_cls_dict: Dict, add_function: bool = False
):
classes = [item.__name__ for item in type_to_cls_dict.values()]
# Raise error if name is not in chains
@ -97,15 +110,22 @@ def build_template_from_class(name: str, type_to_cls_dict: Dict):
if class_field_items in docs["Attributes"]
else ""
)
base_classes = get_base_classes(_class)
# Adding function to base classes to allow
# the output to be a function
if add_function:
base_classes.append("function")
return {
"template": format_dict(variables, name),
"description": docs["Description"],
"base_classes": get_base_classes(_class),
"base_classes": base_classes,
}
def get_base_classes(cls):
"""Get the base classes of a class.
These are used to determine the output of the nodes.
"""
bases = cls.__bases__
if not bases:
return []
@ -128,19 +148,42 @@ def get_default_factory(module: str, function: str):
return None
def get_tools_dict(name: Optional[str] = None):
def get_tools_dict():
"""Get the tools dictionary."""
from langflow.interface.listing import CUSTOM_TOOLS
tools = {
**_BASE_TOOLS,
**_LLM_TOOLS, # type: ignore
**{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()}, # type: ignore
**_LLM_TOOLS,
**{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()},
**{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
**CUSTOM_TOOLS,
}
return tools[name] if name else tools
return tools
def get_tool_params(func, **kwargs):
def get_tool_by_name(name: str):
    """Return the tool registered under ``name``.

    Raises:
        ValueError: if no tool with that name exists.
    """
    available = get_tools_dict()
    if name in available:
        return available[name]
    raise ValueError(f"{name} not found.")
def get_tool_params(tool, **kwargs) -> Union[Dict, None]:
    """Extract a tool's parameters, dispatching on what ``tool`` is.

    Functions are parsed via their source AST; classes via their
    ``__init__`` signature and annotated attributes.

    Raises:
        ValueError: if ``tool`` is neither a function nor a class.
    """
    if inspect.isclass(tool):
        # Classes: collect the params needed to instantiate them.
        return get_class_tool_params(tool, **kwargs)
    if inspect.isfunction(tool):
        return get_func_tool_params(tool, **kwargs)
    raise ValueError("Tool must be a function or class.")
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
tree = ast.parse(inspect.getsource(func))
# Iterate over the statements in the abstract syntax tree
@ -149,7 +192,7 @@ def get_tool_params(func, **kwargs):
if isinstance(node, ast.Return):
tool = node.value
if isinstance(tool, ast.Call):
if tool.func.id == "Tool":
if isinstance(tool.func, ast.Name) and tool.func.id == "Tool":
if tool.keywords:
tool_params = {}
for keyword in tool.keywords:
@ -159,11 +202,13 @@ def get_tool_params(func, **kwargs):
tool_params["description"] = ast.literal_eval(
keyword.value
)
return tool_params
return {
"name": ast.literal_eval(tool.args[0]),
"description": ast.literal_eval(tool.args[2]),
}
#
else:
# get the class object from the return statement
try:
@ -177,11 +222,44 @@ def get_tool_params(func, **kwargs):
"name": getattr(class_obj, "name"),
"description": getattr(class_obj, "description"),
}
# Return None if no return statement was found
# Return None if no return statement was found
return None
def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
    """Collect the parameters needed to instantiate tool class ``cls``.

    Reads the class's source AST: ``__init__`` arguments become keys, and
    (for classes other than ``Tool``) annotated class attributes are added
    with empty-string placeholder values.
    """
    tree = ast.parse(inspect.getsource(cls))
    tool_params = {}
    # Iterate over the statements in the abstract syntax tree
    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            # Find the class definition and look for methods
            for stmt in node.body:
                if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__":
                    # There is no assignment statements in the __init__ method
                    # So we need to get the params from the function definition
                    for arg in stmt.args.args:
                        if arg.arg == "name":
                            # It should be the name of the class
                            tool_params[arg.arg] = cls.__name__
                        elif arg.arg == "self":
                            continue
                        # If there is not default value, set it to an empty string
                        else:
                            try:
                                # literal_eval on the annotation AST node: a
                                # constant annotation (e.g. a string) becomes
                                # the value; name/attribute annotations raise
                                # ValueError and fall back to "".
                                annotation = ast.literal_eval(arg.annotation)  # type: ignore
                                tool_params[arg.arg] = annotation
                            except ValueError:
                                tool_params[arg.arg] = ""
                # Get the attribute name and the annotation
                elif cls != Tool and isinstance(stmt, ast.AnnAssign):
                    # NOTE(review): assumes a simple name target (ast.Name);
                    # tuple/attribute targets would raise — confirm inputs.
                    tool_params[stmt.target.id] = ""  # type: ignore
    return tool_params
def get_class_doc(class_name):
"""
Extracts information from the docstring of a given class.
@ -272,7 +350,7 @@ def format_dict(d, name: Optional[str] = None):
_type = _type.replace("Mapping", "dict")
# Change type from str to Tool
value["type"] = "Tool" if key == "allowed_tools" else _type
value["type"] = "Tool" if key in ["allowed_tools"] else _type
# Show or not field
value["show"] = bool(
@ -291,11 +369,11 @@ def format_dict(d, name: Optional[str] = None):
# Add password field
value["password"] = any(
text in key for text in ["password", "token", "api", "key"]
text in key.lower() for text in ["password", "token", "api", "key"]
)
# Add multline
value["multiline"] = key in ["suffix", "prefix", "template", "examples"]
value["multiline"] = key in ["suffix", "prefix", "template", "examples", "code"]
# Replace default value with actual value
if "default" in value:

View file

@ -0,0 +1,168 @@
import ast
import importlib
import types
from typing import Dict
def add_type_ignores():
if not hasattr(ast, "TypeIgnore"):
class TypeIgnore(ast.AST):
_fields = ()
ast.TypeIgnore = TypeIgnore
def validate_code(code):
    """Statically and dynamically validate a user-supplied code string.

    Returns a dict of the form
    ``{"imports": {"errors": [...]}, "function": {"errors": [...]}}``
    where each list holds human-readable error strings; empty lists mean
    the code is valid.
    """
    # Initialize the errors dictionary
    errors = {"imports": {"errors": []}, "function": {"errors": []}}

    # Syntax check: a parse failure is reported as a "function" error.
    try:
        tree = ast.parse(code)
    except Exception as e:
        errors["function"]["errors"].append(str(e))
        return errors

    # Some Python builds lack ast.TypeIgnore; create it before synthesizing
    # ast.Module(..., type_ignores=[]) below.
    if not hasattr(ast, "TypeIgnore"):
        ast.TypeIgnore = type("TypeIgnore", (ast.AST,), {"_fields": ()})
    tree.type_ignores = []

    for node in tree.body:
        if isinstance(node, ast.Import):
            # Verify every imported module is actually installed.
            for alias in node.names:
                try:
                    importlib.import_module(alias.name)
                except ModuleNotFoundError as e:
                    errors["imports"]["errors"].append(str(e))
        elif isinstance(node, ast.ImportFrom):
            # ``from pkg import name`` — validate the source module too
            # (relative imports, level > 0, cannot be resolved here).
            if node.module and node.level == 0:
                try:
                    importlib.import_module(node.module)
                except ModuleNotFoundError as e:
                    errors["imports"]["errors"].append(str(e))
        elif isinstance(node, ast.FunctionDef):
            # Compile and execute each function definition so errors in
            # decorators or default arguments surface. Use an isolated
            # namespace so user code cannot pollute this frame.
            code_obj = compile(
                ast.Module(body=[node], type_ignores=[]), "<string>", "exec"
            )
            try:
                exec(code_obj, {})
            except Exception as e:
                errors["function"]["errors"].append(str(e))

    # Return the errors dictionary
    return errors
def eval_function(function_string: str):
    """Execute ``function_string`` and return the first function it defines.

    Raises:
        ValueError: if no function is defined by the string.
    """
    # Run the user code in its own namespace so nothing leaks into ours.
    scope: Dict = {}
    exec(function_string, scope)
    for candidate in scope.values():
        if isinstance(candidate, types.FunctionType):
            return candidate
    raise ValueError("Function string does not contain a function")
def execute_function(code, function_name, *args, **kwargs):
    """Compile ``function_name`` out of ``code``, call it, and return the result.

    Top-level ``import`` statements in ``code`` are resolved first so the
    function body can use them.

    Raises:
        ModuleNotFoundError: if a top-level import in ``code`` is not installed.
        StopIteration: if ``function_name`` is not defined in ``code``.
        ValueError: if executing the function definition fails.
    """
    # Guard for Python builds without ast.TypeIgnore (needed below).
    if not hasattr(ast, "TypeIgnore"):
        ast.TypeIgnore = type("TypeIgnore", (ast.AST,), {"_fields": ()})

    module = ast.parse(code)
    exec_globals = globals().copy()

    # Make every top-level import visible to the function.
    for node in module.body:
        if isinstance(node, ast.Import):
            for alias in node.names:
                try:
                    exec_globals[alias.asname or alias.name] = (
                        importlib.import_module(alias.name)
                    )
                except ModuleNotFoundError as e:
                    raise ModuleNotFoundError(
                        f"Module {alias.name} not found. Please install it and try again."
                    ) from e

    function_code = next(
        node
        for node in module.body
        if isinstance(node, ast.FunctionDef) and node.name == function_name
    )
    code_obj = compile(
        ast.Module(body=[function_code], type_ignores=[]), "<string>", "exec"
    )
    try:
        # Execute into exec_globals only: writing through ``locals()`` inside
        # a function is unreliable (and broken on Python 3.13+, PEP 667).
        exec(code_obj, exec_globals)
    except Exception as exc:
        raise ValueError("Function string does not contain a function") from exc

    return exec_globals[function_name](*args, **kwargs)
def create_function(code, function_name):
    """Build and return a callable for ``function_name`` defined in ``code``.

    Unlike :func:`execute_function`, the function is not invoked here; a
    wrapper is returned so callers can invoke it later with any arguments.

    Raises:
        ModuleNotFoundError: if a top-level import in ``code`` is not installed.
        StopIteration: if ``function_name`` is not defined in ``code``.
        ValueError: if executing the function definition fails.
    """
    # Guard for Python builds without ast.TypeIgnore (needed below).
    if not hasattr(ast, "TypeIgnore"):
        ast.TypeIgnore = type("TypeIgnore", (ast.AST,), {"_fields": ()})

    module = ast.parse(code)
    exec_globals = globals().copy()

    # Resolve top-level imports so the produced function can use them.
    for node in module.body:
        if isinstance(node, ast.Import):
            for alias in node.names:
                try:
                    exec_globals[alias.asname or alias.name] = (
                        importlib.import_module(alias.name)
                    )
                except ModuleNotFoundError as e:
                    raise ModuleNotFoundError(
                        f"Module {alias.name} not found. Please install it and try again."
                    ) from e

    function_code = next(
        node
        for node in module.body
        if isinstance(node, ast.FunctionDef) and node.name == function_name
    )
    code_obj = compile(
        ast.Module(body=[function_code], type_ignores=[]), "<string>", "exec"
    )
    try:
        # exec into exec_globals (not ``locals()``): function-scope locals()
        # writes are unreliable and break on Python 3.13+ (PEP 667); they also
        # made the previous silent ``except: pass`` end in a KeyError later.
        exec(code_obj, exec_globals)
    except Exception as exc:
        raise ValueError("Function string does not contain a function") from exc

    def wrapped_function(*args, **kwargs):
        # The target's __globals__ is exec_globals, so the imported modules
        # are already visible to it; just forward the call.
        return exec_globals[function_name](*args, **kwargs)

    return wrapped_function
def extract_function_name(code):
    """Return the name of the first top-level function defined in ``code``.

    Raises:
        ValueError: if ``code`` contains no function definition.
        SyntaxError: if ``code`` is not valid Python.
    """
    defs = (n for n in ast.parse(code).body if isinstance(n, ast.FunctionDef))
    first = next(defs, None)
    if first is None:
        raise ValueError("No function definition found in the code string")
    return first.name

View file

@ -21,9 +21,11 @@
"@types/node": "^16.18.12",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"ace-builds": "^1.16.0",
"axios": "^1.3.2",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
@ -5098,6 +5100,11 @@
"node": ">= 0.6"
}
},
"node_modules/ace-builds": {
"version": "1.16.0",
"resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.16.0.tgz",
"integrity": "sha512-EriMhoxdfhh0zKm7icSt8EXekODAOVsYh9fpnlru9ALwf0Iw7J7bpuqLjhi3QRxvVKR7P0teQdJwTvjVMcYHuw=="
},
"node_modules/acorn": {
"version": "8.8.2",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
@ -7205,6 +7212,11 @@
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
"integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="
},
"node_modules/diff-match-patch": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz",
"integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw=="
},
"node_modules/diff-sequences": {
"version": "27.5.1",
"resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz",
@ -12409,6 +12421,16 @@
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
"integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow=="
},
"node_modules/lodash.get": {
"version": "4.4.2",
"resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ=="
},
"node_modules/lodash.isequal": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
"integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ=="
},
"node_modules/lodash.memoize": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@ -14803,6 +14825,22 @@
"node": ">=0.10.0"
}
},
"node_modules/react-ace": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/react-ace/-/react-ace-10.1.0.tgz",
"integrity": "sha512-VkvUjZNhdYTuKOKQpMIZi7uzZZVgzCjM7cLYu6F64V0mejY8a2XTyPUIMszC6A4trbeMIHbK5fYFcT/wkP/8VA==",
"dependencies": {
"ace-builds": "^1.4.14",
"diff-match-patch": "^1.0.5",
"lodash.get": "^4.4.2",
"lodash.isequal": "^4.5.0",
"prop-types": "^15.7.2"
},
"peerDependencies": {
"react": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0",
"react-dom": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0"
}
},
"node_modules/react-app-polyfill": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz",

View file

@ -16,9 +16,11 @@
"@types/node": "^16.18.12",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"ace-builds": "^1.16.0",
"axios": "^1.3.2",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
@ -57,4 +59,4 @@
]
},
"proxy": "http://backend:7860"
}
}

View file

@ -10,6 +10,7 @@ import { typesContext } from "../../../../contexts/typesContext";
import { ParameterComponentType } from "../../../../types/components";
import FloatComponent from "../../../../components/floatComponent";
import Dropdown from "../../../../components/dropdownComponent";
import CodeAreaComponent from "../../../../components/codeAreaComponent";
export default function ParameterComponent({
left,
@ -136,9 +137,17 @@ export default function ParameterComponent({
data.node.template[name].options ? (
<Dropdown
options={data.node.template[name].options}
onSelect={(newValue) => data.node.template[name].value=newValue}
value={data.node.template[name].value??"chose an option"}
onSelect={(newValue) => (data.node.template[name].value = newValue)}
value={data.node.template[name].value ?? "chose an option"}
></Dropdown>
) : left === true && type === "code" ? (
<CodeAreaComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
}}
/>
) : (
<></>
)}

View file

@ -23,7 +23,6 @@ export default function GenericNode({
const { types, deleteNode } = useContext(typesContext);
const Icon = nodeIcons[types[data.type]];
if (!Icon) {
console.log(data);
if (showError.current) {
setErrorData({
title: data.type
@ -34,7 +33,6 @@ export default function GenericNode({
}
return;
}
return (
<div
className={classNames(

View file

@ -6,7 +6,7 @@ import {
XMarkIcon,
} from "@heroicons/react/24/outline";
import { MouseEventHandler, useContext, useEffect, useRef, useState } from "react";
import { sendAll } from "../../controllers/NodesServices";
import { sendAll } from "../../controllers/API";
import { alertContext } from "../../contexts/alertContext";
import { classNames, nodeColors } from "../../utils";
import { TabsContext } from "../../contexts/tabsContext";
@ -151,7 +151,7 @@ export default function Chat({ flow, reactFlowInstance }: ChatType) {
/>
Chat
</div>
<button className="hover:text-blue-500"
<button className="hover:text-blue-500 dark:text-white"
onClick={(e) => {
e.stopPropagation()
clearChat();

View file

@ -0,0 +1,50 @@
import { ArrowTopRightOnSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import CodeAreaModal from "../../modals/codeAreaModal";
import TextAreaModal from "../../modals/textAreaModal";
import { TextAreaComponentType } from "../../types/components";
// Read-only preview of a code value with a button that opens the code editor
// modal; edits made in the modal are propagated back through onChange.
export default function CodeAreaComponent({
  value,
  onChange,
  disabled,
}: TextAreaComponentType) {
  // Local copy of the value so the truncated preview updates immediately.
  const [myValue, setMyValue] = useState(value);
  const { openPopUp } = useContext(PopUpContext);

  // Clear both the preview and the stored value when the field is disabled.
  useEffect(() => {
    if (disabled) {
      setMyValue("");
      onChange("");
    }
  }, [disabled, onChange]);

  return (
    <div className={disabled ? "pointer-events-none cursor-not-allowed w-full" : "w-full"}>
      <div className="w-full flex items-center gap-3">
        <span
          className={
            "truncate block max-w-full text-gray-500 px-3 py-2 rounded-md border border-gray-300 dark:border-gray-700 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 sm:text-sm" +
            (disabled ? " bg-gray-200" : "")
          }
        >
          {myValue !== "" ? myValue : "Text empty"}
        </span>
        <button
          onClick={() => {
            // Open the editor modal seeded with the current value; the modal
            // writes back via setValue on save.
            openPopUp(
              <CodeAreaModal
                value={myValue}
                setValue={(t: string) => {
                  setMyValue(t);
                  onChange(t);
                }}
              />
            );
          }}
        >
          <ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600" />
        </button>
      </div>
    </div>
  );
}

View file

@ -1,6 +1,7 @@
import { ArrowTopRightOnSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import CodeAreaModal from "../../modals/codeAreaModal";
import TextAreaModal from "../../modals/textAreaModal";
import { TextAreaComponentType } from "../../types/components";
@ -9,8 +10,8 @@ export default function TextAreaComponent({ value, onChange, disabled }:TextArea
const { openPopUp } = useContext(PopUpContext);
useEffect(() => {
if (disabled) {
setMyValue([""]);
onChange([""]);
setMyValue("");
onChange("");
}
}, [disabled, onChange]);
return (

View file

@ -1,3 +1,4 @@
import { errorsTypeAPI } from './../../types/api/index';
import { APIObjectType, sendAllProps } from '../../types/api/index';
import axios, { AxiosResponse } from "axios";
@ -7,4 +8,9 @@ export async function getAll():Promise<AxiosResponse<APIObjectType>> {
export async function sendAll(data:sendAllProps) {
return await axios.post(`/predict`, data);
}
// POST the user's code to the backend /validate endpoint; the response
// carries per-category error lists for imports and the function body.
export async function checkCode(code:string):Promise<AxiosResponse<errorsTypeAPI>>{
  return await axios.post('/validate',{code})
}

View file

@ -0,0 +1,178 @@
import { Dialog, Transition } from "@headlessui/react";
import { XMarkIcon, CommandLineIcon } from "@heroicons/react/24/outline";
import { Fragment, useContext, useRef, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import AceEditor from "react-ace";
import "ace-builds/src-noconflict/mode-python";
import "ace-builds/src-noconflict/theme-github";
import "ace-builds/src-noconflict/theme-twilight";
import "ace-builds/src-noconflict/ext-language_tools";
import "ace-builds/webpack-resolver";
import { darkContext } from "../../contexts/darkContext";
import { checkCode } from "../../controllers/API";
import { alertContext } from "../../contexts/alertContext";
// Modal dialog with an Ace Python editor; "Check & Save" validates the code
// server-side (via /validate) and only persists it when validation passes.
export default function CodeAreaModal({
  value,
  setValue,
}: {
  setValue: (value: string) => void;
  value: string;
}) {
  const [open, setOpen] = useState(true);
  // Working copy of the code; only written back via setValue on save.
  const [code, setCode] = useState(value);
  const { dark } = useContext(darkContext);
  const { setErrorData, setSuccessData } = useContext(alertContext);
  const { closePopUp } = useContext(PopUpContext);
  const ref = useRef();

  // Close with a delay so the 300ms leave transition can finish first.
  function setModalOpen(x: boolean) {
    setOpen(x);
    if (x === false) {
      setTimeout(() => {
        closePopUp();
      }, 300);
    }
  }

  return (
    <Transition.Root show={open} appear={true} as={Fragment}>
      <Dialog
        as="div"
        className="relative z-10"
        onClose={setModalOpen}
        initialFocus={ref}
      >
        <Transition.Child
          as={Fragment}
          enter="ease-out duration-300"
          enterFrom="opacity-0"
          enterTo="opacity-100"
          leave="ease-in duration-200"
          leaveFrom="opacity-100"
          leaveTo="opacity-0"
        >
          <div className="fixed inset-0 bg-gray-500 dark:bg-gray-600 dark:bg-opacity-75 bg-opacity-75 transition-opacity" />
        </Transition.Child>

        <div className="fixed inset-0 z-10 overflow-y-auto">
          <div className="flex h-full items-end justify-center p-4 text-center sm:items-center sm:p-0">
            <Transition.Child
              as={Fragment}
              enter="ease-out duration-300"
              enterFrom="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
              enterTo="opacity-100 translate-y-0 sm:scale-100"
              leave="ease-in duration-200"
              leaveFrom="opacity-100 translate-y-0 sm:scale-100"
              leaveTo="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
            >
              <Dialog.Panel className="relative flex flex-col justify-between transform h-[600px] overflow-hidden rounded-lg bg-white dark:bg-gray-800 text-left shadow-xl transition-all sm:my-8 w-[700px]">
                <div className=" z-50 absolute top-0 right-0 hidden pt-4 pr-4 sm:block">
                  <button
                    type="button"
                    className="rounded-md text-gray-400 hover:text-gray-500 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2"
                    onClick={() => {
                      setModalOpen(false);
                    }}
                  >
                    <span className="sr-only">Close</span>
                    <XMarkIcon className="h-6 w-6" aria-hidden="true" />
                  </button>
                </div>
                <div className="h-full w-full flex flex-col justify-center items-center">
                  <div className="flex w-full pb-4 z-10 justify-center shadow-sm">
                    <div className="mx-auto mt-4 flex h-12 w-12 flex-shrink-0 items-center justify-center rounded-full bg-blue-100 dark:bg-gray-900 sm:mx-0 sm:h-10 sm:w-10">
                      <CommandLineIcon
                        className="h-6 w-6 text-blue-600"
                        aria-hidden="true"
                      />
                    </div>
                    <div className="mt-4 text-center sm:ml-4 sm:text-left">
                      <Dialog.Title
                        as="h3"
                        className="text-lg font-medium dark:text-white leading-10 text-gray-900"
                      >
                        Edit Code
                      </Dialog.Title>
                    </div>
                  </div>
                  <div className="h-full w-full bg-gray-200 overflow-auto dark:bg-gray-900 p-4 gap-4 flex flex-row justify-center items-center">
                    <div className="flex h-full w-full">
                      <div className="overflow-hidden px-4 py-5 sm:p-6 w-full h-full rounded-lg bg-white dark:bg-gray-800 shadow">
                        {/* Ace editor; theme follows the app's dark mode. */}
                        <AceEditor
                          value={code}
                          mode="python"
                          highlightActiveLine={true}
                          showPrintMargin={false}
                          fontSize={14}
                          showGutter
                          enableLiveAutocompletion
                          theme={dark ? "twilight" : "github"}
                          name="CodeEditor"
                          onChange={(value) => {
                            setCode(value);
                          }}
                          className="h-full w-full rounded-lg"
                        />
                      </div>
                    </div>
                  </div>
                  <div className="bg-gray-200 dark:bg-gray-900 w-full pb-3 flex flex-row-reverse px-4">
                    <button
                      type="button"
                      className="inline-flex w-full justify-center rounded-md border border-transparent bg-indigo-600 px-4 py-2 text-base font-medium text-white shadow-sm hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 sm:ml-3 sm:w-auto sm:text-sm"
                      onClick={() => {
                        // Validate server-side; persist only when both the
                        // import check and the function check come back clean.
                        checkCode(code)
                          .then((apiReturn) => {
                            if (apiReturn.data) {
                              let importsErrors = apiReturn.data.imports.errors;
                              let funcErrors = apiReturn.data.function.errors;
                              if (
                                funcErrors.length === 0 &&
                                importsErrors.length === 0
                              ) {
                                setSuccessData({
                                  title: "Code is ready to run",
                                });
                                setModalOpen(false);
                                setValue(code);
                              } else {
                                if (funcErrors.length !== 0) {
                                  setErrorData({
                                    title: "There is an error in your function",
                                    list: funcErrors,
                                  });
                                }
                                if (importsErrors.length !== 0) {
                                  setErrorData({
                                    title: "There is an error in your imports",
                                    list: importsErrors,
                                  });
                                }
                              }
                            } else {
                              setErrorData({
                                title: "Something went wrong, please try again",
                              });
                            }
                          })
                          .catch((_) =>
                            setErrorData({
                              title:
                                "There is something wrong with this code, please review it",
                            })
                          );
                      }}
                    >
                      Check &amp; Save
                    </button>
                  </div>
                </div>
              </Dialog.Panel>
            </Transition.Child>
          </div>
        </div>
      </Dialog>
    </Transition.Root>
  );
}

View file

@ -6,7 +6,7 @@ import {
nodeNames,
} from "../../../../utils";
import { useContext, useEffect, useState } from "react";
import { getAll } from "../../../../controllers/NodesServices";
import { getAll } from "../../../../controllers/API";
import { typesContext } from "../../../../contexts/typesContext";
import { APIClassType, APIKindType, APIObjectType } from "../../../../types/api";

View file

@ -15,4 +15,5 @@ export type sendAllProps={
message:string;
chatHistory:{message:string,isSend:boolean}[],
};
};
export type errorsTypeAPI={function:{errors:Array<string>},imports:{errors:Array<string>}}

View file

@ -36,7 +36,7 @@ export type InputListComponentType = {
export type TextAreaComponentType = {
disabled: boolean;
onChange: (value: string[] | string) => void;
value: string[] | string;
value: string;
};
export type DisclosureComponentType = {

28
tests/conftest.py Normal file
View file

@ -0,0 +1,28 @@
from pathlib import Path
import pytest
from fastapi.testclient import TestClient
def pytest_configure():
    """Register shared test constants on the ``pytest`` namespace.

    Exposes the example-flow JSON paths and a deliberately broken code
    snippet so tests can reference them as ``pytest.<NAME>``.
    """
    pytest.BASIC_EXAMPLE_PATH = (
        Path(__file__).parent.absolute() / "data" / "basic_example.json"
    )
    pytest.COMPLEX_EXAMPLE_PATH = (
        Path(__file__).parent.absolute() / "data" / "complex_example.json"
    )
    # Snippet with a typo ("retun") used to exercise code-validation errors.
    pytest.CODE_WITH_SYNTAX_ERROR = """
def get_text():
    retun "Hello World"
"""
# Create client fixture for FastAPI
@pytest.fixture(scope="module")
def client():
    """Module-scoped FastAPI ``TestClient`` bound to a fresh langflow app."""
    # Import inside the fixture so app creation happens at test time.
    from langflow.main import create_app

    app = create_app()
    # Context manager ensures startup/shutdown events run around the tests.
    with TestClient(app) as client:
        yield client

View file

@ -1,15 +1,16 @@
{
"name": "New Flow 9",
"id": "1",
"name": "New Flow ",
"id": "0",
"data": {
"nodes": [{
"nodes": [
{
"width": 384,
"height": 391,
"id": "dndnode_61",
"id": "dndnode_1",
"type": "genericNode",
"position": {
"x": 764,
"y": 382
"x": 644,
"y": 348
},
"data": {
"type": "LLMChain",
@ -17,7 +18,7 @@
"template": {
"_type": "llm_chain",
"memory": {
"type": "Memory",
"type": "BaseMemory",
"required": false,
"placeholder": "",
"list": false,
@ -34,7 +35,7 @@
"show": false,
"password": false,
"multiline": false,
"value": true
"value": false
},
"prompt": {
"type": "BasePromptTemplate",
@ -46,7 +47,7 @@
"multiline": false
},
"llm": {
"type": "BaseLLM",
"type": "BaseLanguageModel",
"required": true,
"placeholder": "",
"list": false,
@ -70,24 +71,22 @@
"Chain"
]
},
"id": "dndnode_61",
"id": "dndnode_1",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 764,
"y": 382
},
"dragging": false
"x": 644,
"y": 348
}
},
{
"width": 384,
"height": 351,
"id": "dndnode_62",
"id": "dndnode_4",
"type": "genericNode",
"position": {
"x": 1488,
"y": 550
"x": 1236,
"y": 39.59999999999957
},
"data": {
"type": "ZeroShotAgent",
@ -131,22 +130,22 @@
"Agent"
]
},
"id": "dndnode_62",
"id": "dndnode_4",
"value": null
},
"positionAbsolute": {
"x": 1488,
"y": 550
"x": 1236,
"y": 39.59999999999957
}
},
{
"width": 384,
"height": 529,
"id": "dndnode_63",
"id": "dndnode_5",
"type": "genericNode",
"position": {
"x": 206,
"y": 210
"x": 96,
"y": -299.2000000000003
},
"data": {
"type": "ZeroShotPrompt",
@ -186,24 +185,24 @@
"BasePromptTemplate"
]
},
"id": "dndnode_63",
"id": "dndnode_5",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 206,
"y": 210
"x": 96,
"y": -299.2000000000003
},
"dragging": false
},
{
"width": 384,
"height": 501,
"id": "dndnode_64",
"height": 477,
"id": "dndnode_8",
"type": "genericNode",
"position": {
"x": 210,
"y": 818
"x": 75,
"y": 328
},
"data": {
"type": "OpenAI",
@ -228,7 +227,7 @@
"show": false,
"password": false,
"multiline": false,
"value": true
"value": null
},
"client": {
"type": "Any",
@ -250,7 +249,6 @@
"multiline": false,
"value": "text-davinci-003",
"options": [
"gpt-3.5-turbo",
"text-davinci-003",
"text-davinci-002"
]
@ -343,7 +341,7 @@
"show": true,
"password": true,
"multiline": false,
"value": "---"
"value": "sk-"
},
"batch_size": {
"type": "int",
@ -399,27 +397,28 @@
"description": "Generic OpenAI class that uses model name.",
"base_classes": [
"BaseOpenAI",
"BaseLLM"
"BaseLLM",
"BaseLanguageModel"
]
},
"id": "dndnode_64",
"id": "dndnode_8",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 210,
"y": 818
"x": 75,
"y": 328
},
"dragging": false
},
{
"width": 384,
"height": 397,
"id": "dndnode_65",
"id": "dndnode_9",
"type": "genericNode",
"position": {
"x": 776,
"y": 922
"x": 643,
"y": 824
},
"data": {
"type": "Serper Search",
@ -427,7 +426,7 @@
"template": {
"serper_api_key": {
"type": "str",
"required": false,
"required": true,
"list": false,
"show": true,
"placeholder": "",
@ -441,101 +440,66 @@
"Tool"
]
},
"id": "dndnode_65",
"id": "dndnode_9",
"value": null
},
"selected": false,
"selected": true,
"positionAbsolute": {
"x": 776,
"y": 922
"x": 643,
"y": 824
},
"dragging": false
}
],
"edges": [{
"source": "dndnode_63",
"sourceHandle": "ZeroShotPrompt|dndnode_63|BasePromptTemplate",
"target": "dndnode_61",
"targetHandle": "BasePromptTemplate|prompt|dndnode_61",
"edges": [
{
"source": "dndnode_5",
"sourceHandle": "ZeroShotPrompt|dndnode_5|BasePromptTemplate",
"target": "dndnode_1",
"targetHandle": "BasePromptTemplate|prompt|dndnode_1",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_63ZeroShotPrompt|dndnode_63|BasePromptTemplate-dndnode_61BasePromptTemplate|prompt|dndnode_61"
"id": "reactflow__edge-dndnode_5ZeroShotPrompt|dndnode_5|BasePromptTemplate-dndnode_1BasePromptTemplate|prompt|dndnode_1"
},
{
"source": "dndnode_64",
"sourceHandle": "OpenAI|dndnode_64|BaseOpenAI,|BaseLLM",
"target": "dndnode_61",
"targetHandle": "BaseLLM|llm|dndnode_61",
"source": "dndnode_1",
"sourceHandle": "LLMChain|dndnode_1|Chain",
"target": "dndnode_4",
"targetHandle": "LLMChain|llm_chain|dndnode_4",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_64OpenAI|dndnode_64|BaseOpenAI,|BaseLLM-dndnode_61BaseLLM|llm|dndnode_61"
"id": "reactflow__edge-dndnode_1LLMChain|dndnode_1|Chain-dndnode_4LLMChain|llm_chain|dndnode_4"
},
{
"source": "dndnode_65",
"sourceHandle": "Serper Search|dndnode_65|Tool",
"target": "dndnode_62",
"targetHandle": "Tool|allowed_tools|dndnode_62",
"source": "dndnode_8",
"sourceHandle": "OpenAI|dndnode_8|BaseOpenAI|BaseLLM|BaseLanguageModel",
"target": "dndnode_1",
"targetHandle": "BaseLanguageModel|llm|dndnode_1",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_65Serper Search|dndnode_65|Tool-dndnode_62Tool|allowed_tools|dndnode_62"
"id": "reactflow__edge-dndnode_8OpenAI|dndnode_8|BaseOpenAI|BaseLLM|BaseLanguageModel-dndnode_1BaseLanguageModel|llm|dndnode_1"
},
{
"source": "dndnode_61",
"sourceHandle": "LLMChain|dndnode_61|Chain",
"target": "dndnode_62",
"targetHandle": "LLMChain|llm_chain|dndnode_62",
"source": "dndnode_9",
"sourceHandle": "Serper Search|dndnode_9|Tool",
"target": "dndnode_4",
"targetHandle": "Tool|allowed_tools|dndnode_4",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_61LLMChain|dndnode_61|Chain-dndnode_62LLMChain|llm_chain|dndnode_62"
"id": "reactflow__edge-dndnode_9Serper Search|dndnode_9|Tool-dndnode_4Tool|allowed_tools|dndnode_4"
}
],
"viewport": {
"x": -103,
"y": -135,
"zoom": 1
"x": 22.5,
"y": 42,
"zoom": 0.5
}
},
"chat": [{
"message": "What is the new llm Prismer?",
"chat": [
{
"message": "Langflow. What is it?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "Prismer LLM is a vision-language model with an ensemble of experts.",
"message": "Langflow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chrome extension powered by GPT-3.",
"isSend": false,
"thought": "Thought: I should research what this is\nAction: Serper Search\nAction Input: \"prismer llm\"\nObservation: Announcing Prismer, my team's latest vision-language AI, empowered by domain-expert models in depth, surface normal, segmentation, etc. My team's work, Prismer, is a representative example. We use a textual LM as the backbone, and plug in many visual domain experts through a neural adapter ... “Prismer is able to achieve superior data efficiency compared to ... New OpenAI post about future of ChatGPT's (and its other LLM) alignment. Prismer: A Vision-Language Model with An Ensemble of Experts ... UDAPDR: Unsupervised Domain Adaptation via LLM Prompting and Distillation of Rerankers. Emory Law's LLM program offers graduates of US and foreign law schools the opportunity to pursue the master of laws degree from one of the top nationally ranked ... Missing: prismer | Must include:prismer. Prismer: A Vision-Language Model with An Ensemble of Experts ... github: github.com/NVlabs/prismer ... into LLM embedding space viously to their being sworn, the prisoner, Michael. Solomon, standing at the bar, challenged two ... a« wa« brought before- them.\" The prisoner ?llM'eow*. RWKV is a RNN with transformer-level LLM performance. ... The implementation of \"Prismer: A Vision-Language Model with An Ensemble of Experts\". Announcing Prismer, our NVIDIA team's latest vision-language #ai empowered by domain-expert models in depth, surface normal, segmentation, etc. No paywall.\nThought:\n\n> Finished chain."
},
{
"message": "What is the prismer llm?",
"isSend": true
},
{
"message": "Prismer is an AI vision-language model with an ensemble of experts developed by a NVIDIA team. It is capable of achieving superior data efficiency compared to other models and can be directly trained like a GPT.",
"isSend": false,
"thought": "Thought: I need to search for the term\nAction: Serper Search\nAction Input: \"prismer llm\"\nObservation: Announcing Prismer, my team's latest vision-language AI, empowered by domain-expert models in depth, surface normal, segmentation, etc. My team's work, Prismer, is a representative example. We use a textual LM as the backbone, and plug in many visual domain experts through a neural adapter ... “Prismer is able to achieve superior data efficiency compared to ... New OpenAI post about future of ChatGPT's (and its other LLM) alignment. Prismer: A Vision-Language Model with An Ensemble of Experts ... UDAPDR: Unsupervised Domain Adaptation via LLM Prompting and Distillation of Rerankers. A very insightful post about the potential issues with LLM chatbots for general ... Announcing Prismer, our NVIDIA team's latest vision-language #ai ... As technology continues to transform our economy and culture, businesses need a new breed of lawyers who understand the legal and commercial aspects of new ... Missing: prismer | Must include:prismer. RWKV is a RNN with transformer-level LLM performance. ... The implementation of \"Prismer: A Vision-Language Model with An Ensemble of Experts\". ... Bendix vorad collision warning system, Vegglampe med prismer, Ok magazine ... Alabama llm, Colt 25 caliber magazine, Poppie clinch, Mcds upper school, ... RWKV is an RNN with transformer-level LLM performance. It can be directly trained like a GPT (parallelizable). So it's combining the best of RNN ...\nThought:\n\n> Finished chain."
"thought": "> Entering new AgentExecutor chain...\n I need to research what Langflow is.\nAction: Serper Search\nAction Input: \"What is Langflow?\"\nObservation: Researchers introduce LangFlow, a graphical user interface (GUI) for LangChain that simplifies testing and creation of smart applications. The drag-and-drop feature provides a quick and effortless way to experiment and prototype, and the built-in chat interface enables real-time ... LangFlow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop ... LangFlow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a ... an open-source chrome extension powered by GPT-3 to get the explanation of a piece of code on any site (Stackoverflow, Github, etc). ChatGPT correctly identified Mona Lisa but failed to guess the cartoon character Michael was thinking of, suggesting Bugs Bunny holding a ... Introducing LangFlow! An open-source UI for. @LangChainAI. that enables seamless combination of multiple deep learning models to perform ... See student reviews, school photos, Chinese Mandarin courses, and housing options available at Langflow Education Centre (Macau, Macau) - Reviews - Language ... Longfellow Elementary students play on the playground during recess. Longfellow Elementary students pause for a photo during class.\nThought: Langflow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chrome extension powered by GPT-3.\nFinal Answer: Langflow is a GUI for LangChain, designed with react-flow to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chrome extension powered by GPT-3.\n\n> Finished chain."
}
]
}

View file

@ -0,0 +1,723 @@
{
"name": "New Flow",
"id": "0",
"data": {
"nodes": [
{
"width": 384,
"height": 351,
"id": "dndnode_3",
"type": "genericNode",
"position": {
"x": 612.9299322834961,
"y": 194.75070242078417
},
"data": {
"type": "ZeroShotAgent",
"node": {
"template": {
"_type": "zero-shot-react-description",
"llm_chain": {
"type": "LLMChain",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"allowed_tools": {
"type": "Tool",
"required": false,
"placeholder": "",
"list": true,
"show": true,
"password": false,
"multiline": false,
"value": null
},
"return_values": {
"type": "str",
"required": false,
"placeholder": "",
"list": true,
"show": false,
"password": false,
"multiline": false,
"value": [
"output"
]
}
},
"description": "Agent for the MRKL chain.",
"base_classes": [
"Agent",
"function"
]
},
"id": "dndnode_3",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 612.9299322834961,
"y": 194.75070242078417
},
"dragging": false
},
{
"width": 384,
"height": 463,
"id": "dndnode_27",
"type": "genericNode",
"position": {
"x": 86.29922452047686,
"y": 39.132143332238115
},
"data": {
"type": "Tool",
"node": {
"template": {
"name": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "Uppercase",
"password": false,
"multiline": false
},
"description": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "Returns an uppercase version of the text passed.",
"password": false,
"multiline": false
},
"func": {
"type": "function",
"required": true,
"list": false,
"show": true,
"value": "",
"multiline": false,
"password": false
},
"_type": "Tool"
},
"name": "Tool",
"func": "",
"description": "",
"base_classes": [
"Tool"
]
},
"id": "dndnode_27",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 86.29922452047686,
"y": 39.132143332238115
},
"dragging": false
},
{
"width": 384,
"height": 463,
"id": "dndnode_28",
"type": "genericNode",
"position": {
"x": 1134.4549802672202,
"y": 287.9885910233929
},
"data": {
"type": "Tool",
"node": {
"template": {
"name": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "",
"password": false,
"multiline": false
},
"description": {
"type": "str",
"required": true,
"list": false,
"show": true,
"placeholder": "",
"value": "",
"password": false,
"multiline": false
},
"func": {
"type": "function",
"required": true,
"list": false,
"show": true,
"value": "",
"multiline": false,
"password": false
},
"_type": "Tool"
},
"name": "Tool",
"func": "",
"description": "",
"base_classes": [
"Tool"
]
},
"id": "dndnode_28",
"value": null
},
"positionAbsolute": {
"x": 1134.4549802672202,
"y": 287.9885910233929
},
"selected": false,
"dragging": false
},
{
"width": 384,
"height": 357,
"id": "dndnode_40",
"type": "genericNode",
"position": {
"x": -366.4341715850213,
"y": 136.29836646158452
},
"data": {
"type": "PythonFunction",
"node": {
"template": {
"code": {
"required": true,
"placeholder": "",
"show": true,
"multiline": true,
"value": "\ndef upper_case(text: str) -> str:\n return text.upper()\n",
"name": "code",
"type": "str",
"list": false
},
"_type": "python_function"
},
"description": "Python function to be executed.",
"base_classes": [
"function"
]
},
"id": "dndnode_40",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": -366.4341715850213,
"y": 136.29836646158452
},
"dragging": false
},
{
"width": 384,
"height": 351,
"id": "dndnode_41",
"type": "genericNode",
"position": {
"x": 1642.7653281427417,
"y": 69.01105573790835
},
"data": {
"type": "ZeroShotAgent",
"node": {
"template": {
"_type": "zero-shot-react-description",
"llm_chain": {
"type": "LLMChain",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"allowed_tools": {
"type": "Tool",
"required": false,
"placeholder": "",
"list": true,
"show": true,
"password": false,
"multiline": false,
"value": null
},
"return_values": {
"type": "str",
"required": false,
"placeholder": "",
"list": true,
"show": false,
"password": false,
"multiline": false,
"value": [
"output"
]
}
},
"description": "Agent for the MRKL chain.",
"base_classes": [
"Agent",
"function"
]
},
"id": "dndnode_41",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 1642.7653281427417,
"y": 69.01105573790835
},
"dragging": false
},
{
"width": 384,
"height": 529,
"id": "dndnode_42",
"type": "genericNode",
"position": {
"x": -379.23467185725826,
"y": -551.3889442620921
},
"data": {
"type": "ZeroShotPrompt",
"node": {
"template": {
"prefix": {
"required": false,
"placeholder": "",
"show": true,
"multiline": true,
"value": "Answer the following questions as best you can. You have access to the following tools:",
"name": "prefix",
"type": "str",
"list": false
},
"suffix": {
"required": true,
"placeholder": "",
"show": true,
"multiline": true,
"value": "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}",
"name": "suffix",
"type": "str",
"list": false
},
"format_instructions": {
"required": false,
"placeholder": "",
"show": true,
"multiline": true,
"value": "Use the following format:\n\nQuestion: the input question you must answer\nThought: you should always think about what to do\nAction: the action to take, should be one of [{tool_names}]\nAction Input: the input to the action\nObservation: the result of the action\n... (this Thought/Action/Action Input/Observation can repeat N times)\nThought: I now know the final answer\nFinal Answer: the final answer to the original input question",
"name": "format_instructions",
"type": "str",
"list": false
},
"_type": "zero_shot"
},
"description": "Prompt template for Zero Shot Agent.",
"base_classes": [
"BasePromptTemplate"
]
},
"id": "dndnode_42",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": -379.23467185725826,
"y": -551.3889442620921
},
"dragging": false
},
{
"width": 384,
"height": 391,
"id": "dndnode_43",
"type": "genericNode",
"position": {
"x": 100.76532814274174,
"y": -437.78894426209195
},
"data": {
"type": "LLMChain",
"node": {
"template": {
"_type": "llm_chain",
"memory": {
"type": "BaseMemory",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false,
"value": null
},
"verbose": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": false
},
"prompt": {
"type": "BasePromptTemplate",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"llm": {
"type": "BaseLanguageModel",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false
},
"output_key": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": true,
"multiline": false,
"value": "text"
}
},
"description": "Chain to run queries against LLMs.",
"base_classes": [
"Chain"
]
},
"id": "dndnode_43",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": 100.76532814274174,
"y": -437.78894426209195
},
"dragging": false
},
{
"width": 384,
"height": 477,
"id": "dndnode_44",
"type": "genericNode",
"position": {
"x": -841.2346718572583,
"y": 368.6110557379079
},
"data": {
"type": "OpenAI",
"node": {
"template": {
"_type": "openai",
"cache": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"verbose": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"client": {
"type": "Any",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"model_name": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false,
"value": "text-davinci-003",
"options": [
"text-davinci-003",
"text-davinci-002",
"text-curie-001",
"text-babbage-001",
"text-ada-001"
]
},
"temperature": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": false,
"multiline": false,
"value": 0.7
},
"max_tokens": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": true,
"multiline": false,
"value": 256
},
"top_p": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 1
},
"frequency_penalty": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 0
},
"presence_penalty": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 0
},
"n": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 1
},
"best_of": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 1
},
"model_kwargs": {
"type": "dict[str, Any]",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"openai_api_key": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"password": true,
"multiline": false,
"value": "sk-"
},
"batch_size": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 20
},
"request_timeout": {
"type": "Union[float, Tuple[float, float], NoneType]",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"logit_bias": {
"type": "dict[str, float]",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": null
},
"max_retries": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": 6
},
"streaming": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": false,
"password": false,
"multiline": false,
"value": false
}
},
"description": "Generic OpenAI class that uses model name.",
"base_classes": [
"BaseOpenAI",
"BaseLLM",
"BaseLanguageModel"
]
},
"id": "dndnode_44",
"value": null
},
"selected": false,
"positionAbsolute": {
"x": -841.2346718572583,
"y": 368.6110557379079
},
"dragging": false
}
],
"edges": [
{
"source": "dndnode_27",
"sourceHandle": "Tool|dndnode_27|Tool",
"target": "dndnode_3",
"targetHandle": "Tool|allowed_tools|dndnode_3",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_27Tool|dndnode_27|Tool-dndnode_3Tool|allowed_tools|dndnode_3"
},
{
"source": "dndnode_3",
"sourceHandle": "ZeroShotAgent|dndnode_3|Agent|function",
"target": "dndnode_28",
"targetHandle": "function|func|dndnode_28",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_3ZeroShotAgent|dndnode_3|Agent|function-dndnode_28function|func|dndnode_28"
},
{
"source": "dndnode_40",
"sourceHandle": "PythonFunction|dndnode_40|function",
"target": "dndnode_27",
"targetHandle": "function|func|dndnode_27",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_40PythonFunction|dndnode_40|function-dndnode_27function|func|dndnode_27"
},
{
"source": "dndnode_28",
"sourceHandle": "Tool|dndnode_28|Tool",
"target": "dndnode_41",
"targetHandle": "Tool|allowed_tools|dndnode_41",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_28Tool|dndnode_28|Tool-dndnode_41Tool|allowed_tools|dndnode_41"
},
{
"source": "dndnode_42",
"sourceHandle": "ZeroShotPrompt|dndnode_42|BasePromptTemplate",
"target": "dndnode_43",
"targetHandle": "BasePromptTemplate|prompt|dndnode_43",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_42ZeroShotPrompt|dndnode_42|BasePromptTemplate-dndnode_43BasePromptTemplate|prompt|dndnode_43"
},
{
"source": "dndnode_44",
"sourceHandle": "OpenAI|dndnode_44|BaseOpenAI|BaseLLM|BaseLanguageModel",
"target": "dndnode_43",
"targetHandle": "BaseLanguageModel|llm|dndnode_43",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_44OpenAI|dndnode_44|BaseOpenAI|BaseLLM|BaseLanguageModel-dndnode_43BaseLanguageModel|llm|dndnode_43"
},
{
"source": "dndnode_43",
"sourceHandle": "LLMChain|dndnode_43|Chain",
"target": "dndnode_3",
"targetHandle": "LLMChain|llm_chain|dndnode_3",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_43LLMChain|dndnode_43|Chain-dndnode_3LLMChain|llm_chain|dndnode_3"
},
{
"source": "dndnode_43",
"sourceHandle": "LLMChain|dndnode_43|Chain",
"target": "dndnode_41",
"targetHandle": "LLMChain|llm_chain|dndnode_41",
"className": "animate-pulse",
"id": "reactflow__edge-dndnode_43LLMChain|dndnode_43|Chain-dndnode_41LLMChain|llm_chain|dndnode_41"
}
],
"viewport": {
"x": 250.11733592862913,
"y": 349.94447213104604,
"zoom": 0.5
}
},
"chat": []
}

View file

@ -0,0 +1,16 @@
# Tests for the PythonFunction custom type.
from langflow.interface.custom_types import PythonFunction
from langflow.utils import constants
import pytest
def test_python_function():
    """Test Python function"""
    # Build a PythonFunction from the project's default code snippet and
    # check it round-trips "text" unchanged — assumes the default function
    # is an identity for this input; TODO confirm against constants.
    func = PythonFunction(code=constants.DEFAULT_PYTHON_FUNCTION)
    assert func.get_function()("text") == "text"
    # the tool decorator should raise an error if
    # the function is not str -> str
    # This raises ValidationError
    # NOTE(review): the assertion below expects SyntaxError, not
    # ValidationError — confirm which exception PythonFunction actually
    # raises on code that fails to parse.
    with pytest.raises(SyntaxError):
        func = PythonFunction(code=pytest.CODE_WITH_SYNTAX_ERROR)

76
tests/test_endpoints.py Normal file
View file

@ -0,0 +1,76 @@
from langflow.interface.listing import CUSTOM_TOOLS
from fastapi.testclient import TestClient
def test_get_all(client: TestClient):
    """GET /all must advertise every prompt and every registered custom tool."""
    resp = client.get("/all")
    assert resp.status_code == 200
    payload = resp.json()
    # The custom ZeroShotPrompt node has to be listed among the prompts.
    assert "ZeroShotPrompt" in payload["prompts"]
    # Each custom tool registered in CUSTOM_TOOLS must appear in the listing.
    missing = [name for name in CUSTOM_TOOLS if name not in payload["tools"]]
    assert not missing
def test_post_validate_code(client: TestClient):
    """POST /validate reports import errors and function errors separately.

    Exercises: clean code, a bad import, broken syntax, a malformed payload,
    and an empty code string.
    """
    # Valid import and valid function: both error lists come back empty.
    code1 = """
import math
def square(x):
    return x ** 2
"""
    response1 = client.post("/validate", json={"code": code1})
    assert response1.status_code == 200
    assert response1.json() == {"imports": {"errors": []}, "function": {"errors": []}}
    # Invalid import, valid function: only the import error is reported.
    code2 = """
import non_existent_module
def square(x):
    return x ** 2
"""
    response2 = client.post("/validate", json={"code": code2})
    assert response2.status_code == 200
    assert response2.json() == {
        "imports": {"errors": ["No module named 'non_existent_module'"]},
        "function": {"errors": []},
    }
    # Valid import, broken function syntax: only the function error is reported.
    code3 = """
import math
def square(x)
    return x ** 2
"""
    response3 = client.post("/validate", json={"code": code3})
    assert response3.status_code == 200
    assert response3.json() == {
        "imports": {"errors": []},
        "function": {"errors": ["expected ':' (<unknown>, line 4)"]},
    }
    # A payload without the required "code" key is rejected with 422.
    response4 = client.post("/validate", json={"invalid_key": code1})
    assert response4.status_code == 422
    # An empty code string validates cleanly.
    response5 = client.post("/validate", json={"code": ""})
    assert response5.status_code == 200
    assert response5.json() == {"imports": {"errors": []}, "function": {"errors": []}}
    # NOTE: a sixth case (code6) was removed — it duplicated code3 byte-for-byte
    # with identical assertions and added no coverage.

300
tests/test_graph.py Normal file
View file

@ -0,0 +1,300 @@
import json
from langflow.graph.graph import Edge, Graph, Node
import pytest
from langflow.utils.payload import build_json, get_root_node
from langchain.agents import AgentExecutor
# Test cases for the graph module
def get_graph(basic=True):
    """Load one of the example flow JSON files and wrap it in a Graph.

    `basic=True` selects the basic example flow; otherwise the complex one.
    """
    json_path = pytest.BASIC_EXAMPLE_PATH if basic else pytest.COMPLEX_EXAMPLE_PATH
    with open(json_path, "r") as flow_file:
        payload = json.load(flow_file)
    graph_data = payload["data"]
    return Graph(graph_data["nodes"], graph_data["edges"])
def test_get_nodes_with_target():
    """Nodes that feed into the root node can be retrieved."""
    graph = get_graph()
    assert isinstance(graph, Graph)
    # The root is the node nothing else targets.
    root = get_root_node(graph)
    assert root is not None
    incoming = graph.get_nodes_with_target(root)
    assert incoming is not None
def test_get_node_neighbors_basic():
    """In the basic flow the agent neighbors a chain and the Serper tool."""
    graph = get_graph(basic=True)
    assert isinstance(graph, Graph)
    root = get_root_node(graph)
    assert root is not None
    neighbors = graph.get_node_neighbors(root)
    assert neighbors is not None
    assert isinstance(neighbors, dict)

    def connected(fragment, mapping):
        # Neighbors with a live edge whose type name contains `fragment`.
        return [
            node
            for node, count in mapping.items()
            if count and fragment in node.data["type"]
        ]

    # The agent requires an LLMChain and the Serper Search tool.
    assert connected("Chain", neighbors)
    assert connected("Serper", neighbors)
    # Now inspect the chain's own neighborhood.
    chain = connected("Chain", neighbors)[0]
    chain_neighbors = graph.get_node_neighbors(chain)
    assert chain_neighbors is not None
    assert isinstance(chain_neighbors, dict)
    # The chain is wired to an OpenAI LLM and a prompt template.
    assert connected("OpenAI", chain_neighbors)
    assert connected("Prompt", chain_neighbors)
def test_get_node_neighbors_complex():
    """Walk the complex flow: outer agent -> tool -> inner agent -> tool -> PythonFunction."""
    graph = get_graph(basic=False)
    assert isinstance(graph, Graph)
    root = get_root_node(graph)
    assert root is not None
    neighbors = graph.get_nodes_with_target(root)
    assert neighbors is not None
    # get_nodes_with_target returns a plain list of nodes.
    assert isinstance(neighbors, list)

    def types_of(nodes):
        return [node.data["type"] for node in nodes]

    def first_of(kind, nodes):
        # First node whose type name mentions `kind`.
        return next(node for node in nodes if kind in node.data["type"])

    # The outer agent is wired to a chain and at least one tool.
    assert any("Chain" in t for t in types_of(neighbors))
    assert any("Tool" in t for t in types_of(neighbors))
    # The chain is backed by an OpenAI LLM and a prompt template.
    chain_neighbors = graph.get_nodes_with_target(first_of("Chain", neighbors))
    assert chain_neighbors is not None
    assert any("OpenAI" in t for t in types_of(chain_neighbors))
    assert any("Prompt" in t for t in types_of(chain_neighbors))
    # The tool wraps another agent (agents-as-tools).
    tool_neighbors = graph.get_nodes_with_target(first_of("Tool", neighbors))
    assert tool_neighbors is not None
    assert any("Agent" in t for t in types_of(tool_neighbors))
    # That inner agent carries a tool of its own...
    agent_neighbors = graph.get_nodes_with_target(first_of("Agent", tool_neighbors))
    assert agent_neighbors is not None
    assert any("Tool" in t for t in types_of(agent_neighbors))
    # ...whose func is ultimately a PythonFunction.
    inner_tool_neighbors = graph.get_nodes_with_target(first_of("Tool", agent_neighbors))
    assert inner_tool_neighbors is not None
    assert any("PythonFunction" in t for t in types_of(inner_tool_neighbors))
def test_get_node():
    """Graph.get_node returns the Node matching a known id."""
    graph = get_graph()
    first_id = graph.nodes[0].id
    fetched = graph.get_node(first_id)
    assert isinstance(fetched, Node)
    assert fetched.id == first_id
def test_build_nodes():
    """Every raw node dict is materialized as a Node instance."""
    graph = get_graph()
    # One built Node per raw node dict.
    assert len(graph.nodes) == len(graph._nodes)
    for built in graph.nodes:
        assert isinstance(built, Node)
def test_build_edges():
    """Every raw edge is materialized as an Edge joining two Nodes."""
    graph = get_graph()
    # One built Edge per raw edge dict.
    assert len(graph.edges) == len(graph._edges)
    for built in graph.edges:
        assert isinstance(built, Edge)
        assert isinstance(built.source, Node)
        assert isinstance(built.target, Node)
def test_get_root_node():
    """Both example flows are rooted at a ZeroShotAgent."""
    for basic in (True, False):
        graph = get_graph(basic=basic)
        assert isinstance(graph, Graph)
        root = get_root_node(graph)
        assert root is not None
        assert isinstance(root, Node)
        assert root.data["type"] == "ZeroShotAgent"
def test_build_json():
    """build_json serializes the basic flow into the expected agent spec."""
    graph = get_graph()
    assert isinstance(graph, Graph)
    root = get_root_node(graph)
    spec = build_json(root, graph)
    assert isinstance(spec, dict)
    assert spec["_type"] == "zero-shot-react-description"
    # The nested LLM chain spec carries its defaults.
    chain_spec = spec["llm_chain"]
    assert isinstance(chain_spec, dict)
    assert chain_spec["_type"] == "llm_chain"
    assert chain_spec["memory"] is None
    assert chain_spec["verbose"] is False
    assert isinstance(chain_spec["prompt"], dict)
    assert isinstance(chain_spec["llm"], dict)
    assert chain_spec["output_key"] == "text"
    # Tools serialize as dicts; return values as plain strings.
    assert isinstance(spec["allowed_tools"], list)
    assert all(isinstance(tool, dict) for tool in spec["allowed_tools"])
    assert isinstance(spec["return_values"], list)
    assert all(isinstance(val, str) for val in spec["return_values"])
def test_validate_edges():
    """Every edge in the basic flow passes type validation."""
    graph = get_graph()
    assert isinstance(graph, Graph)
    # No edge connects incompatible node types.
    assert all(edge.valid for edge in graph.edges)
def test_matched_type():
    """Each valid edge records which of its source's types it matched on."""
    graph = get_graph()
    assert isinstance(graph, Graph)
    assert all(edge.valid for edge in graph.edges)
    assert all(hasattr(edge, "matched_type") for edge in graph.edges)
    # The recorded match must be one of the source's advertised types.
    assert all(edge.matched_type in edge.source_types for edge in graph.edges)
def test_build_params():
    """Params built from edges resolve into Nodes, lists of Nodes, or values."""
    graph = get_graph()
    assert isinstance(graph, Graph)
    # Sanity: every edge is valid and carries its matched type.
    assert all(edge.valid for edge in graph.edges)
    assert all(hasattr(edge, "matched_type") for edge in graph.edges)
    assert all(edge.matched_type in edge.source_types for edge in graph.edges)

    root = get_root_node(graph)
    # The root ZeroShotAgent requires llm_chain, allowed_tools and return_values.
    assert isinstance(root.params, dict)
    for key in ("llm_chain", "allowed_tools", "return_values"):
        assert key in root.params
    # llm_chain resolves to a single connected Node.
    assert isinstance(root.params["llm_chain"], Node)
    # allowed_tools is a list param, so it resolves to a list of Nodes.
    assert isinstance(root.params["allowed_tools"], list)
    assert all(isinstance(tool, Node) for tool in root.params["allowed_tools"])
    # return_values is declared as str, so it resolves to a list of strings.
    assert isinstance(root.params["return_values"], list)
    assert all(isinstance(val, str) for val in root.params["return_values"])

    # The chain node itself resolves prompt and llm to Nodes.
    chain = root.params["llm_chain"]
    assert isinstance(chain.params, dict)
    assert "prompt" in chain.params
    assert "llm" in chain.params
    assert isinstance(chain.params["prompt"], Node)
    assert isinstance(chain.params["llm"], Node)

    # The prompt carries its three template strings.
    prompt = chain.params["prompt"]
    assert isinstance(prompt.params, dict)
    for key in ("format_instructions", "suffix", "prefix"):
        assert key in prompt.params
        assert isinstance(prompt.params[key], str)

    # The llm exposes its model name as a plain string.
    llm = chain.params["llm"]
    assert isinstance(llm.params, dict)
    assert "model_name" in llm.params
    assert isinstance(llm.params["model_name"], str)
def test_build():
    """Graph.build produces a runnable AgentExecutor for both example flows.

    (A 19-line commented-out copy of the production ``Node.build``
    implementation was removed — stale duplicated code in comments drifts
    out of sync with the real implementation.)
    """
    # Basic example flow.
    graph = get_graph()
    assert isinstance(graph, Graph)
    agent = graph.build()
    assert isinstance(agent, AgentExecutor)
    # Complex example flow (agents-as-tools).
    graph = get_graph(basic=False)
    assert isinstance(graph, Graph)
    agent = graph.build()
    assert isinstance(agent, AgentExecutor)

View file

@ -1,11 +1,27 @@
from pathlib import Path
import json
from langflow.graph.graph import Graph
import pytest
from langflow import load_flow_from_json
from langflow.utils.payload import get_root_node
from langchain.agents import AgentExecutor
def test_load_flow_from_json():
    """Test loading a flow from a json file.

    Fix: the original computed a local ``path`` that was never used and
    loaded the flow twice, discarding the first result.
    """
    loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH)
    assert loaded is not None
    assert isinstance(loaded, AgentExecutor)
def test_get_root_node():
    """The root of the basic example graph exposes id and data attributes."""
    with open(pytest.BASIC_EXAMPLE_PATH, "r") as flow_file:
        payload = json.load(flow_file)
    graph_data = payload["data"]
    graph = Graph(graph_data["nodes"], graph_data["edges"])
    root = get_root_node(graph)
    assert root is not None
    assert hasattr(root, "id")
    assert hasattr(root, "data")

106
tests/test_validate_code.py Normal file
View file

@ -0,0 +1,106 @@
from langflow.utils.validate import (
create_function,
extract_function_name,
validate_code,
execute_function,
)
import pytest
from requests.exceptions import MissingSchema
from unittest import mock
def test_validate_code():
    """validate_code reports import problems and syntax problems separately."""
    # Clean import and clean function: nothing to report.
    good = """
import math
def square(x):
    return x ** 2
"""
    assert validate_code(good) == {
        "imports": {"errors": []},
        "function": {"errors": []},
    }
    # Unknown module: the import error is reported, the function is fine.
    bad_import = """
import non_existent_module
def square(x):
    return x ** 2
"""
    assert validate_code(bad_import) == {
        "imports": {"errors": ["No module named 'non_existent_module'"]},
        "function": {"errors": []},
    }
    # Missing colon: the syntax error lands under "function".
    bad_syntax = """
import math
def square(x)
    return x ** 2
"""
    assert validate_code(bad_syntax) == {
        "imports": {"errors": []},
        "function": {"errors": ["expected ':' (<unknown>, line 4)"]},
    }
def test_execute_function_success():
    """A well-formed function is executed and its return value surfaced."""
    source = """
import math
def my_function(x):
    return math.sin(x) + 1
"""
    # sin(0.5) + 1 == 1.479425538604203
    assert execute_function(source, "my_function", 0.5) == 1.479425538604203
def test_execute_function_missing_module():
    """Importing a nonexistent module surfaces as ModuleNotFoundError."""
    broken = """
import some_missing_module
def my_function(x):
    return some_missing_module.some_function(x)
"""
    with pytest.raises(ModuleNotFoundError):
        execute_function(broken, "my_function", 0.5)
def test_execute_function_missing_function():
    """Calling a nonexistent attribute of a real module raises AttributeError."""
    broken = """
import math
def my_function(x):
    return math.some_missing_function(x)
"""
    with pytest.raises(AttributeError):
        execute_function(broken, "my_function", 0.5)
def test_execute_function_missing_schema():
    """Exceptions raised inside the executed code (MissingSchema) propagate."""
    fetcher = """
import requests
def my_function(x):
    return requests.get(x).text
"""
    # Patch requests.get so no real network call is attempted.
    with mock.patch("requests.get", side_effect=MissingSchema):
        with pytest.raises(MissingSchema):
            execute_function(fetcher, "my_function", "invalid_url")
def test_create_function():
    """extract_function_name + create_function yield a working callable."""
    source = """
import math
def my_function(x):
    return math.sin(x) + 1
"""
    built = create_function(source, extract_function_name(source))
    # sin(0.5) + 1 == 1.479425538604203
    assert built(0.5) == 1.479425538604203