From de292b8fff0a4bab58e02afeaf845d9501a39562 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 7 Jan 2024 20:06:20 -0300 Subject: [PATCH 001/153] Refactor OllamaLLM imports and remove unnecessary code --- src/backend/langflow/components/llms/OllamaLLM.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/components/llms/OllamaLLM.py b/src/backend/langflow/components/llms/OllamaLLM.py index 9bf00ad5d..abbc00387 100644 --- a/src/backend/langflow/components/llms/OllamaLLM.py +++ b/src/backend/langflow/components/llms/OllamaLLM.py @@ -1,8 +1,7 @@ -from typing import Optional, List +from typing import List, Optional -from langchain.llms import Ollama from langchain.llms.base import BaseLLM - +from langchain_community.llms.ollama import Ollama from langflow import CustomComponent @@ -150,10 +149,9 @@ class OllamaLLM(CustomComponent): "top_k": top_k, "top_p": top_p, } - - # None Value remove - llm_params = {k: v for k, v in llm_params.items() if v is not None} + # None Value remove + llm_params = {k: v for k, v in llm_params.items() if v is not None} try: llm = Ollama(**llm_params) From cef8fd7e7447f35c0468ec41f1e894fecad79bef Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 7 Jan 2024 20:06:27 -0300 Subject: [PATCH 002/153] Update import statements in ChatOllamaEndpoint.py --- src/backend/langflow/components/llms/ChatOllamaEndpoint.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/llms/ChatOllamaEndpoint.py b/src/backend/langflow/components/llms/ChatOllamaEndpoint.py index 3201ff432..34e9e0bd7 100644 --- a/src/backend/langflow/components/llms/ChatOllamaEndpoint.py +++ b/src/backend/langflow/components/llms/ChatOllamaEndpoint.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Optional # from langchain_community.chat_models import ChatOllama -from langchain.chat_models import ChatOllama -from langchain.chat_models.base import 
BaseChatModel +from langchain_community.chat_models import ChatOllama +from langchain_core.language_models.chat_models import BaseChatModel # from langchain.chat_models import ChatOllama from langflow import CustomComponent From 4c89bee64022ee13bae8db9a88c99e3dbdc5a105 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 7 Jan 2024 20:06:35 -0300 Subject: [PATCH 003/153] Update tool imports in constants.py --- src/backend/langflow/interface/tools/constants.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index dc1bfe0c1..0ac37a0a4 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py @@ -1,18 +1,9 @@ from langchain import tools from langchain.agents import Tool -from langchain.agents.load_tools import ( - _BASE_TOOLS, - _EXTRA_LLM_TOOLS, - _EXTRA_OPTIONAL_TOOLS, - _LLM_TOOLS, -) +from langchain.agents.load_tools import _BASE_TOOLS, _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS from langchain.tools.json.tool import JsonSpec - from langflow.interface.importing.utils import import_class -from langflow.interface.tools.custom import ( - PythonFunctionTool, - PythonFunction, -) +from langflow.interface.tools.custom import PythonFunction, PythonFunctionTool FILE_TOOLS = {"JsonSpec": JsonSpec} CUSTOM_TOOLS = { @@ -21,7 +12,7 @@ CUSTOM_TOOLS = { "PythonFunction": PythonFunction, } -OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") for tool in tools.__all__} +OTHER_TOOLS = {tool: import_class(f"langchain_community.tools.{tool}") for tool in tools.__all__} ALL_TOOLS_NAMES = { **_BASE_TOOLS, From b1ec1225ffb08010d90d82fb2a9df594027c6313 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 7 Jan 2024 20:06:41 -0300 Subject: [PATCH 004/153] Update langchain version to 0.1.0 and add langchain-openai dependency --- poetry.lock | 
184 +++++++++++++++++++++++++++---------------------- pyproject.toml | 3 +- 2 files changed, 102 insertions(+), 85 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1b4c5d334..5f4d111d2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3551,13 +3551,13 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "langchain" -version = "0.0.354" +version = "0.1.0" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-0.0.354-py3-none-any.whl", hash = "sha256:8d28283a2891422a685b0605dd23b5a1cd6a15ab57a8e359b37a3151a322bad4"}, - {file = "langchain-0.0.354.tar.gz", hash = "sha256:419c48735b803d70c0dee985e0afcfd7c88528b8c1cd918c57eb23e53d94ea87"}, + {file = "langchain-0.1.0-py3-none-any.whl", hash = "sha256:8652e74b039333a55c79faff4400b077ba1bd0ddce5255574e42d301c05c1733"}, + {file = "langchain-0.1.0.tar.gz", hash = "sha256:d43119f8d3fda2c8ddf8c3a19bd5b94b347e27d1867ff14a921b90bdbed0668a"}, ] [package.dependencies] @@ -3565,8 +3565,8 @@ aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">=0.5.7,<0.7" jsonpatch = ">=1.33,<2.0" -langchain-community = ">=0.0.8,<0.1" -langchain-core = ">=0.1.5,<0.2" +langchain-community = ">=0.0.9,<0.1" +langchain-core = ">=0.1.7,<0.2" langsmith = ">=0.0.77,<0.1.0" numpy = ">=1,<2" pydantic = ">=1,<3" @@ -3582,7 +3582,7 @@ cli = ["typer (>=0.9.0,<0.10.0)"] cohere = ["cohere (>=4,<5)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", 
"dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector 
(>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] javascript = ["esprima (>=4.0.1,<5.0.0)"] llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] @@ -3641,21 +3641,20 @@ extended-testing = ["jinja2 (>=3,<4)"] 
[[package]] name = "langchain-experimental" -version = "0.0.47" +version = "0.0.42" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_experimental-0.0.47-py3-none-any.whl", hash = "sha256:d5b6930c4e0a6f280cbb7d327d03d86f555f6168e185a1df64ba4a52b1059f65"}, - {file = "langchain_experimental-0.0.47.tar.gz", hash = "sha256:0fdba89a438287c14fa0632c6adf87acffd55158a2f66c7a12be7721a7882a0e"}, + {file = "langchain_experimental-0.0.42-py3-none-any.whl", hash = "sha256:8e9190fa5ebdd03dfed6ca20846ebb26fc7e0c1fffbab070a12f1ce0cf5053d2"}, + {file = "langchain_experimental-0.0.42.tar.gz", hash = "sha256:1571ef536b056c46781d1de0fa926ab27c7d386da203ba61e0e0601d4cfc96be"}, ] [package.dependencies] -langchain = ">=0.0.350,<0.1" -langchain-core = ">=0.1,<0.2" +langchain = ">=0.0.308" [package.extras] -extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "presidio-analyzer (>=2.2.33,<3.0.0)", "presidio-anonymizer (>=2.2.33,<3.0.0)", "sentence-transformers (>=2,<3)", "vowpal-wabbit-next (==0.6.0)"] +extended-testing = ["faker (>=19.3.1,<20.0.0)", "presidio-analyzer (>=2.2.33,<3.0.0)", "presidio-anonymizer (>=2.2.33,<3.0.0)", "sentence-transformers (>=2,<3)", "vowpal-wabbit-next (==0.6.0)"] [[package]] name = "langchain-google-genai" @@ -3672,6 +3671,23 @@ files = [ google-generativeai = ">=0.3.1,<0.4.0" langchain-core = ">=0.1,<0.2" +[[package]] +name = "langchain-openai" +version = "0.0.2" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_openai-0.0.2-py3-none-any.whl", hash = "sha256:0a46067be13ce95a029fdca339cd1034a61be1a727786178fbad702668a060f9"}, + {file = "langchain_openai-0.0.2.tar.gz", hash = "sha256:713af4a638f65b3af2f741a9d61991011c31939b070d81ede5b2e3cba625e01a"}, +] + +[package.dependencies] +langchain-core = ">=0.1.7,<0.2" +numpy = ">=1,<2" +openai = 
">=1.6.1,<2.0.0" +tiktoken = ">=0.5.2,<0.6.0" + [[package]] name = "langdetect" version = "1.0.9" @@ -5604,13 +5620,13 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg" -version = "3.1.16" +version = "3.1.17" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg-3.1.16-py3-none-any.whl", hash = "sha256:0bfe9741f4fb1c8115cadd8fe832fa91ac277e81e0652ff7fa1400f0ef0f59ba"}, - {file = "psycopg-3.1.16.tar.gz", hash = "sha256:a34d922fd7df3134595e71c3428ba6f1bd5f4968db74857fe95de12db2d6b763"}, + {file = "psycopg-3.1.17-py3-none-any.whl", hash = "sha256:96b7b13af6d5a514118b759a66b2799a8a4aa78675fa6bb0d3f7d52d67eff002"}, + {file = "psycopg-3.1.17.tar.gz", hash = "sha256:437e7d7925459f21de570383e2e10542aceb3b9cb972ce957fdd3826ca47edc6"}, ] [package.dependencies] @@ -5618,8 +5634,8 @@ typing-extensions = ">=4.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.1.16)"] -c = ["psycopg-c (==3.1.16)"] +binary = ["psycopg-binary (==3.1.17)"] +c = ["psycopg-c (==3.1.17)"] dev = ["black (>=23.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] @@ -5627,76 +5643,76 @@ test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6 [[package]] name = "psycopg-binary" -version = "3.1.16" +version = "3.1.17" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg_binary-3.1.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e08e333366f8583c7bee33ca6a27f84b76e05ee4e9f9f327a48e3ff81386261d"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:a18dfcf7eb3db698eb7a38b4a0e82bf5b76a7bc0079068c5837df70b965570f8"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db99192d9f448829322c4f59a584994ce747b8d586ec65788b4c65f7166cfe43"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f6053fe95596e2f67ff2c9464ea23032c748695a3b79060ca01ef878b0ea0f2"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e6092ec21c08ed4ae4ff343c93a3bbb1d39c87dee181860ce40fa3b5c46f4ae"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f81e880d1bd935433efab1c2883a02031df84e739eadcb2c6a715e9c2f41c19"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:430f8843e381199cdc39ce9506a2cdbc27a569c99a0d80193844c787ce7de94d"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:92bda36f0570a5f9a3d6aeb897bad219f1f23fc4e1d0e7780935798771efb536"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b256d500ec0121ad7875bc3539c43c82dc004535d55256a13c49df2d43f07ad8"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:699737cecf675e1eb70b892b1995456db4016eff7189a3ad9325dca5b6715cc3"}, - {file = "psycopg_binary-3.1.16-cp310-cp310-win_amd64.whl", hash = "sha256:5e0885bcd7d9a0c0043be83d6a214069356c640d42496de798d901d0a16a34e7"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ee8be32eb8b813ef37c5f5968fe03fdddc9a6f0129190f97f6491c798a1ef57"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f8fb9677fb7873daf9797207e72e9275f61e769a308c4ea8f55dfd3153ebae7"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8a611d7256493ee5bb73a070c9c60206af415be6aee01243c186fc03f1eb1a48"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d267cc92f0f0a9ea6c8ef058e95c85e58133d06c06f4ed48d63fc256aef166ab"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e996b38ffeffbaa06d236bbeab5168d33eea95941cf74de1daa0b008333861b1"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8429017cd7a3ef4699bee4ff8125a5e30b26882b817a178608d73e69fb727ab9"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a7d3b2ea267e7676b3693799fadf941c672f5727fae4947efa1f0cc6e25b672c"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8290cfd475fadf935da0900dc91b845fe92f792e6d53039c0df82f9049a84ad"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:72539a0c6b9a2a9be2acca993df17f4baaa0ed00f1d76b65733725286e3e3304"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1078370a93eaef1dc5aed540055d50cbe37e9154342f3a3d73fd768a6199344d"}, - {file = "psycopg_binary-3.1.16-cp311-cp311-win_amd64.whl", hash = "sha256:adca24d273fe81ecab2312309db547b345155ec50d15676e2df82b8c5409eb06"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e1c416a7c2a699c3e5ba031357682ebca92bd58f399e553173ab5d67cc71cbc5"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e951a8cc7cf919fdc817a28d57160e7286011a4a45dcad3be21f3e4feba8be1a"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa02fe8aa9ef8c8743919fdbc92c04b0ee8c43f3d65e53f24d355776c52fb3"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e23375c14c22ce8fd26d057ac4ab827de79aafced173c68a4c0b03520ea02c70"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84472e5c83e805d4c491f331061cbae3ea4e62f80a480fc4b32200be72262ffd"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b0f824565d1dc325c74c076efd5ba842b86219f8bc1b8048c8816621a8b268c"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6b856d44531475488e773ac78d2a7a91c0909a1e8bdbd20d3ebdbdce1868c9a0"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:198c4f16f806f7d2ad0c4a5b774652e17861b55249efb4e344049b1fcf9a24af"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b23d4b86acba2d745763ee0801821af1c42b127d8df75b903b7e7ca7c5f6400c"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2cfd857f1085c59da592090f2fa0751da30b67dcafea2ac52c4b404678406aae"}, - {file = "psycopg_binary-3.1.16-cp312-cp312-win_amd64.whl", hash = "sha256:46c9cca48d459d8df71fda4eef7d94a189b8333f4bc3cf1d170c1796fcbbc8cd"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f3136d8f92708c04694ca0cae6a2d6c8170e7174b9ee594218cb229b407e8f48"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1063fe43bb06790a4cfed9f1cacebb165939ca672b6fddcb03627d673ae00bd9"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58289209495a92022e58757add4badb495815a4477f5e9840d481eac2ea422b2"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18c58f99beec18d38094edcb1ae7e6a1e58fb1a53ed08b0f18df714aa4b07cc"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e122c904d4c4e044a3797a62624316cf7359271564f9ebe8ca342ed4a8cef3bd"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82ffad0edfa3dd77d6aa40c267f61275a6a4061f735cefe97cfd83cfa78e112a"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:82099b6b4b0b12b63c4169d69b48bdbce97e674b86fa51b015e9949fc0ce5c82"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3815c72c590ffe3ad1dc3b7021d082b42215bbd91d2c7211d4a101eec1d0b83e"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2f308bfd39d6dcf3f46165f98d816bce5ac78aaf782eceb3cf43aa0a4fe62f8"}, - {file = "psycopg_binary-3.1.16-cp37-cp37m-win_amd64.whl", hash = "sha256:1e197c3e8d88e984c1e0fcc9a0218947e5a14855939a00b158b428bc449b49e3"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2b22e2dad291a79d7a31b304866fd125038ef7fe378aba9698de0e1804a863c9"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d9e1768c46c595a8177cb709c99626c3cefbd12c2e46eb54323efd8ac4a7fc2d"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eaabc8dd2d364e1b43d3a25188356191a45abb687b77016544f6847b3fcd73a"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cda744c43b09342b1a8b5aace13d3284c1f5ddbfcefa2d385f703337503a060"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cdaf56adc9cc56df7a05e8f097a776939ba49d5e6afc907ba7b404d8bd21c89"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7232116fc5d4e0274114f152bdb9df089895d4c70f7c03268cab0a4c48a28d04"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6f03239d7c18666f5d6ca82ea972235de4d4d3604287098af6cdc256b76a0ca5"}, - {file = 
"psycopg_binary-3.1.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:edd1b630652bdfff84662b46d11878fbab8ab2966003c1876fcde56650e99e3f"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:481e9dafca1ed9532552e097105e6664ee7f14686270ed0ee0b1d6c78c2cdb11"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d43aa3aa55b5fa964ffa78cf6abdbd51ff33a759f290e9159a9f974ffa3178fa"}, - {file = "psycopg_binary-3.1.16-cp38-cp38-win_amd64.whl", hash = "sha256:51e66b282d8689bc33d81bde3a1e14d0c88a39200c2d9436b028b394d24f1f99"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfae154f3c88e67f3ed592765ad56531b6076acfe80796e28cccc05727c1cf5b"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9f4bc3d366951359a68833c8031cc83faf5084b3bc80dd2d24f0add593d4418"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a37d682d7ff57cc2573b1011740ef1566749fc94ae6ac1456405510592735c0a"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0be876e3a8ee359f6a985b662c6b02a094a50b37adf1bd756a655004bddf167a"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f79192b0edd60ef24acb0af5b83319cbb65d4187576757b690646b290de8307"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcc5996b1db4e7fb948ea47b610456df317625d92474c779a20f92ca8cbcec92"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3f2ceb04f8137462f9312a324bea5402de0a4f0503cd5442f4264911e4b6265b"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:47517d2da63bb10c80c2cf35c80a936db79636534849524fd57940b5f0bbd7bd"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2a6bd83d0b934aa03897e93acb6897972ccc3827ae61c903589bc92ed423f75d"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:08fb94928e785571ac90d3ab9e09f2721e0d895c2504ecfb8de91c5ea807b267"}, - {file = "psycopg_binary-3.1.16-cp39-cp39-win_amd64.whl", hash = "sha256:cf13807b61315130a59ea8d0950bda2ac875bae9fadc0b1a9aca9b4ef6d62c7b"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9ba559eabb0ba1afd4e0504fa0b10e00a212cac0c4028b8a1c3b087b5c1e5de"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2b2a689eaede08cf91a36b10b0da6568dd6e4669200f201e082639816737992b"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a16abab0c1abc58feb6ab11d78d0f8178a67c3586bd70628ec7c0218ec04c4ef"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73e7097b81cad9ae358334e3cec625246bb3b8013ae6bb287758dd6435e12f65"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:67a5b93101bc85a95a189c0a23d02a29cf06c1080a695a0dedfdd50dd734662a"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:751b31c2faae0348f87f22b45ef58f704bdcfc2abdd680fa0c743c124071157e"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b447ea765e71bc33a82cf070bba814b1efa77967442d116b95ccef8ce5da7631"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d2e9ed88d9a6a475c67bf70fc8285e88ccece0391727c7701e5a512e0eafbb05"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a89f36bf7b612ff6ed3e789bd987cbd0787cf0d66c49386fa3bad816dd7bee87"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:5ccbe8b2ec444763a51ecb1213befcbb75defc1ef36e7dd5dff501a23d7ce8cf"}, + {file = "psycopg_binary-3.1.17-cp310-cp310-win_amd64.whl", hash = "sha256:adb670031b27949c9dc5cf585c4a5a6b4469d3879fd2fb9d39b6d53e5f66b9bc"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0227885686c2cc0104ceb22d6eebc732766e9ad48710408cb0123237432e5435"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9124b6db07e8d8b11f4512b8b56cbe136bf1b7d0417d1280e62291a9dcad4408"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a46f77ba0ca7c5a5449b777170a518fa7820e1710edb40e777c9798f00d033"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f5f5bcbb772d8c243d605fc7151beec760dd27532d42145a58fb74ef9c5fbf2"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:267a82548c21476120e43dc72b961f1af52c380c0b4c951bdb34cf14cb26bd35"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b20013051f1fd7d02b8d0766cfe8d009e8078babc00a6d39bc7e2d50a7b96af"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c5c38129cc79d7e3ba553035b9962a442171e9f97bb1b8795c0885213f206f3"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d01c4faae66de60fcd3afd3720dcc8ffa03bc2087f898106da127774db12aac5"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e6ae27b0617ad3809449964b5e901b21acff8e306abacb8ba71d5ee7c8c47eeb"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:40af298b209dd77ca2f3e7eb3fbcfb87a25999fc015fcd14140bde030a164c7e"}, + {file = "psycopg_binary-3.1.17-cp311-cp311-win_amd64.whl", hash = "sha256:7b4e4c2b05f3b431e9026e82590b217e87696e7a7548f512ae8059d59fa8af3b"}, + 
{file = "psycopg_binary-3.1.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ea425a8dcd808a7232a5417d2633bfa543da583a2701b5228e9e29989a50deda"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3f1196d76860e72d338fab0d2b6722e8d47e2285d693e366ae36011c4a5898a"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1e867c2a729348df218a14ba1b862e627177fd57c7b4f3db0b4c708f6d03696"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0711e46361ea3047cd049868419d030c8236a9dea7e9ed1f053cbd61a853ec9"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1c0115bdf80cf6c8c9109cb10cf6f650fd1a8d841f884925e8cb12f34eb5371"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d0d154c780cc7b28a3a0886e8a4b18689202a1dbb522b3c771eb3a1289cf7c3"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f4028443bf25c1e04ecffdc552c0a98d826903dec76a1568dfddf5ebbbb03db7"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf424d92dd7e94705b31625b02d396297a7c8fab4b6f7de8dba6388323a7b71c"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:00377f6963ee7e4bf71cab17c2c235ef0624df9483f3b615d86aa24cde889d42"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9690a535d9ccd361bbc3590bfce7fe679e847f44fa7cc97f3b885f4744ca8a2c"}, + {file = "psycopg_binary-3.1.17-cp312-cp312-win_amd64.whl", hash = "sha256:6b2ae342d69684555bfe77aed5546d125b4a99012e0b83a8b3da68c8829f0935"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:86bb3656c8d744cc1e42003414cd6c765117d70aa23da6c0f4ff2b826e0fd0fd"}, + {file = 
"psycopg_binary-3.1.17-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10b7713e3ed31df7319c2a72d5fea5a2536476d7695a3e1d18a1f289060997c"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12eab8bc91b4ba01b2ecee3b5b80501934b198f6e1f8d4b13596f3f38ba6e762"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a728beefd89b430ebe2729d04ba10e05036b5e9d01648da60436000d2fcd242"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61104b8e7a43babf2bbaa36c08e31a12023e2f967166e99d6b052b11a4c7db06"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:02cd2eb62ffc56f8c847d68765cbf461b3d11b438fe48951e44b6c563ec27d18"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ca1757a6e080086f7234dc45684e81a47a66a6dd492a37d6ce38c58a1a93e9ff"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6e3543edc18553e31a3884af3cd7eea43d6c44532d8b9b16f3e743cdf6cfe6c5"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:914254849486e14aa931b0b3382cd16887f1507068ffba775cbdc5a55fe9ef19"}, + {file = "psycopg_binary-3.1.17-cp37-cp37m-win_amd64.whl", hash = "sha256:92fad8f1aa80a5ab316c0493dc6d1b54c1dba21937e43eea7296ff4a0ccc071e"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6d4f2e15d33ed4f9776fdf23683512d76f4e7825c4b80677e9e3ce6c1b193ff2"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fa26836ce074a1104249378727e1f239a01530f36bae16e77cf6c50968599b4"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d54bcf2dfc0880bf13f38512d44b194c092794e4ee9e01d804bc6cd3eed9bfb7"}, + {file = 
"psycopg_binary-3.1.17-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e28024204dc0c61094268c682041d2becfedfea2e3b46bed5f6138239304d98"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b1ec6895cab887b92c303565617f994c9b9db53befda81fa2a31b76fe8a3ab1"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:420c1eb1626539c261cf3fbe099998da73eb990f9ce1a34da7feda414012ea5f"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:83404a353240fdff5cfe9080665fdfdcaa2d4d0c5112e15b0a2fe2e59200ed57"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a0c4ba73f9e7721dd6cc3e6953016652dbac206f654229b7a1a8ac182b16e689"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f6898bf1ca5aa01115807643138e3e20ec603b17a811026bc4a49d43055720a7"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6b40fa54a02825d3d6a8009d9a82a2b4fad80387acf2b8fd6d398fd2813cb2d9"}, + {file = "psycopg_binary-3.1.17-cp38-cp38-win_amd64.whl", hash = "sha256:78ebb43dca7d5b41eee543cd005ee5a0256cecc74d84acf0fab4f025997b837e"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:02ac573f5a6e79bb6df512b3a6279f01f033bbd45c47186e8872fee45f6681d0"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:704f6393d758b12a4369887fe956b2a8c99e4aced839d9084de8e3f056015d40"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0340ef87a888fd940796c909e038426f4901046f61856598582a817162c64984"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a880e4113af3ab84d6a0991e3f85a2424924c8a182733ab8d964421df8b5190a"}, + {file = 
"psycopg_binary-3.1.17-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93921178b9a40c60c26e47eb44970f88c49fe484aaa3bb7ec02bb8b514eab3d9"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a05400e9314fc30bc1364865ba9f6eaa2def42b5e7e67f71f9a4430f870023e"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3e2cc2bbf37ff1cf11e8b871c294e3532636a3cf7f0c82518b7537158923d77b"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a343261701a8f63f0d8268f7fd32be40ffe28d24b65d905404ca03e7281f7bb5"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dceb3930ec426623c0cacc78e447a90882981e8c49d6fea8d1e48850e24a0170"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d613a23f8928f30acb2b6b2398cb7775ba9852e8968e15df13807ba0d3ebd565"}, + {file = "psycopg_binary-3.1.17-cp39-cp39-win_amd64.whl", hash = "sha256:d90c0531e9d591bde8cea04e75107fcddcc56811b638a34853436b23c9a3cb7d"}, ] [[package]] @@ -9278,4 +9294,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "380c300a4aee763dbd4cf33c3c0cab3509c69af9605f98622a6429d884c8d3af" +content-hash = "447639bcb6b221b2c0a1f216c7c93a5ce104664791a149645556b0916c555272" diff --git a/pyproject.toml b/pyproject.toml index c4da330d4..a02ec9009 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ google-search-results = "^2.4.1" google-api-python-client = "^2.79.0" typer = "^0.9.0" gunicorn = "^21.2.0" -langchain = "~0.0.345" +langchain = "~0.1.0" openai = "^1.6.1" pandas = "2.0.3" chromadb = "^0.4.0" @@ -107,6 +107,7 @@ langchain-google-genai = "^0.0.2" elasticsearch = "^8.11.1" pytube = "^15.0.0" llama-index = "^0.9.24" +langchain-openai = "^0.0.2" [tool.poetry.group.dev.dependencies] pytest-asyncio = "^0.23.1" 
From 4852b845b4acf192f386089dee8984468d06442b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 9 Jan 2024 00:30:23 -0300 Subject: [PATCH 005/153] Add broken components --- .../langflow/components/agents/CSVAgent.py | 22 +++ .../langflow/components/agents/JsonAgent.py | 25 +++ .../langflow/components/agents/SQLAgent.py | 25 +++ .../components/agents/VectorStoreAgent.py | 22 +++ .../agents/VectorStoreRouterAgent.py | 23 +++ .../components/agents/ZeroShotAgent.py | 29 ++++ .../components/chains/CombineDocsChain.py | 37 +++++ .../components/chains/LLMCheckerChain.py | 24 +++ .../components/chains/LLMMathChain.py | 32 ++++ .../langflow/components/chains/RetrievalQA.py | 41 +++++ .../chains/RetrievalQAWithSourcesChain.py | 35 +++++ .../components/chains/SQLDatabaseChain.py | 33 ++++ .../documentloaders/AZLyricsLoader.py | 31 ++++ .../documentloaders/AirbyteJSONLoader.py | 31 ++++ .../documentloaders/BSHTMLLoader.py | 33 ++++ .../components/documentloaders/CSVLoader.py | 30 ++++ .../documentloaders/CoNLLULoader.py | 33 ++++ .../CollegeConfidentialLoader.py | 24 +++ .../documentloaders/DirectoryLoader.py | 42 +++++ .../documentloaders/EverNoteLoader.py | 32 ++++ .../documentloaders/FacebookChatLoader.py | 30 ++++ .../documentloaders/GitbookLoader.py | 26 ++++ .../components/documentloaders/HNLoader.py | 31 ++++ .../documentloaders/IFixitLoader.py | 20 +++ .../components/documentloaders/IMSDbLoader.py | 23 +++ .../documentloaders/PyPDFDirectoryLoader.py | 23 +++ .../components/documentloaders/PyPDFLoader.py | 31 ++++ .../documentloaders/ReadTheDocsLoader.py | 21 +++ .../components/documentloaders/SRTLoader.py | 25 +++ .../documentloaders/SlackDirectoryLoader.py | 26 ++++ .../components/documentloaders/TextLoader.py | 28 ++++ .../documentloaders/UnstructuredHTMLLoader.py | 21 +++ .../UnstructuredPowerPointLoader.py | 32 ++++ .../UnstructuredWordDocumentLoader.py | 28 ++++ .../components/embeddings/CohereEmbeddings.py | 34 ++++ 
.../embeddings/HuggingFaceEmbeddings.py | 37 +++++ .../components/embeddings/OpenAIEmbeddings.py | 75 +++++++++ .../embeddings/VertexAIEmbeddings.py | 60 ++++++++ .../langflow/components/llms/Anthropic.py | 61 ++++++++ .../langflow/components/llms/CTransformers.py | 56 +++++++ .../langflow/components/llms/ChatAnthropic.py | 47 ++++++ .../langflow/components/llms/ChatOpenAI.py | 82 ++++++++++ .../langflow/components/llms/ChatVertexAI.py | 84 ++++++++++ .../langflow/components/llms/Cohere.py | 40 +++++ .../langflow/components/llms/LlamaCpp.py | 126 +++++++++++++++ .../langflow/components/llms/OpenAI.py | 57 +++++++ .../langflow/components/llms/VertexAI.py | 145 ++++++++++++++++++ .../retrievers/MultiQueryRetriever.py | 46 ++++++ .../textsplitters/CharacterTextSplitter.py | 32 ++++ .../components/toolkits/JsonToolkit.py | 17 ++ .../components/toolkits/OpenAPIToolkit.py | 22 +++ .../components/toolkits/VectorStoreInfo.py | 33 ++++ .../toolkits/VectorStoreRouterToolkit.py | 23 +++ .../components/toolkits/VectorStoreToolkit.py | 22 +++ .../utilities/BingSearchAPIWrapper.py | 34 ++++ .../utilities/GoogleSearchAPIWrapper.py | 27 ++++ .../utilities/GoogleSerperAPIWrapper.py | 57 +++++++ .../utilities/SearxSearchWrapper.py | 27 ++++ .../components/utilities/SerpAPIWrapper.py | 32 ++++ .../utilities/WikipediaAPIWrapper.py | 17 ++ .../utilities/WolframAlphaAPIWrapper.py | 22 +++ .../langflow/components/vectorstores/FAISS.py | 39 +++++ .../vectorstores/MongoDBAtlasVectorSearch.py | 45 ++++++ .../components/vectorstores/Pinecone.py | 44 ++++++ .../components/vectorstores/Qdrant.py | 73 +++++++++ .../vectorstores/SupabaseVectorStore.py | 44 ++++++ 66 files changed, 2529 insertions(+) create mode 100644 src/backend/langflow/components/agents/CSVAgent.py create mode 100644 src/backend/langflow/components/agents/JsonAgent.py create mode 100644 src/backend/langflow/components/agents/SQLAgent.py create mode 100644 src/backend/langflow/components/agents/VectorStoreAgent.py create mode 
100644 src/backend/langflow/components/agents/VectorStoreRouterAgent.py create mode 100644 src/backend/langflow/components/agents/ZeroShotAgent.py create mode 100644 src/backend/langflow/components/chains/CombineDocsChain.py create mode 100644 src/backend/langflow/components/chains/LLMCheckerChain.py create mode 100644 src/backend/langflow/components/chains/LLMMathChain.py create mode 100644 src/backend/langflow/components/chains/RetrievalQA.py create mode 100644 src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py create mode 100644 src/backend/langflow/components/chains/SQLDatabaseChain.py create mode 100644 src/backend/langflow/components/documentloaders/AZLyricsLoader.py create mode 100644 src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py create mode 100644 src/backend/langflow/components/documentloaders/BSHTMLLoader.py create mode 100644 src/backend/langflow/components/documentloaders/CSVLoader.py create mode 100644 src/backend/langflow/components/documentloaders/CoNLLULoader.py create mode 100644 src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py create mode 100644 src/backend/langflow/components/documentloaders/DirectoryLoader.py create mode 100644 src/backend/langflow/components/documentloaders/EverNoteLoader.py create mode 100644 src/backend/langflow/components/documentloaders/FacebookChatLoader.py create mode 100644 src/backend/langflow/components/documentloaders/GitbookLoader.py create mode 100644 src/backend/langflow/components/documentloaders/HNLoader.py create mode 100644 src/backend/langflow/components/documentloaders/IFixitLoader.py create mode 100644 src/backend/langflow/components/documentloaders/IMSDbLoader.py create mode 100644 src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py create mode 100644 src/backend/langflow/components/documentloaders/PyPDFLoader.py create mode 100644 src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py create mode 100644 
src/backend/langflow/components/documentloaders/SRTLoader.py create mode 100644 src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py create mode 100644 src/backend/langflow/components/documentloaders/TextLoader.py create mode 100644 src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py create mode 100644 src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py create mode 100644 src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py create mode 100644 src/backend/langflow/components/embeddings/CohereEmbeddings.py create mode 100644 src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py create mode 100644 src/backend/langflow/components/embeddings/OpenAIEmbeddings.py create mode 100644 src/backend/langflow/components/embeddings/VertexAIEmbeddings.py create mode 100644 src/backend/langflow/components/llms/Anthropic.py create mode 100644 src/backend/langflow/components/llms/CTransformers.py create mode 100644 src/backend/langflow/components/llms/ChatAnthropic.py create mode 100644 src/backend/langflow/components/llms/ChatOpenAI.py create mode 100644 src/backend/langflow/components/llms/ChatVertexAI.py create mode 100644 src/backend/langflow/components/llms/Cohere.py create mode 100644 src/backend/langflow/components/llms/LlamaCpp.py create mode 100644 src/backend/langflow/components/llms/OpenAI.py create mode 100644 src/backend/langflow/components/llms/VertexAI.py create mode 100644 src/backend/langflow/components/retrievers/MultiQueryRetriever.py create mode 100644 src/backend/langflow/components/textsplitters/CharacterTextSplitter.py create mode 100644 src/backend/langflow/components/toolkits/JsonToolkit.py create mode 100644 src/backend/langflow/components/toolkits/OpenAPIToolkit.py create mode 100644 src/backend/langflow/components/toolkits/VectorStoreInfo.py create mode 100644 src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py create mode 100644 
src/backend/langflow/components/toolkits/VectorStoreToolkit.py create mode 100644 src/backend/langflow/components/utilities/BingSearchAPIWrapper.py create mode 100644 src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py create mode 100644 src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py create mode 100644 src/backend/langflow/components/utilities/SearxSearchWrapper.py create mode 100644 src/backend/langflow/components/utilities/SerpAPIWrapper.py create mode 100644 src/backend/langflow/components/utilities/WikipediaAPIWrapper.py create mode 100644 src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py create mode 100644 src/backend/langflow/components/vectorstores/FAISS.py create mode 100644 src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py create mode 100644 src/backend/langflow/components/vectorstores/Pinecone.py create mode 100644 src/backend/langflow/components/vectorstores/Qdrant.py create mode 100644 src/backend/langflow/components/vectorstores/SupabaseVectorStore.py diff --git a/src/backend/langflow/components/agents/CSVAgent.py b/src/backend/langflow/components/agents/CSVAgent.py new file mode 100644 index 000000000..e3b90527f --- /dev/null +++ b/src/backend/langflow/components/agents/CSVAgent.py @@ -0,0 +1,22 @@ + +from langflow import CustomComponent +from langchain.field_typing import BaseLanguageModel, AgentExecutor + +class CSVAgentComponent(CustomComponent): + display_name = "CSVAgent" + description = "Construct a CSV agent from a CSV and tools." 
+ documentation = "https://python.langchain.com/docs/modules/agents/toolkits/csv" + + def build_config(self): + return { + "llm": {"display_name": "LLM", "type": BaseLanguageModel}, + "path": {"display_name": "Path", "type": "file", "suffixes": [".csv"], "file_types": ["csv"]}, + } + + def build( + self, + llm: BaseLanguageModel, + path: str, + ) -> AgentExecutor: + # Instantiate and return the CSV agent class with the provided llm and path + return AgentExecutor(llm=llm, path=path) diff --git a/src/backend/langflow/components/agents/JsonAgent.py b/src/backend/langflow/components/agents/JsonAgent.py new file mode 100644 index 000000000..935138142 --- /dev/null +++ b/src/backend/langflow/components/agents/JsonAgent.py @@ -0,0 +1,25 @@ + +from langflow import CustomComponent +from langchain.agents import AgentExecutor +from typing import Callable +from langflow.field_typing import ( + BaseLanguageModel, + BaseToolkit, +) + +class JsonAgentComponent(CustomComponent): + display_name = "JsonAgent" + description = "Construct a json agent from an LLM and tools." + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "toolkit": {"display_name": "Toolkit"}, + } + + def build( + self, + llm: BaseLanguageModel, + toolkit: BaseToolkit, + ) -> Callable: + return AgentExecutor(llm=llm, toolkit=toolkit) diff --git a/src/backend/langflow/components/agents/SQLAgent.py b/src/backend/langflow/components/agents/SQLAgent.py new file mode 100644 index 000000000..6f96b6757 --- /dev/null +++ b/src/backend/langflow/components/agents/SQLAgent.py @@ -0,0 +1,25 @@ + +from langflow import CustomComponent +from typing import Union, Callable +from langchain.agents import AgentExecutor +from langflow.field_typing import BaseLanguageModel + +class SQLAgentComponent(CustomComponent): + display_name = "SQLAgent" + description = "Construct an SQL agent from an LLM and tools." 
+ + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "database_uri": {"display_name": "Database URI"}, + } + + def build( + self, + llm: BaseLanguageModel, + database_uri: str, + ) -> Union[AgentExecutor, Callable]: + # Assuming there is a constructor for SQLAgent that takes these parameters + # Since the actual implementation is not provided, this is a placeholder + # Replace SQLAgent with the actual class name if different + return SQLAgent(llm=llm, database_uri=database_uri) diff --git a/src/backend/langflow/components/agents/VectorStoreAgent.py b/src/backend/langflow/components/agents/VectorStoreAgent.py new file mode 100644 index 000000000..e554e0d96 --- /dev/null +++ b/src/backend/langflow/components/agents/VectorStoreAgent.py @@ -0,0 +1,22 @@ + +from langflow import CustomComponent +from langchain.agents import AgentExecutor +from typing import Union, Callable +from langflow.field_typing import BaseLanguageModel, VectorStore + +class VectorStoreAgentComponent(CustomComponent): + display_name = "VectorStoreAgent" + description = "Construct an agent from a Vector Store." 
+ + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "vectorstoreinfo": {"display_name": "Vector Store Info"}, + } + + def build( + self, + llm: BaseLanguageModel, + vectorstoreinfo: VectorStore, + ) -> Union[AgentExecutor, Callable]: + return AgentExecutor(llm=llm, vectorstore=vectorstoreinfo) diff --git a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py new file mode 100644 index 000000000..6aec821c8 --- /dev/null +++ b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py @@ -0,0 +1,23 @@ + +from langflow import CustomComponent +from langchain.llms import BaseLanguageModel +from langchain.vectorstores import VectorStoreRouterToolkit +from langchain.agents import AgentExecutor +from typing import Callable + +class VectorStoreRouterAgentComponent(CustomComponent): + display_name = "VectorStoreRouterAgent" + description = "Construct an agent from a Vector Store Router." + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "vectorstoreroutertoolkit": {"display_name": "Vector Store Router Toolkit"}, + } + + def build( + self, + llm: BaseLanguageModel, + vectorstoreroutertoolkit: VectorStoreRouterToolkit + ) -> Callable: + return AgentExecutor(llm=llm, toolkit=vectorstoreroutertoolkit) diff --git a/src/backend/langflow/components/agents/ZeroShotAgent.py b/src/backend/langflow/components/agents/ZeroShotAgent.py new file mode 100644 index 000000000..fca29a9e0 --- /dev/null +++ b/src/backend/langflow/components/agents/ZeroShotAgent.py @@ -0,0 +1,29 @@ + +from langflow import CustomComponent +from langchain.agents import ZeroShotAgent +from typing import List, Optional +from langflow.field_typing import ( + BaseLanguageModel, + BaseTool, +) + +class ZeroShotAgentComponent(CustomComponent): + display_name = "ZeroShotAgent" + description = "Construct an agent from an LLM and tools." 
+ + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "tools": {"display_name": "Tools"}, + "prefix": {"display_name": "Prefix", "multiline": True}, + "suffix": {"display_name": "Suffix", "multiline": True}, + } + + def build( + self, + llm: BaseLanguageModel, + tools: List[BaseTool], + prefix: Optional[str] = "Answer the following questions as best you can. You have access to the following tools:", + suffix: Optional[str] = "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}", + ) -> ZeroShotAgent: + return ZeroShotAgent(llm=llm, tools=tools, prefix=prefix, suffix=suffix) diff --git a/src/backend/langflow/components/chains/CombineDocsChain.py b/src/backend/langflow/components/chains/CombineDocsChain.py new file mode 100644 index 000000000..d18cec63a --- /dev/null +++ b/src/backend/langflow/components/chains/CombineDocsChain.py @@ -0,0 +1,37 @@ + +from langflow import CustomComponent +from langflow.field_typing import BaseLanguageModel, Chain +from typing import Union, Callable + +class CombineDocsChainComponent(CustomComponent): + display_name = "CombineDocsChain" + description = "Load question answering chain." + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "chain_type": { + "display_name": "Chain Type", + "options": ['stuff', 'map_reduce', 'map_rerank', 'refine'], + }, + } + + def build( + self, + llm: BaseLanguageModel, + chain_type: str, + ) -> Union[Chain, Callable]: + if chain_type not in ['stuff', 'map_reduce', 'map_rerank', 'refine']: + raise ValueError(f"Invalid chain_type: {chain_type}") + + # Implement the logic to create and return the appropriate chain based on the chain_type + # This could be a placeholder for now, as the specific chain loading function is not defined. + # Replace with actual implementation when available. + return load_qa_chain(llm=llm, chain_type=chain_type) + +# Assuming there is a function or class `load_qa_chain` that creates the chain +# based on the `chain_type` and `llm`. 
This is a placeholder for the actual +# implementation which should be replaced with the correct function/class call. +def load_qa_chain(llm: BaseLanguageModel, chain_type: str) -> Union[Chain, Callable]: + # Implement the logic to create and return the appropriate chain based on the chain_type + pass diff --git a/src/backend/langflow/components/chains/LLMCheckerChain.py b/src/backend/langflow/components/chains/LLMCheckerChain.py new file mode 100644 index 000000000..0d2266470 --- /dev/null +++ b/src/backend/langflow/components/chains/LLMCheckerChain.py @@ -0,0 +1,24 @@ + +from langflow import CustomComponent +from langchain.chains import LLMCheckerChain +from typing import Union, Callable +from langflow.field_typing import ( + BaseLanguageModel, + Chain, +) + +class LLMCheckerChainComponent(CustomComponent): + display_name = "LLMCheckerChain" + description = "" + documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_checker" + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + } + + def build( + self, + llm: BaseLanguageModel, + ) -> Union[Chain, Callable]: + return LLMCheckerChain(llm=llm) diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py new file mode 100644 index 000000000..406143418 --- /dev/null +++ b/src/backend/langflow/components/chains/LLMMathChain.py @@ -0,0 +1,32 @@ + +from langflow import CustomComponent +from langchain.chains import LLMChain +from typing import Optional +from langflow.field_typing import ( + BaseLanguageModel, + BaseMemory, +) + +class LLMMathChainComponent(CustomComponent): + display_name = "LLMMathChain" + description = "Chain that interprets a prompt and executes python code to do math." 
+ documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_math" + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "llm_chain": {"display_name": "LLM Chain"}, + "memory": {"display_name": "Memory"}, + "input_key": {"display_name": "Input Key"}, + "output_key": {"display_name": "Output Key"}, + } + + def build( + self, + llm: BaseLanguageModel, + llm_chain: LLMChain, + input_key: str, + output_key: str, + memory: Optional[BaseMemory] = None, + ) -> LLMChain: + return LLMChain(llm=llm, prompt=llm_chain, input_key=input_key, output_key=output_key, memory=memory) diff --git a/src/backend/langflow/components/chains/RetrievalQA.py b/src/backend/langflow/components/chains/RetrievalQA.py new file mode 100644 index 000000000..9ed188bf5 --- /dev/null +++ b/src/backend/langflow/components/chains/RetrievalQA.py @@ -0,0 +1,41 @@ + +from langflow import CustomComponent +from langchain.chains import BaseRetrievalQA +from typing import Optional, Union, Callable +from langflow.field_typing import ( + BaseCombineDocumentsChain, + BaseMemory, + BaseRetriever, +) + +class RetrievalQAComponent(CustomComponent): + display_name = "RetrievalQA" + description = "Chain for question-answering against an index." 
+ + def build_config(self): + return { + "combine_documents_chain": {"display_name": "Combine Documents Chain"}, + "retriever": {"display_name": "Retriever"}, + "memory": {"display_name": "Memory", "required": False}, + "input_key": {"display_name": "Input Key"}, + "output_key": {"display_name": "Output Key"}, + "return_source_documents": {"display_name": "Return Source Documents"}, + } + + def build( + self, + combine_documents_chain: BaseCombineDocumentsChain, + retriever: BaseRetriever, + memory: Optional[BaseMemory] = None, + input_key: str = "query", + output_key: str = "result", + return_source_documents: bool = True, + ) -> Union[BaseRetrievalQA, Callable]: + return BaseRetrievalQA( + combine_documents_chain=combine_documents_chain, + retriever=retriever, + memory=memory, + input_key=input_key, + output_key=output_key, + return_source_documents=return_source_documents, + ) diff --git a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py new file mode 100644 index 000000000..44362683d --- /dev/null +++ b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -0,0 +1,35 @@ + +from langflow import CustomComponent +from langchain.chains import RetrievalQAWithSourcesChain +from typing import Optional +from langflow.field_typing import ( + BaseMemory, + BaseRetriever, + Chain, +) + +class RetrievalQAWithSourcesChainComponent(CustomComponent): + display_name = "RetrievalQAWithSourcesChain" + description = "Question-answering with sources over an index." 
+ + def build_config(self): + return { + "combine_documents_chain": {"display_name": "Combine Documents Chain"}, + "retriever": {"display_name": "Retriever"}, + "memory": {"display_name": "Memory", "optional": True}, + "return_source_documents": {"display_name": "Return Source Documents", "default": True, "advanced": True}, + } + + def build( + self, + combine_documents_chain: Chain, + retriever: BaseRetriever, + memory: Optional[BaseMemory] = None, + return_source_documents: Optional[bool] = True, + ) -> RetrievalQAWithSourcesChain: + return RetrievalQAWithSourcesChain( + combine_documents_chain=combine_documents_chain, + retriever=retriever, + memory=memory, + return_source_documents=return_source_documents + ) diff --git a/src/backend/langflow/components/chains/SQLDatabaseChain.py b/src/backend/langflow/components/chains/SQLDatabaseChain.py new file mode 100644 index 000000000..3ea43dc75 --- /dev/null +++ b/src/backend/langflow/components/chains/SQLDatabaseChain.py @@ -0,0 +1,33 @@ + +from langflow import CustomComponent +from langchain.chains import Chain +from typing import Callable, Union +from langflow.field_typing import ( + BasePromptTemplate, + BaseLanguageModel, +) + +# Placeholder SQLDatabase class. In practice, replace this with the actual class or import it if available. +class SQLDatabase: + pass + +class SQLDatabaseChainComponent(CustomComponent): + display_name = "SQLDatabaseChain" + description = "" + + def build_config(self): + return { + "db": {"display_name": "Database"}, + "llm": {"display_name": "LLM"}, + "prompt": {"display_name": "Prompt"}, + } + + def build( + self, + db: SQLDatabase, + llm: BaseLanguageModel, + prompt: BasePromptTemplate, + ) -> Union[Chain, Callable]: + # Assuming there's a specific chain for SQLDatabase in the langchain library: + # Replace `Chain` with the specific chain class that interfaces with the SQLDatabase. 
+ return Chain(db=db, llm=llm, prompt=prompt) diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py new file mode 100644 index 000000000..f8e9b24e7 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py @@ -0,0 +1,31 @@ + +from langflow import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + +class AZLyricsLoaderComponent(CustomComponent): + display_name = "AZLyricsLoader" + description = "Load `AZLyrics` webpages." + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/azlyrics" + + def build_config(self): + return { + "metadata": { + "display_name": "Metadata", + "type": "dict", + "default": {}, + "show": True + }, + "web_path": { + "display_name": "Web Page", + "type": "str", + "required": True, + "show": True + }, + } + + def build(self, metadata: Optional[Dict] = None, web_path: str = "") -> Document: + # Assuming there is a class AZLyricsLoader that takes metadata and web_path as parameters + # and returns a Document object. Replace AZLyricsLoader with the actual class name if different. + # The import statement for AZLyricsLoader is assumed to be added above. + return AZLyricsLoader(metadata=metadata, web_path=web_path) diff --git a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py new file mode 100644 index 000000000..5d8272d04 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py @@ -0,0 +1,31 @@ + +from langflow import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + + +class AirbyteJSONLoaderComponent(CustomComponent): + display_name = "AirbyteJSONLoader" + description = "Load local `Airbyte` json files." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "type": "file", + "fileTypes": ["json"], + "required": True, + }, + "metadata": { + "display_name": "Metadata", + "type": "dict", + "required": False, + }, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + # Assuming there is a function or class named AirbyteJSONLoader that takes file_path and metadata as parameters + # and returns a Document object. Replace AirbyteJSONLoader with the actual class or function name. + # The actual implementation here is a placeholder and should be adapted to the real AirbyteJSONLoader class or function. + return AirbyteJSONLoader(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/BSHTMLLoader.py b/src/backend/langflow/components/documentloaders/BSHTMLLoader.py new file mode 100644 index 000000000..8f9553183 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/BSHTMLLoader.py @@ -0,0 +1,33 @@ + +from langflow import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + +class BSHTMLLoaderComponent(CustomComponent): + display_name = "BSHTMLLoader" + description = "Load `HTML` files and parse them with `beautiful soup`." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "show": True, + "type": "file", + "suffixes": [".html"], + "file_types": ["html"], + }, + "metadata": { + "display_name": "Metadata", + "required": False, + "show": True, + "type": "dict", + }, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + # Assuming there is a class or function named BSHTMLLoader that takes a file path and optional metadata + # and returns a Document object after parsing HTML. Since the actual implementation of BSHTMLLoader is not provided, + # this is a placeholder and should be replaced with the actual logic. + raise NotImplementedError("The BSHTMLLoader function or class needs to be implemented.") diff --git a/src/backend/langflow/components/documentloaders/CSVLoader.py b/src/backend/langflow/components/documentloaders/CSVLoader.py new file mode 100644 index 000000000..64f1f5527 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/CSVLoader.py @@ -0,0 +1,30 @@ + +from langchain import CustomComponent +from typing import Optional, Dict, List +from langchain.loaders import CSVLoader +from langchain.documents import Document + +class CSVLoaderComponent(CustomComponent): + display_name = "CSVLoader" + description = "Load a `CSV` file into a list of Documents." 
+ + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "suffixes": [".csv"], + "file_types": ["csv"], + }, + "metadata": { + "display_name": "Metadata", + "required": False, + }, + } + + def build( + self, + file_path: str, + metadata: Optional[Dict[str, str]] = None, + ) -> List[Document]: + return CSVLoader(file_path=file_path, metadata=metadata).load() diff --git a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py new file mode 100644 index 000000000..be2e3b8de --- /dev/null +++ b/src/backend/langflow/components/documentloaders/CoNLLULoader.py @@ -0,0 +1,33 @@ + +from langflow import CustomComponent +from langchain.documents import Document +from typing import Optional, Dict +from langchain.field_typing import TemplateField + + +class CoNLLULoaderComponent(CustomComponent): + display_name = "CoNLLULoader" + description = "Load `CoNLL-U` files." + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/conll-u" + + def build_config(self): + return { + "file_path": TemplateField( + display_name="File Path", + required=True, + type="file", + file_types=["conllu"], + suffixes=['.conllu'], + ), + "metadata": TemplateField( + display_name="Metadata", + required=False, + type="dict", + ), + } + + def build(self, file_path: str, metadata: Optional[Dict[str, str]] = None) -> Document: + # Here, you would use the actual class that loads CoNLL-U files. + # As I don't have the specific class, I'm returning an instance of Document. + # In a real scenario, you should replace the below Document with the actual loader class. 
+ return Document(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py new file mode 100644 index 000000000..163e43afa --- /dev/null +++ b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py @@ -0,0 +1,24 @@ + +from langflow import CustomComponent +from langchain.document_loaders import Document +from typing import Optional, Dict + +class CollegeConfidentialLoaderComponent(CustomComponent): + display_name = "CollegeConfidentialLoader" + description = "Load `College Confidential` webpages." + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential" + + def build_config(self): + return { + "metadata": {"display_name": "Metadata", "default": {}}, + "web_path": {"display_name": "Web Page", "required": True}, + } + + def build( + self, + web_path: str, + metadata: Optional[Dict] = None, + ) -> Document: + # Assuming there is a loader class `CollegeConfidentialLoader` that takes `metadata` and `web_path` as arguments + # Replace `CollegeConfidentialLoader` with the actual class name if different + return CollegeConfidentialLoader(web_path=web_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/DirectoryLoader.py b/src/backend/langflow/components/documentloaders/DirectoryLoader.py new file mode 100644 index 000000000..3e27458f8 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/DirectoryLoader.py @@ -0,0 +1,42 @@ + +from langflow import CustomComponent +from langchain.data_connections import Document +from typing import Optional, Dict, Any + +class DirectoryLoaderComponent(CustomComponent): + display_name = "DirectoryLoader" + description = "Load from a directory." 
+ + def build_config(self) -> Dict[str, Any]: + return { + "glob": {"display_name": "Glob Pattern", "default": "**/*.txt"}, + "load_hidden": {"display_name": "Load Hidden Files", "default": False, "advanced": True}, + "max_concurrency": {"display_name": "Max Concurrency", "default": 10, "advanced": True}, + "metadata": {"display_name": "Metadata", "default": {}}, + "path": {"display_name": "Local Directory"}, + "recursive": {"display_name": "Recursive", "default": True, "advanced": True}, + "silent_errors": {"display_name": "Silent Errors", "default": False, "advanced": True}, + "use_multithreading": {"display_name": "Use Multithreading", "default": True, "advanced": True}, + } + + def build( + self, + glob: str, + path: str, + load_hidden: Optional[bool] = False, + max_concurrency: Optional[int] = 10, + metadata: Optional[Dict[str, Any]] = None, + recursive: Optional[bool] = True, + silent_errors: Optional[bool] = False, + use_multithreading: Optional[bool] = True, + ) -> Document: + return Document( + glob=glob, + path=path, + load_hidden=load_hidden, + max_concurrency=max_concurrency, + metadata=metadata, + recursive=recursive, + silent_errors=silent_errors, + use_multithreading=use_multithreading, + ) diff --git a/src/backend/langflow/components/documentloaders/EverNoteLoader.py b/src/backend/langflow/components/documentloaders/EverNoteLoader.py new file mode 100644 index 000000000..5300c0ef9 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/EverNoteLoader.py @@ -0,0 +1,32 @@ + +from langflow import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + +class EverNoteLoaderComponent(CustomComponent): + display_name = "EverNoteLoader" + description = "Load from `EverNote`." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "suffixes": [".xml"], + "show": True, + "type": "file", + "file_types": ["xml"], + }, + "metadata": { + "display_name": "Metadata", + "required": False, + "show": True, + "type": "dict", + }, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + # Assuming there is a function or class named `EverNoteLoader` that takes these parameters + # and returns a `Document` object. Replace `EverNoteLoader` with the actual implementation. + return EverNoteLoader(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py new file mode 100644 index 000000000..cf0ea2380 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py @@ -0,0 +1,30 @@ + +from langflow import CustomComponent +from langchain.documents import Document +from typing import Optional, Dict + +class FacebookChatLoaderComponent(CustomComponent): + display_name = "FacebookChatLoader" + description = "Load `Facebook Chat` messages directory dump." + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "suffixes": [".json"], + "file_types": ["json"], + }, + "metadata": { + "display_name": "Metadata", + "required": False, + }, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + # Assuming there is a class named FacebookChatLoader that takes file_path and metadata as parameters + # and returns a Document object. Replace 'FacebookChatLoader' with the actual class name. 
+ # As per the JSON, the output type is 'Document', which is part of langchain.documents. + # Therefore, the 'FacebookChatLoader' should be imported or defined elsewhere in the codebase. + return FacebookChatLoader(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/GitbookLoader.py b/src/backend/langflow/components/documentloaders/GitbookLoader.py new file mode 100644 index 000000000..7157f249e --- /dev/null +++ b/src/backend/langflow/components/documentloaders/GitbookLoader.py @@ -0,0 +1,26 @@ + +from langflow import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + + +class GitbookLoaderComponent(CustomComponent): + display_name = "GitbookLoader" + description = "Load `GitBook` data." + + def build_config(self): + return { + "metadata": { + "display_name": "Metadata", + "default": {}, + }, + "web_page": { + "display_name": "Web Page", + "required": True, + }, + } + + def build(self, metadata: Optional[Dict] = None, web_page: str = "") -> Document: + # Assuming there is a GitbookLoader class that takes metadata and web_page as parameters + # Replace 'GitbookLoader' with the actual class name if different + return GitbookLoader(metadata=metadata, web_page=web_page) diff --git a/src/backend/langflow/components/documentloaders/HNLoader.py b/src/backend/langflow/components/documentloaders/HNLoader.py new file mode 100644 index 000000000..eed9bb957 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/HNLoader.py @@ -0,0 +1,31 @@ + +from langchain import CustomComponent +from langchain.document_loaders import BaseLoader +from typing import Optional, Dict + +class HNLoaderComponent(CustomComponent): + display_name = "HNLoader" + description = "Load `Hacker News` data." 
+ + def build_config(self): + return { + "metadata": { + "display_name": "Metadata", + "default": {}, + "required": False + }, + "web_path": { + "display_name": "Web Page", + "required": True + }, + } + + def build( + self, + web_path: str, + metadata: Optional[Dict] = None, + ) -> BaseLoader: + # Assuming that there's a specific loader for Hacker News + # as BaseLoader does not take a web_path argument + # The HackerNewsLoader needs to be defined somewhere in the actual implementation + return HackerNewsLoader(metadata=metadata, web_path=web_path) diff --git a/src/backend/langflow/components/documentloaders/IFixitLoader.py b/src/backend/langflow/components/documentloaders/IFixitLoader.py new file mode 100644 index 000000000..0666872a0 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/IFixitLoader.py @@ -0,0 +1,20 @@ + +from langflow import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + +class IFixitLoaderComponent(CustomComponent): + display_name = "IFixitLoader" + description = "Load `iFixit` repair guides, device wikis and answers." + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/ifixit" + + def build_config(self): + return { + "metadata": {"display_name": "Metadata", "type": "dict", "default": {}}, + "web_path": {"display_name": "Web Page", "type": "str"}, + } + + def build(self, web_path: str, metadata: Optional[Dict] = None) -> Document: + # Assuming IFixitLoader is the correct class name from the langchain library, + # and it has a load method that returns a Document object. 
+ return IFixitLoader(web_path=web_path, metadata=metadata).load() diff --git a/src/backend/langflow/components/documentloaders/IMSDbLoader.py b/src/backend/langflow/components/documentloaders/IMSDbLoader.py new file mode 100644 index 000000000..215f8585a --- /dev/null +++ b/src/backend/langflow/components/documentloaders/IMSDbLoader.py @@ -0,0 +1,23 @@ + +from langflow import CustomComponent +from langchain.field_typing import Document +from typing import Dict, Optional + +class IMSDbLoaderComponent(CustomComponent): + display_name = "IMSDbLoader" + description = "Load `IMSDb` webpages." + + def build_config(self): + return { + "metadata": {"display_name": "Metadata", "type": "dict"}, + "web_path": {"display_name": "Web Page", "type": "str"}, + } + + def build( + self, + metadata: Optional[Dict] = None, + web_path: str = "", + ) -> Document: + # Assuming there is a class or function named `IMSDbLoader` that takes these parameters + # and returns a Document object. Replace `IMSDbLoader` with the actual class or function name. + return IMSDbLoader(metadata=metadata, web_path=web_path) diff --git a/src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py b/src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py new file mode 100644 index 000000000..fd3bc6ae8 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py @@ -0,0 +1,23 @@ + +from langflow import CustomComponent +from langchain.documents import Document +from typing import Optional, Dict + +class PyPDFDirectoryLoaderComponent(CustomComponent): + display_name = "PyPDFDirectoryLoader" + description = "Load a directory with `PDF` files using `pypdf` and chunks at character level." 
+ + def build_config(self): + return { + "metadata": {"display_name": "Metadata", "required": False}, + "path": {"display_name": "Local directory", "required": True}, + } + + def build( + self, + path: str, + metadata: Optional[Dict] = None, + ) -> Document: + # Assuming there is a PyPDFDirectoryLoader class that takes these parameters + # Since the actual implementation is not provided, this is a placeholder + return PyPDFDirectoryLoader(path=path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/PyPDFLoader.py b/src/backend/langflow/components/documentloaders/PyPDFLoader.py new file mode 100644 index 000000000..bf3667694 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/PyPDFLoader.py @@ -0,0 +1,31 @@ + +from langflow import CustomComponent +from langchain.document_loaders import BaseLoader +from typing import Optional, Dict + +class PyPDFLoaderComponent(CustomComponent): + display_name = "PyPDFLoader" + description = "Load PDF using pypdf into list of documents" + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "type": "file", + "fileTypes": ["pdf"], + "show": True, + }, + "metadata": { + "display_name": "Metadata", + "required": False, + "type": "dict", + "show": True, + } + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> BaseLoader: + # Assuming there is a PyPDFLoader class that takes file_path and metadata as parameters + # and inherits from BaseLoader + return PyPDFLoader(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py new file mode 100644 index 000000000..b5a5007da --- /dev/null +++ b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py @@ -0,0 +1,21 @@ + +from 
langflow import CustomComponent +from langchain.field_typing import Document +from typing import Dict, Optional + +class ReadTheDocsLoaderComponent(CustomComponent): + display_name = "ReadTheDocsLoader" + description = "Load `ReadTheDocs` documentation directory." + + def build_config(self): + return { + "metadata": {"display_name": "Metadata", "default": {}}, + "path": {"display_name": "Local directory", "required": True}, + } + + def build( + self, + path: str, + metadata: Optional[Dict] = None, + ) -> Document: + return Document(path=path, metadata=metadata or {}) diff --git a/src/backend/langflow/components/documentloaders/SRTLoader.py b/src/backend/langflow/components/documentloaders/SRTLoader.py new file mode 100644 index 000000000..83a0cf1e8 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/SRTLoader.py @@ -0,0 +1,25 @@ + +from langflow import CustomComponent +from langchain.documents import Document +from typing import Optional, Dict + +class SRTLoaderComponent(CustomComponent): + display_name = "SRTLoader" + description = "Load `.srt` (subtitle) files." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/subtitle" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "fileTypes": ["srt"], + }, + "metadata": { + "display_name": "Metadata", + "required": False, + }, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + return Document(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py new file mode 100644 index 000000000..5c4c92421 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py @@ -0,0 +1,26 @@ + +from langflow import CustomComponent +from typing import Optional, Dict + +class SlackDirectoryLoaderComponent(CustomComponent): + display_name = "SlackDirectoryLoader" + description = "Load from a `Slack` directory dump." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/slack" + + def build_config(self): + return { + "zip_path": {"display_name": "Path to zip file"}, + "metadata": {"display_name": "Metadata"}, + "workspace_url": {"display_name": "Workspace URL"}, + } + + def build( + self, + zip_path: str, + metadata: Optional[Dict] = None, + workspace_url: Optional[str] = None, + ) -> 'Document': + # Assuming there is a SlackDirectoryLoader class that takes these parameters + # Since the actual implementation details are not provided, this is a placeholder + # Replace SlackDirectoryLoader with the actual class that should be instantiated + return SlackDirectoryLoader(zip_path=zip_path, metadata=metadata, workspace_url=workspace_url) diff --git a/src/backend/langflow/components/documentloaders/TextLoader.py b/src/backend/langflow/components/documentloaders/TextLoader.py new file mode 100644 index 000000000..a8e6e18f7 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/TextLoader.py @@ -0,0 +1,28 @@ + +from langflow import CustomComponent +from langchain.data_connections import Document +from typing import Optional, Dict + +class TextLoaderComponent(CustomComponent): + display_name = "TextLoader" + description = "Load text file." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "type": "file", + "suffixes": [".txt"], + }, + "metadata": { + "display_name": "Metadata", + "required": False, + "type": "dict", + "default": {}, + }, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + return Document(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py new file mode 100644 index 000000000..f6358324e --- /dev/null +++ b/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py @@ -0,0 +1,21 @@ + +from langchain import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + + +class UnstructuredHTMLLoaderComponent(CustomComponent): + display_name = "UnstructuredHTMLLoader" + description = "Load `HTML` files using `Unstructured`." + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html" + + def build_config(self): + return { + "file_path": {"display_name": "File Path", "type": "file", "fileTypes": ["html"]}, + "metadata": {"display_name": "Metadata"}, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + # Assuming the existence of a function or class named UnstructuredHTMLLoader that + # loads HTML and creates a Document object; Replace with actual implementation. 
+ return UnstructuredHTMLLoader(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py new file mode 100644 index 000000000..11cec9fc1 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py @@ -0,0 +1,32 @@ + +from langflow import CustomComponent +from langchain.document_loaders import Document +from typing import Optional, Dict + + +class UnstructuredPowerPointLoaderComponent(CustomComponent): + display_name = "UnstructuredPowerPointLoader" + description = "Load `Microsoft PowerPoint` files using `Unstructured`." + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "type": "file", + "fileTypes": ["pptx", "ppt"], + }, + "metadata": { + "display_name": "Metadata", + "type": "dict", + }, + } + + def build( + self, + file_path: str, + metadata: Optional[Dict] = None, + ) -> Document: + # Assuming there is a loader class `UnstructuredPowerPointLoader` that takes these parameters + # Since the actual loader class is not provided, this is a placeholder for the actual implementation + loader_class = self.get_loader_class() # Placeholder method to obtain the correct loader class + return loader_class(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py new file mode 100644 index 000000000..5ff906482 --- /dev/null +++ b/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py @@ -0,0 +1,28 @@ + +from langchain import CustomComponent +from langchain.field_typing import Document +from typing import Optional, Dict + + +class UnstructuredWordDocumentLoaderComponent(CustomComponent): + display_name = "UnstructuredWordDocumentLoader" + description = "Load 
`Microsoft Word` file using `Unstructured`." + documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word" + + def build_config(self): + return { + "file_path": { + "display_name": "File Path", + "required": True, + "type": "file", + "suffixes": [".docx", ".doc"], + }, + "metadata": { + "display_name": "Metadata", + "required": False, + "type": "dict" + }, + } + + def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + return Document(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/langflow/components/embeddings/CohereEmbeddings.py new file mode 100644 index 000000000..3c5c88db9 --- /dev/null +++ b/src/backend/langflow/components/embeddings/CohereEmbeddings.py @@ -0,0 +1,34 @@ + +from langflow import CustomComponent +from langchain.embeddings import CohereEmbeddings +from typing import Optional, Any + + +class CohereEmbeddingsComponent(CustomComponent): + display_name = "CohereEmbeddings" + description = "Cohere embedding models." 
+ + def build_config(self): + return { + "async_client": {"display_name": "Async Client", "advanced": True}, + "client": {"display_name": "Client", "advanced": True}, + "cohere_api_key": {"display_name": "Cohere API Key"}, + "model": {"display_name": "Model", "default": "embed-english-v2.0", "advanced": True}, + "truncate": {"display_name": "Truncate", "advanced": True}, + } + + def build( + self, + async_client: Optional[Any] = None, + client: Optional[Any] = None, + cohere_api_key: Optional[str] = None, + model: str = "embed-english-v2.0", + truncate: Optional[str] = None, + ) -> CohereEmbeddings: + return CohereEmbeddings( + async_client=async_client, + client=client, + cohere_api_key=cohere_api_key, + model=model, + truncate=truncate, + ) diff --git a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py new file mode 100644 index 000000000..6d9c6a0fe --- /dev/null +++ b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py @@ -0,0 +1,37 @@ + +from langflow import CustomComponent +from typing import Optional, Any, Dict +from langchain.field_typing import Embeddings + +class HuggingFaceEmbeddingsComponent(CustomComponent): + display_name = "HuggingFaceEmbeddings" + description = "HuggingFace sentence_transformers embedding models." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers" + + def build_config(self): + return { + "cache_folder": {"display_name": "Cache Folder", "advanced": True}, + "client": {"display_name": "Client", "advanced": True}, + "encode_kwargs": {"display_name": "Encode Kwargs", "advanced": True}, + "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, + "model_name": {"display_name": "Model Name"}, + "multi_process": {"display_name": "Multi Process", "advanced": True}, + } + + def build( + self, + cache_folder: Optional[str] = None, + client: Optional[Any] = None, + encode_kwargs: Optional[Dict] = None, + model_kwargs: Optional[Dict] = None, + model_name: str = "sentence-transformers/all-mpnet-base-v2", + multi_process: bool = False, + ) -> Embeddings: + return Embeddings( + cache_folder=cache_folder, + client=client, + encode_kwargs=encode_kwargs, + model_kwargs=model_kwargs, + model_name=model_name, + multi_process=multi_process, + ) diff --git a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py new file mode 100644 index 000000000..7faf4fd07 --- /dev/null +++ b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py @@ -0,0 +1,75 @@ + +from langflow import CustomComponent +from typing import Optional, Set, Dict, Any, Union, Callable +from langchain.embeddings import OpenAIEmbeddings + +class OpenAIEmbeddingsComponent(CustomComponent): + display_name = "OpenAIEmbeddings" + description = "OpenAI embedding models" + + def build_config(self): + return { + "allowed_special": {"display_name": "Allowed Special", "advanced": True}, + "disallowed_special": {"display_name": "Disallowed Special", "advanced": True}, + "chunk_size": {"display_name": "Chunk Size", "advanced": True}, + "client": {"display_name": "Client", "advanced": True}, + "deployment": {"display_name": "Deployment", "advanced": True}, + 
"embedding_ctx_length": {"display_name": "Embedding Context Length", "advanced": True}, + "max_retries": {"display_name": "Max Retries", "advanced": True}, + "model": {"display_name": "Model", "advanced": True}, + "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, + "openai_api_base": {"display_name": "OpenAI API Base", "advanced": True}, + "openai_api_key": {"display_name": "OpenAI API Key"}, + "openai_api_type": {"display_name": "OpenAI API Type", "advanced": True}, + "openai_api_version": {"display_name": "OpenAI API Version", "advanced": True}, + "openai_organization": {"display_name": "OpenAI Organization", "advanced": True}, + "openai_proxy": {"display_name": "OpenAI Proxy", "advanced": True}, + "request_timeout": {"display_name": "Request Timeout", "advanced": True}, + "show_progress_bar": {"display_name": "Show Progress Bar", "advanced": True}, + "skip_empty": {"display_name": "Skip Empty", "advanced": True}, + "tiktoken_model_name": {"display_name": "TikToken Model Name"}, + } + + def build( + self, + allowed_special: Optional[Set[str]] = set(), + disallowed_special: str = "all", + chunk_size: Optional[int] = 1000, + client: Optional[Any] = None, + deployment: str = "text-embedding-ada-002", + embedding_ctx_length: Optional[int] = 8191, + max_retries: Optional[int] = 6, + model: str = "text-embedding-ada-002", + model_kwargs: Optional[Dict[str, Any]] = None, + openai_api_base: Optional[str] = None, + openai_api_key: Optional[str] = '', + openai_api_type: Optional[str] = None, + openai_api_version: Optional[str] = None, + openai_organization: Optional[str] = None, + openai_proxy: Optional[str] = None, + request_timeout: Optional[float] = None, + show_progress_bar: Optional[bool] = False, + skip_empty: Optional[bool] = False, + tiktoken_model_name: Optional[str] = None, + ) -> Union[OpenAIEmbeddings, Callable]: + return OpenAIEmbeddings( + allowed_special=allowed_special, + disallowed_special=disallowed_special, + chunk_size=chunk_size, + 
client=client, + deployment=deployment, + embedding_ctx_length=embedding_ctx_length, + max_retries=max_retries, + model=model, + model_kwargs=model_kwargs, + openai_api_base=openai_api_base, + openai_api_key=openai_api_key, + openai_api_type=openai_api_type, + openai_api_version=openai_api_version, + openai_organization=openai_organization, + openai_proxy=openai_proxy, + request_timeout=request_timeout, + show_progress_bar=show_progress_bar, + skip_empty=skip_empty, + tiktoken_model_name=tiktoken_model_name, + ) diff --git a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py new file mode 100644 index 000000000..d0e4cc811 --- /dev/null +++ b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py @@ -0,0 +1,60 @@ + +from langflow import CustomComponent +from langchain.embeddings import VertexAIEmbeddings +from typing import Optional, List + +class VertexAIEmbeddingsComponent(CustomComponent): + display_name = "VertexAIEmbeddings" + description = "Google Cloud VertexAI embedding models." 
+ + def build_config(self): + return { + "client": {"display_name": "Client", "advanced": True}, + "credentials": {"display_name": "Credentials", "default": '', "file_types": ['json']}, + "location": {"display_name": "Location", "default": 'us-central1', "advanced": True}, + "max_output_tokens": {"display_name": "Max Output Tokens", "default": 128}, + "max_retries": {"display_name": "Max Retries", "default": 6, "advanced": True}, + "model_name": {"display_name": "Model Name", "default": 'textembedding-gecko'}, + "n": {"display_name": "N", "default": 1, "advanced": True}, + "project": {"display_name": "Project", "advanced": True}, + "request_parallelism": {"display_name": "Request Parallelism", "default": 5, "advanced": True}, + "stop": {"display_name": "Stop", "advanced": True}, + "streaming": {"display_name": "Streaming", "default": False, "advanced": True}, + "temperature": {"display_name": "Temperature", "default": 0.0}, + "top_k": {"display_name": "Top K", "default": 40, "advanced": True}, + "top_p": {"display_name": "Top P", "default": 0.95, "advanced": True}, + } + + def build( + self, + client: Optional[str] = None, + credentials: Optional[str] = None, + location: str = 'us-central1', + max_output_tokens: int = 128, + max_retries: int = 6, + model_name: str = 'textembedding-gecko', + n: int = 1, + project: Optional[str] = None, + request_parallelism: int = 5, + stop: Optional[List[str]] = None, + streaming: bool = False, + temperature: float = 0.0, + top_k: int = 40, + top_p: float = 0.95, + ) -> VertexAIEmbeddings: + return VertexAIEmbeddings( + client=client, + credentials=credentials, + location=location, + max_output_tokens=max_output_tokens, + max_retries=max_retries, + model_name=model_name, + n=n, + project=project, + request_parallelism=request_parallelism, + stop=stop, + streaming=streaming, + temperature=temperature, + top_k=top_k, + top_p=top_p, + ) diff --git a/src/backend/langflow/components/llms/Anthropic.py 
b/src/backend/langflow/components/llms/Anthropic.py new file mode 100644 index 000000000..114560337 --- /dev/null +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -0,0 +1,61 @@ + +from langflow import CustomComponent +from pydantic import SecretStr +from typing import Optional, Dict, Any +from langchain.field_typing import BaseLanguageModel + +class AnthropicComponent(CustomComponent): + display_name = "Anthropic" + description = "Anthropic large language models." + + def build_config(self): + return { + "anthropic_api_key": { + "display_name": "Anthropic API Key", + "type": SecretStr, + }, + "anthropic_api_url": { + "display_name": "Anthropic API URL", + "type": str, + }, + "model_kwargs": { + "display_name": "Model Kwargs", + "type": Dict[str, Any], + "advanced": True, + }, + "temperature": { + "display_name": "Temperature", + "type": float, + }, + } + + def build( + self, + anthropic_api_key: Optional[SecretStr], + anthropic_api_url: Optional[str], + model_kwargs: Optional[Dict[str, Any]], + temperature: Optional[float] = None, + ) -> BaseLanguageModel: + # The actual builder method should return an instance of the Anthropic class + # Here we are returning a placeholder class as the Anthropic class is not defined + # This is to comply with the type hints required by the CustomComponent + class Anthropic(BaseLanguageModel): + def __init__(self, api_key: Optional[SecretStr], api_url: Optional[str], model_kwargs: Optional[Dict[str, Any]] = None, temperature: Optional[float] = None): + # Initialize Anthropic model with the provided arguments + super().__init__() + self.api_key = api_key + self.api_url = api_url + self.model_kwargs = model_kwargs + self.temperature = temperature + + def __call__(self, prompt: str) -> str: + # The logic to call the Anthropic model would go here + # This is a placeholder implementation + return "This is a simulated response from the Anthropic model." 
+ + return Anthropic( + api_key=anthropic_api_key, + api_url=anthropic_api_url, + model_kwargs=model_kwargs, + temperature=temperature, + ) diff --git a/src/backend/langflow/components/llms/CTransformers.py b/src/backend/langflow/components/llms/CTransformers.py new file mode 100644 index 000000000..a6b34b1e4 --- /dev/null +++ b/src/backend/langflow/components/llms/CTransformers.py @@ -0,0 +1,56 @@ + +from langflow import CustomComponent +from langchain.llms import BaseLanguageModel +from typing import Optional, Dict + +class CTransformersComponent(CustomComponent): + display_name = "CTransformers" + description = "C Transformers LLM models" + documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers" + + def build_config(self): + return { + "model": {"display_name": "Model", "required": True}, + "model_file": {"display_name": "Model File", "required": False}, + "model_type": {"display_name": "Model Type", "required": False}, + "config": {"display_name": "Config", "advanced": True, "required": False}, + } + + def build( + self, + model: str, + model_file: Optional[str] = None, + model_type: Optional[str] = None, + config: Optional[Dict] = None + ) -> BaseLanguageModel: + # Default config values + default_config = { + "top_k": 40, + "top_p": 0.95, + "temperature": 0.8, + "repetition_penalty": 1.1, + "last_n_tokens": 64, + "seed": -1, + "max_new_tokens": 256, + "stop": None, + "stream": False, + "reset": True, + "batch_size": 8, + "threads": -1, + "context_length": -1, + "gpu_layers": 0 + } + + # If there is a custom config, update the default config with it + if config: + default_config.update(config) + + # Assuming the import below is correct and CTransformers is a class within the langchain library + # that inherits from BaseLanguageModel. 
The following import statement is required: + # from langchain.llms.integration_module import CTransformers + + return CTransformers(model=model, model_file=model_file, model_type=model_type, config=default_config) + +# Note: The actual CTransformers class needs to be imported from the correct module inside the langchain library. +# The `integration_module` in the import statement is just a placeholder and should be replaced with +# the actual module where the CTransformers class is located. diff --git a/src/backend/langflow/components/llms/ChatAnthropic.py b/src/backend/langflow/components/llms/ChatAnthropic.py new file mode 100644 index 000000000..fbc41c3e9 --- /dev/null +++ b/src/backend/langflow/components/llms/ChatAnthropic.py @@ -0,0 +1,47 @@ + +from langflow import CustomComponent +from langchain.tools import SecretStr +from typing import Optional, Dict, Union, Callable +from langflow.field_typing import BaseLanguageModel + +class ChatAnthropicComponent(CustomComponent): + display_name = "ChatAnthropic" + description = "`Anthropic` chat large language models." 
+ documentation = "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic" + + def build_config(self): + return { + "anthropic_api_key": { + "display_name": "Anthropic API Key", + "type": SecretStr, + }, + "anthropic_api_url": { + "display_name": "Anthropic API URL", + "type": str, + }, + "model_kwargs": { + "display_name": "Model Kwargs", + "type": Dict[str, Union[str, int, float, bool]], + "advanced": True, + }, + "temperature": { + "display_name": "Temperature", + "type": float, + }, + } + + def build( + self, + anthropic_api_key: Optional[SecretStr] = None, + anthropic_api_url: Optional[str] = None, + model_kwargs: Optional[Dict[str, Union[str, int, float, bool]]] = None, + temperature: Optional[float] = None, + ) -> Union[BaseLanguageModel, Callable]: + from langchain.model_io.models.chat.integrations import ChatAnthropic # Importing here due to potential local scope requirements + + return ChatAnthropic( + anthropic_api_key=anthropic_api_key.get_secret_value() if anthropic_api_key else None, + anthropic_api_url=anthropic_api_url, + model_kwargs=model_kwargs, + temperature=temperature, + ) diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py new file mode 100644 index 000000000..e9552f2b1 --- /dev/null +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -0,0 +1,82 @@ + +from langflow import CustomComponent +from langchain.llms import BaseLLM +from typing import Optional, Dict, Union, Any +from langchain.field_typing import BaseLanguageModel + +class ChatOpenAIComponent(CustomComponent): + display_name = "ChatOpenAI" + description = "`OpenAI` Chat large language models API." 
+ + def build_config(self): + return { + "max_tokens": { + "display_name": "Max Tokens", + "type": "int", + "advanced": False, + "required": False, + }, + "model_kwargs": { + "display_name": "Model Kwargs", + "type": "dict", + "advanced": True, + "required": False, + }, + "model_name": { + "display_name": "Model Name", + "type": "str", + "advanced": False, + "required": False, + "options": [ + "gpt-4-1106-preview", + "gpt-4", + "gpt-4-32k", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + ], + }, + "openai_api_base": { + "display_name": "OpenAI API Base", + "type": "str", + "advanced": False, + "required": False, + "info": ( + "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\n" + "You can change this to use other APIs like JinaChat, LocalAI and Prem." + ), + }, + "openai_api_key": { + "display_name": "OpenAI API Key", + "type": "str", + "advanced": False, + "required": False, + }, + "temperature": { + "display_name": "Temperature", + "type": "float", + "advanced": False, + "required": False, + "default": 0.7, + }, + } + + def build( + self, + max_tokens: Optional[int] = None, + model_kwargs: Optional[Dict[str, Any]] = None, + model_name: Optional[str] = "gpt-4-1106-preview", + openai_api_base: Optional[str] = None, + openai_api_key: Optional[str] = None, + temperature: float = 0.7, + ) -> Union[BaseLanguageModel, BaseLLM]: + + # Assuming there is a class `ChatOpenAI` that takes these parameters + # The `ChatOpenAI` class must be imported or defined elsewhere in the actual implementation + return ChatOpenAI( + max_tokens=max_tokens, + model_kwargs=model_kwargs, + model_name=model_name, + openai_api_base=openai_api_base, + openai_api_key=openai_api_key, + temperature=temperature, + ) diff --git a/src/backend/langflow/components/llms/ChatVertexAI.py b/src/backend/langflow/components/llms/ChatVertexAI.py new file mode 100644 index 000000000..63fe99f09 --- /dev/null +++ b/src/backend/langflow/components/llms/ChatVertexAI.py @@ -0,0 +1,84 @@ + +from 
langflow import CustomComponent +from typing import List +from langchain.messages import BaseMessage + +class ChatVertexAIComponent(CustomComponent): + display_name = "ChatVertexAI" + description = "`Vertex AI` Chat large language models API." + + def build_config(self): + return { + "credentials": { + "display_name": "Credentials", + "type": "file", + "fileTypes": ["json"], + "file_path": None, + }, + "examples": { + "display_name": "Examples", + "multiline": True, + }, + "location": { + "display_name": "Location", + "default": "us-central1", + }, + "max_output_tokens": { + "display_name": "Max Output Tokens", + "default": 128, + "advanced": True, + }, + "model_name": { + "display_name": "Model Name", + "default": "chat-bison", + }, + "project": { + "display_name": "Project", + }, + "temperature": { + "display_name": "Temperature", + "default": 0.0, + }, + "top_k": { + "display_name": "Top K", + "default": 40, + "advanced": True, + }, + "top_p": { + "display_name": "Top P", + "default": 0.95, + "advanced": True, + }, + "verbose": { + "display_name": "Verbose", + "default": False, + "advanced": True, + }, + } + + def build( + self, + credentials: str, + examples: List[BaseMessage], + project: str, + location: str = "us-central1", + max_output_tokens: int = 128, + model_name: str = "chat-bison", + temperature: float = 0.0, + top_k: int = 40, + top_p: float = 0.95, + verbose: bool = False, + ): + # Assuming there is a ChatVertexAI class that takes these parameters + return ChatVertexAI( + credentials=credentials, + examples=examples, + location=location, + max_output_tokens=max_output_tokens, + model_name=model_name, + project=project, + temperature=temperature, + top_k=top_k, + top_p=top_p, + verbose=verbose, + ) diff --git a/src/backend/langflow/components/llms/Cohere.py b/src/backend/langflow/components/llms/Cohere.py new file mode 100644 index 000000000..15895013e --- /dev/null +++ b/src/backend/langflow/components/llms/Cohere.py @@ -0,0 +1,40 @@ + +from langflow 
import CustomComponent +from langchain.llms import BaseLanguageModel +from typing import Optional + +class CohereComponent(CustomComponent): + display_name = "Cohere" + description = "Cohere large language models." + documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere" + + def build_config(self): + return { + "cohere_api_key": { + "display_name": "Cohere API Key", + "type": "password", + "show": True + }, + "max_tokens": { + "display_name": "Max Tokens", + "default": 256, + "type": "int", + "show": True + }, + "temperature": { + "display_name": "Temperature", + "default": 0.75, + "type": "float", + "show": True + }, + } + + def build( + self, + cohere_api_key: str, + max_tokens: Optional[int] = 256, + temperature: Optional[float] = 0.75, + ) -> BaseLanguageModel: + # Assuming there is a Cohere class that takes these parameters to initialize + # Please replace `Cohere` with the actual class name that should be instantiated + return Cohere(api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) diff --git a/src/backend/langflow/components/llms/LlamaCpp.py b/src/backend/langflow/components/llms/LlamaCpp.py new file mode 100644 index 000000000..eaf40f1d8 --- /dev/null +++ b/src/backend/langflow/components/llms/LlamaCpp.py @@ -0,0 +1,126 @@ + +from typing import Optional, List, Dict, Any +from langflow import CustomComponent +from langchain.llms import BaseLanguageModel + +class LlamaCppComponent(CustomComponent): + display_name = "LlamaCpp" + description = "llama.cpp model." 
from typing import Any, Dict, List, Optional

from langchain_community.llms.llamacpp import LlamaCpp

from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel


class LlamaCppComponent(CustomComponent):
    """Component wrapping a local llama.cpp model."""

    display_name = "LlamaCpp"
    description = "llama.cpp model."
    documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp"

    def build_config(self):
        """Declare the UI fields; only `model_path` and `temperature` are non-advanced."""
        return {
            "grammar": {"display_name": "Grammar", "advanced": True},
            "cache": {"display_name": "Cache", "advanced": True},
            "client": {"display_name": "Client", "advanced": True},
            "echo": {"display_name": "Echo", "advanced": True},
            "f16_kv": {"display_name": "F16 KV", "advanced": True},
            "grammar_path": {"display_name": "Grammar Path", "advanced": True},
            "last_n_tokens_size": {"display_name": "Last N Tokens Size", "advanced": True},
            "logits_all": {"display_name": "Logits All", "advanced": True},
            "logprobs": {"display_name": "Logprobs", "advanced": True},
            "lora_base": {"display_name": "Lora Base", "advanced": True},
            "lora_path": {"display_name": "Lora Path", "advanced": True},
            "max_tokens": {"display_name": "Max Tokens", "advanced": True},
            "metadata": {"display_name": "Metadata", "advanced": True},
            "model_kwargs": {"display_name": "Model Kwargs", "advanced": True},
            "model_path": {"display_name": "Model Path"},
            "n_batch": {"display_name": "N Batch", "advanced": True},
            "n_ctx": {"display_name": "N Ctx", "advanced": True},
            "n_gpu_layers": {"display_name": "N GPU Layers", "advanced": True},
            "n_parts": {"display_name": "N Parts", "advanced": True},
            "n_threads": {"display_name": "N Threads", "advanced": True},
            "repeat_penalty": {"display_name": "Repeat Penalty", "advanced": True},
            "rope_freq_base": {"display_name": "Rope Freq Base", "advanced": True},
            "rope_freq_scale": {"display_name": "Rope Freq Scale", "advanced": True},
            "seed": {"display_name": "Seed", "advanced": True},
            "stop": {"display_name": "Stop", "advanced": True},
            "streaming": {"display_name": "Streaming", "advanced": True},
            "suffix": {"display_name": "Suffix", "advanced": True},
            "tags": {"display_name": "Tags", "advanced": True},
            "temperature": {"display_name": "Temperature"},
            "top_k": {"display_name": "Top K", "advanced": True},
            "top_p": {"display_name": "Top P", "advanced": True},
            "use_mlock": {"display_name": "Use Mlock", "advanced": True},
            "use_mmap": {"display_name": "Use Mmap", "advanced": True},
            "verbose": {"display_name": "Verbose", "advanced": True},
            "vocab_only": {"display_name": "Vocab Only", "advanced": True},
        }

    def build(
        self,
        model_path: str,
        grammar: Optional[str] = None,
        cache: Optional[bool] = None,
        client: Optional[Any] = None,
        echo: Optional[bool] = False,
        f16_kv: Optional[bool] = True,
        grammar_path: Optional[str] = None,
        last_n_tokens_size: Optional[int] = 64,
        logits_all: Optional[bool] = False,
        logprobs: Optional[int] = None,
        lora_base: Optional[str] = None,
        lora_path: Optional[str] = None,
        max_tokens: Optional[int] = 256,
        metadata: Optional[Dict] = None,
        model_kwargs: Optional[Dict] = None,
        n_batch: Optional[int] = 8,
        n_ctx: Optional[int] = 512,
        n_gpu_layers: Optional[int] = None,
        n_parts: Optional[int] = -1,
        n_threads: Optional[int] = None,
        repeat_penalty: Optional[float] = 1.1,
        rope_freq_base: Optional[float] = 10000.0,
        rope_freq_scale: Optional[float] = 1.0,
        seed: Optional[int] = -1,
        stop: Optional[List[str]] = None,
        streaming: Optional[bool] = True,
        suffix: Optional[str] = None,
        tags: Optional[List[str]] = None,
        temperature: Optional[float] = 0.8,
        top_k: Optional[int] = 40,
        top_p: Optional[float] = 0.95,
        use_mlock: Optional[bool] = False,
        use_mmap: Optional[bool] = True,
        verbose: Optional[bool] = True,
        vocab_only: Optional[bool] = False,
    ) -> BaseLanguageModel:
        """Instantiate a LlamaCpp LLM.

        Fixed: the original tried to instantiate the abstract
        `BaseLanguageModel` base class with these kwargs instead of the
        concrete `LlamaCpp` model.
        """
        # Capture every argument except `self`; must be the first statement so
        # locals() contains exactly the call arguments.
        params = {name: value for name, value in locals().items() if name != "self"}
        # Drop None values so LlamaCpp's own defaults apply rather than being
        # overridden with None (same convention as the Ollama component).
        params = {k: v for k, v in params.items() if v is not None}
        return LlamaCpp(**params)
+ ), + }, + "openai_api_key": { + "display_name": "OpenAI API Key", + "default": "", + "password": True, + }, + "temperature": {"display_name": "Temperature", "default": 0.7}, + } + + def build( + self, + max_tokens: int = 256, + model_kwargs: Optional[Dict] = None, + model_name: str = "text-davinci-003", + openai_api_base: str = "https://api.openai.com/v1", + openai_api_key: str = "", + temperature: float = 0.7, + ) -> BaseLLM: + return BaseLLM( + max_tokens=max_tokens, + model_kwargs=model_kwargs or {}, + model_name=model_name, + openai_api_base=openai_api_base, + openai_api_key=openai_api_key, + temperature=temperature, + ) diff --git a/src/backend/langflow/components/llms/VertexAI.py b/src/backend/langflow/components/llms/VertexAI.py new file mode 100644 index 000000000..65bd2cacd --- /dev/null +++ b/src/backend/langflow/components/llms/VertexAI.py @@ -0,0 +1,145 @@ + +from langflow import CustomComponent +from langchain.llms import BaseLLM +from typing import Optional, Union, Callable, Dict + +class VertexAIComponent(CustomComponent): + display_name = "VertexAI" + description = "Google Vertex AI large language models" + + def build_config(self): + return { + "credentials": { + "display_name": "Credentials", + "type": "file", + "file_types": ["json"], + "required": False, + "default": None, + }, + "location": { + "display_name": "Location", + "type": "str", + "default": "us-central1", + "required": False, + }, + "max_output_tokens": { + "display_name": "Max Output Tokens", + "type": "int", + "default": 128, + "required": False, + }, + "max_retries": { + "display_name": "Max Retries", + "type": "int", + "default": 6, + "required": False, + }, + "metadata": { + "display_name": "Metadata", + "type": "dict", + "required": False, + "default": {}, + }, + "model_name": { + "display_name": "Model Name", + "type": "str", + "default": "text-bison", + "required": False, + }, + "n": { + "display_name": "N", + "type": "int", + "default": 1, + "required": False, + }, + 
"project": { + "display_name": "Project", + "type": "str", + "required": False, + "default": None, + }, + "request_parallelism": { + "display_name": "Request Parallelism", + "type": "int", + "default": 5, + "required": False, + }, + "streaming": { + "display_name": "Streaming", + "type": "bool", + "default": False, + "required": False, + }, + "temperature": { + "display_name": "Temperature", + "type": "float", + "default": 0.0, + "required": False, + }, + "top_k": { + "display_name": "Top K", + "type": "int", + "default": 40, + "required": False, + }, + "top_p": { + "display_name": "Top P", + "type": "float", + "default": 0.95, + "required": False, + }, + "tuned_model_name": { + "display_name": "Tuned Model Name", + "type": "str", + "required": False, + "default": None, + }, + "verbose": { + "display_name": "Verbose", + "type": "bool", + "default": False, + "required": False, + }, + } + + def build( + self, + credentials: Optional[str] = None, + location: str = "us-central1", + max_output_tokens: int = 128, + max_retries: int = 6, + metadata: Dict = None, + model_name: str = "text-bison", + n: int = 1, + project: Optional[str] = None, + request_parallelism: int = 5, + streaming: bool = False, + temperature: float = 0.0, + top_k: int = 40, + top_p: float = 0.95, + tuned_model_name: Optional[str] = None, + verbose: bool = False, + ) -> Union[BaseLLM, Callable]: + if metadata is None: + metadata = {} + + # Import the appropriate VertexAI class from the langchain.llms module + from langchain.llms import VertexAI + + return VertexAI( + credentials=credentials, + location=location, + max_output_tokens=max_output_tokens, + max_retries=max_retries, + metadata=metadata, + model_name=model_name, + n=n, + project=project, + request_parallelism=request_parallelism, + streaming=streaming, + temperature=temperature, + top_k=top_k, + top_p=top_p, + tuned_model_name=tuned_model_name, + verbose=verbose, + ) diff --git 
from typing import Callable, Optional, Union

from langchain.retrievers import MultiQueryRetriever

from langflow import CustomComponent
from langflow.field_typing import (
    BaseLLM,
    BaseRetriever,
    PromptTemplate,
)


class MultiQueryRetrieverComponent(CustomComponent):
    """Component building a MultiQueryRetriever from an LLM and a base retriever."""

    display_name = "MultiQueryRetriever"
    description = "Initialize from llm using default template."
    documentation = "https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever"

    def build_config(self):
        """Declare the UI fields; the default prompt mirrors langchain's built-in template."""
        return {
            "llm": {"display_name": "LLM"},
            "prompt": {"display_name": "Prompt", "default": {
                "input_variables": ["question"],
                "input_types": {},
                "output_parser": None,
                "partial_variables": {},
                "template": 'You are an AI language model assistant. Your task is \n'
                            'to generate 3 different versions of the given user \n'
                            'question to retrieve relevant documents from a vector database. \n'
                            'By generating multiple perspectives on the user question, \n'
                            'your goal is to help the user overcome some of the limitations \n'
                            'of distance-based similarity search. Provide these alternative \n'
                            'questions separated by newlines. Original question: {question}',
                "template_format": "f-string",
                "validate_template": False,
                "_type": "prompt",
            }},
            "retriever": {"display_name": "Retriever"},
            "parser_key": {"display_name": "Parser Key", "default": "lines"},
        }

    def build(
        self,
        llm: BaseLLM,
        retriever: BaseRetriever,
        prompt: Optional[PromptTemplate] = None,
        parser_key: str = "lines",
    ) -> Union[Callable, MultiQueryRetriever]:
        """Build the retriever via the `from_llm` factory.

        Fixed: the original called the `MultiQueryRetriever` constructor with
        `llm=` and `prompt=` kwargs it does not accept (it expects a prebuilt
        `llm_chain`); `from_llm` is the supported factory and supplies the
        default prompt when none is given.
        """
        if prompt is None:
            return MultiQueryRetriever.from_llm(
                retriever=retriever, llm=llm, parser_key=parser_key
            )
        return MultiQueryRetriever.from_llm(
            retriever=retriever, llm=llm, prompt=prompt, parser_key=parser_key
        )
+ + def build_config(self): + return { + "documents": {"display_name": "Documents"}, + "chunk_overlap": {"display_name": "Chunk Overlap", "default": 200}, + "chunk_size": {"display_name": "Chunk Size", "default": 1000}, + "separator": {"display_name": "Separator", "default": "\n"}, + } + + def build( + self, + documents: List[Document], + chunk_overlap: int = 200, + chunk_size: int = 1000, + separator: str = "\n", + ) -> TextSplitter: + return TextSplitter( + documents=documents, + chunk_overlap=chunk_overlap, + chunk_size=chunk_size, + separator=separator, + ) diff --git a/src/backend/langflow/components/toolkits/JsonToolkit.py b/src/backend/langflow/components/toolkits/JsonToolkit.py new file mode 100644 index 000000000..f0cf00a34 --- /dev/null +++ b/src/backend/langflow/components/toolkits/JsonToolkit.py @@ -0,0 +1,17 @@ + +from langflow import CustomComponent +from langchain.field_typing import JsonSpec, Tool + +class JsonToolkitComponent(CustomComponent): + display_name = "JsonToolkit" + description = "Toolkit for interacting with a JSON spec." 
+ + def build_config(self): + return { + "spec": {"display_name": "Spec", "type": JsonSpec}, + } + + def build(self, spec: JsonSpec) -> Tool: + # Assuming JsonToolkit is the class that should be instantiated with the spec + # The actual class name should be used in place of JsonToolkit if it is different + return JsonToolkit(spec=spec) # Replace JsonToolkit with the actual class name if necessary diff --git a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py new file mode 100644 index 000000000..49fd6f06d --- /dev/null +++ b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py @@ -0,0 +1,22 @@ + +from langflow import CustomComponent +from langchain.field_typing import AgentExecutor, TextRequestsWrapper +from typing import Callable + +class OpenAPIToolkitComponent(CustomComponent): + display_name = "OpenAPIToolkit" + description = "Toolkit for interacting with an OpenAPI API." + + def build_config(self): + return { + "json_agent": {"display_name": "JSON Agent"}, + "requests_wrapper": {"display_name": "Text Requests Wrapper"}, + } + + def build( + self, + json_agent: AgentExecutor, + requests_wrapper: TextRequestsWrapper, + ) -> Callable: + # Assuming the actual toolkit class name is OpenAPIToolkit + return OpenAPIToolkit(json_agent=json_agent, requests_wrapper=requests_wrapper) diff --git a/src/backend/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/langflow/components/toolkits/VectorStoreInfo.py new file mode 100644 index 000000000..3018d975e --- /dev/null +++ b/src/backend/langflow/components/toolkits/VectorStoreInfo.py @@ -0,0 +1,33 @@ + +from langflow import CustomComponent +from langchain.vectorstores import VectorStore +from typing import Union, Callable +from langflow.field_typing import Chain + +class VectorStoreInfoComponent(CustomComponent): + display_name = "VectorStoreInfo" + description = "Information about a VectorStore" + + def build_config(self): + return { + 
"vectorstore": {"display_name": "VectorStore"}, + "description": {"display_name": "Description", "multiline": True}, + "name": {"display_name": "Name"}, + } + + def build( + self, + vectorstore: VectorStore, + description: str, + name: str, + ) -> Union[Chain, Callable]: + # Since the actual implementation of VectorStoreInfo is not provided, this is a placeholder + # Replace VectorStoreInfo with the actual class that should be instantiated + # This is a hypothetical class, actual implementation may vary + class VectorStoreInfo: + def __init__(self, vectorstore, description, name): + self.vectorstore = vectorstore + self.description = description + self.name = name + + return VectorStoreInfo(vectorstore=vectorstore, description=description, name=name) diff --git a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py new file mode 100644 index 000000000..14e79f088 --- /dev/null +++ b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py @@ -0,0 +1,23 @@ + +from langflow import CustomComponent +from typing import List +from langchain.vectorstores import VectorStore + +class VectorStoreRouterToolkitComponent(CustomComponent): + display_name = "VectorStoreRouterToolkit" + description = "Toolkit for routing between Vector Stores." + + def build_config(self): + return { + "vectorstores": {"display_name": "Vector Stores"}, + } + + def build( + self, + vectorstores: List[VectorStore], + ): + # Assuming the class `VectorStoreRouterToolkit` exists within a module, but since there + # is no further information provided about the module structure, I will assume it is + # accessible from the current context. If it's in `langchain.vectorstores`, it should be + # imported from there. 
+ return VectorStoreRouterToolkit(vectorstores=vectorstores) diff --git a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py new file mode 100644 index 000000000..b83246b07 --- /dev/null +++ b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py @@ -0,0 +1,22 @@ + +from langflow import CustomComponent +from langchain.toolkits import VectorStoreToolkit +from langflow.field_typing import ( + VectorStore, + Tool, +) + +class VectorStoreToolkitComponent(CustomComponent): + display_name = "VectorStoreToolkit" + description = "Toolkit for interacting with a Vector Store." + + def build_config(self): + return { + "vectorstore_info": {"display_name": "Vector Store Info"}, + } + + def build( + self, + vectorstore_info: VectorStore, + ) -> Tool: + return VectorStoreToolkit(vectorstore_info=vectorstore_info) diff --git a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py new file mode 100644 index 000000000..dc2bf051d --- /dev/null +++ b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py @@ -0,0 +1,34 @@ + +from langflow import CustomComponent + +# Assuming `BingSearchAPIWrapper` is a class that exists in the context +# and has the appropriate methods and attributes. +# We need to make sure this class is importable from the context where this code will be running. +from your_module import BingSearchAPIWrapper + + +class BingSearchAPIWrapperComponent(CustomComponent): + display_name = "BingSearchAPIWrapper" + description = "Wrapper for Bing Search API." 
+ + def build_config(self): + return { + "bing_search_url": {"display_name": "Bing Search URL"}, + "bing_subscription_key": { + "display_name": "Bing Subscription Key", + "password": True, + }, + # 'k' is not included as it is not shown (show=False) + } + + def build( + self, + bing_search_url: str, + bing_subscription_key: str, + ) -> BingSearchAPIWrapper: + # 'k' has a default value and is not shown (show=False), so it is hardcoded here + return BingSearchAPIWrapper( + bing_search_url=bing_search_url, + bing_subscription_key=bing_subscription_key, + k=10 + ) diff --git a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py new file mode 100644 index 000000000..110d6ea11 --- /dev/null +++ b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py @@ -0,0 +1,27 @@ + +from langflow import CustomComponent +from typing import Optional, Union, Callable + +# Assuming GoogleSearchAPIWrapper is a valid import based on JSON +# and it exists in some module that should be imported here. +# The import path should be replaced with the correct one once available. +from some_module import GoogleSearchAPIWrapper + + +class GoogleSearchAPIWrapperComponent(CustomComponent): + display_name = "GoogleSearchAPIWrapper" + description = "Wrapper for Google Search API." 
+ + def build_config(self): + return { + "google_api_key": {"display_name": "Google API Key", "password": True}, + "google_cse_id": {"display_name": "Google CSE ID"}, + # Fields with "show": False are omitted based on the rules + } + + def build( + self, + google_api_key: Optional[str] = None, + google_cse_id: Optional[str] = None, + ) -> Union[GoogleSearchAPIWrapper, Callable]: + return GoogleSearchAPIWrapper(google_api_key=google_api_key, google_cse_id=google_cse_id) diff --git a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py new file mode 100644 index 000000000..f48470d40 --- /dev/null +++ b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py @@ -0,0 +1,57 @@ + +from langflow import CustomComponent +from typing import Dict, Optional + +# Assuming the existence of GoogleSerperAPIWrapper class in the serper module +# If this class does not exist, you would need to create it or import the appropriate class from another module +from serper import GoogleSerperAPIWrapper + + +class GoogleSerperAPIWrapperComponent(CustomComponent): + display_name = "GoogleSerperAPIWrapper" + description = "Wrapper around the Serper.dev Google Search API." 
+ + def build_config(self) -> Dict[str, Dict]: + return { + "result_key_for_type": { + "display_name": "Result Key for Type", + "show": True, + "multiline": False, + "password": False, # corrected based on error message + "name": "result_key_for_type", + "advanced": False, + "dynamic": False, + "info": '', + "type": "dict", + "list": False, + "value": { + "news": "news", + "places": "places", + "images": "images", + "search": "organic" + } + }, + "serper_api_key": { + "display_name": "Serper API Key", + "show": True, + "multiline": False, + "password": False, # corrected based on error message + "name": "serper_api_key", + "advanced": False, + "dynamic": False, + "info": '', + "type": "str", + "list": False, + "value": "" # assuming empty string as default, needs to be set by user + } + } + + def build( + self, + result_key_for_type: Optional[Dict[str, str]] = None, + serper_api_key: Optional[str] = None, + ) -> GoogleSerperAPIWrapper: + return GoogleSerperAPIWrapper( + result_key_for_type=result_key_for_type, + serper_api_key=serper_api_key + ) diff --git a/src/backend/langflow/components/utilities/SearxSearchWrapper.py b/src/backend/langflow/components/utilities/SearxSearchWrapper.py new file mode 100644 index 000000000..deaefe1a7 --- /dev/null +++ b/src/backend/langflow/components/utilities/SearxSearchWrapper.py @@ -0,0 +1,27 @@ + +from langflow import CustomComponent +from typing import Optional, Dict + +class SearxSearchWrapperComponent(CustomComponent): + display_name = "SearxSearchWrapper" + description = "Wrapper for Searx API." 
+ + def build_config(self): + return { + "headers": { + "display_name": "Headers", + "multiline": True, + "default": '{"Authorization": "Bearer "}' + }, + } + + def build( + self, + headers: Optional[Dict[str, str]] = None, + ): + if headers is None: + headers = {"Authorization": "Bearer "} + # Placeholder for actual SearxSearchWrapper instantiation + # Since the actual SearxSearchWrapper class is not available, + # it is assumed that it would be instantiated here with headers as an argument. + pass diff --git a/src/backend/langflow/components/utilities/SerpAPIWrapper.py b/src/backend/langflow/components/utilities/SerpAPIWrapper.py new file mode 100644 index 000000000..9fd37bb48 --- /dev/null +++ b/src/backend/langflow/components/utilities/SerpAPIWrapper.py @@ -0,0 +1,32 @@ + +from langflow import CustomComponent +from typing import Callable, Union + +# Assuming SerpAPIWrapper is a predefined class within the langflow context. +# If it's not, it must be defined or imported from the appropriate module. + +class SerpAPIWrapperComponent(CustomComponent): + display_name = "SerpAPIWrapper" + description = "Wrapper around SerpAPI" + + def build_config(self): + return { + "serpapi_api_key": {"display_name": "SerpAPI API Key", "type": "password"}, + } + + def build( + self, + serpapi_api_key: str, + ) -> Union['SerpAPIWrapper', Callable]: + # Default parameters as defined in the JSON template. 
+ default_params = { + "engine": "google", + "google_domain": "google.com", + "gl": "us", + "hl": "en" + } + + return SerpAPIWrapper( + serpapi_api_key=serpapi_api_key, + params=default_params + ) diff --git a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py new file mode 100644 index 000000000..77c68d238 --- /dev/null +++ b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py @@ -0,0 +1,17 @@ + +from langflow import CustomComponent +from typing import Union, Callable + +# Assuming WikipediaAPIWrapper is a class that needs to be imported. +# The import statement is not included as it is not provided in the JSON +# and the actual implementation details are unknown. + +class WikipediaAPIWrapperComponent(CustomComponent): + display_name = "WikipediaAPIWrapper" + description = "Wrapper around WikipediaAPI." + + def build_config(self): + return {} + + def build(self) -> Union[WikipediaAPIWrapper, Callable]: + return WikipediaAPIWrapper() diff --git a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py new file mode 100644 index 000000000..5456c6f50 --- /dev/null +++ b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py @@ -0,0 +1,22 @@ + +from langflow import CustomComponent +from typing import Callable, Union + +# Since all the fields in the JSON have show=False, we will only create a basic component +# without any configurable fields. + +class WolframAlphaAPIWrapperComponent(CustomComponent): + display_name = "WolframAlphaAPIWrapper" + description = "Wrapper for Wolfram Alpha." + + def build_config(self): + # No fields with show=True are available according to the JSON configuration, + # so we return an empty config. 
+ return {} + + def build(self) -> Union[Callable, object]: + # Since we are not given any specific implementation details or associated classes, + # we will simply return an object that represents the WolframAlphaAPIWrapper without + # initializing any specific fields. In a real scenario, this would be replaced with + # the actual instantiation of the WolframAlphaAPIWrapper class. + return object() # Placeholder for actual WolframAlphaAPIWrapper class instantiation. diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py new file mode 100644 index 000000000..4d4864e73 --- /dev/null +++ b/src/backend/langflow/components/vectorstores/FAISS.py @@ -0,0 +1,39 @@ + +from langflow import CustomComponent +from langchain.vectorstores import FAISS +from typing import Optional, List +from langflow.field_typing import ( + Document, + Embeddings, + NestedDict, +) + +class FAISSComponent(CustomComponent): + display_name = "FAISS" + description = "Construct FAISS wrapper from raw documents." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss" + + def build_config(self): + return { + "documents": {"display_name": "Documents"}, + "embedding": {"display_name": "Embedding"}, + "folder_path": {"display_name": "Local Path"}, + "index_name": {"display_name": "Index Name"}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, + } + + def build( + self, + embedding: Embeddings, + documents: Optional[List[Document]] = None, + folder_path: str = "", + index_name: str = "", + search_kwargs: Optional[NestedDict] = None, + ) -> FAISS: + return FAISS( + embedding=embedding, + documents=documents, + folder_path=folder_path, + index_name=index_name, + search_kwargs=search_kwargs or {}, + ) diff --git a/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py b/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py new file mode 100644 index 000000000..1d416780d --- /dev/null +++ b/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py @@ -0,0 +1,45 @@ + +from langflow import CustomComponent +from langchain.vectorstores import MongoDBAtlasVectorSearch +from typing import Optional, List +from langflow.field_typing import ( + Document, + Embeddings, + NestedDict, +) + +class MongoDBAtlasComponent(CustomComponent): + display_name = "MongoDB Atlas" + description = "Construct a `MongoDB Atlas Vector Search` vector store from raw documents." 
+ + def build_config(self): + return { + "documents": {"display_name": "Documents"}, + "embedding": {"display_name": "Embedding"}, + "collection_name": {"display_name": "Collection Name"}, + "db_name": {"display_name": "Database Name"}, + "index_name": {"display_name": "Index Name"}, + "mongodb_atlas_cluster_uri": {"display_name": "MongoDB Atlas Cluster URI"}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, + } + + def build( + self, + documents: List[Document], + embedding: Embeddings, + collection_name: str = "", + db_name: str = "", + index_name: str = "", + mongodb_atlas_cluster_uri: str = "", + search_kwargs: Optional[NestedDict] = None, + ) -> MongoDBAtlasVectorSearch: + search_kwargs = search_kwargs or {} + return MongoDBAtlasVectorSearch( + documents=documents, + embedding=embedding, + collection_name=collection_name, + db_name=db_name, + index_name=index_name, + mongodb_atlas_cluster_uri=mongodb_atlas_cluster_uri, + search_kwargs=search_kwargs, + ) diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py new file mode 100644 index 000000000..0e72b65e5 --- /dev/null +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -0,0 +1,44 @@ + +from langflow import CustomComponent +from typing import Optional, List +from langchain.vectorstores import Pinecone +from langchain.field_typing import ( + Document, + Embeddings, + NestedDict, +) + +class PineconeComponent(CustomComponent): + display_name = "Pinecone" + description = "Construct Pinecone wrapper from raw documents." 
+ + def build_config(self): + return { + "documents": {"display_name": "Documents"}, + "embedding": {"display_name": "Embedding", "default": 1000}, + "index_name": {"display_name": "Index Name"}, + "namespace": {"display_name": "Namespace"}, + "pinecone_api_key": {"display_name": "Pinecone API Key", "default": ""}, + "pinecone_env": {"display_name": "Pinecone Environment", "default": ""}, + "search_kwargs": {"display_name": "Search Kwargs", "default": '{}'}, + } + + def build( + self, + embedding: Embeddings, + documents: Optional[List[Document]] = None, + index_name: Optional[str] = None, + namespace: Optional[str] = None, + pinecone_api_key: Optional[str] = None, + pinecone_env: Optional[str] = None, + search_kwargs: Optional[NestedDict] = None, + ) -> Pinecone: + return Pinecone( + documents=documents, + embedding=embedding, + index_name=index_name, + namespace=namespace, + pinecone_api_key=pinecone_api_key, + pinecone_env=pinecone_env, + search_kwargs=search_kwargs, + ) diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/langflow/components/vectorstores/Qdrant.py new file mode 100644 index 000000000..ac6a4cc09 --- /dev/null +++ b/src/backend/langflow/components/vectorstores/Qdrant.py @@ -0,0 +1,73 @@ + +from langflow import CustomComponent +from langchain.vectorstores import Qdrant +from typing import Optional, List +from langflow.field_typing import Document, Embeddings, NestedDict + +class QdrantComponent(CustomComponent): + display_name = "Qdrant" + description = "Construct Qdrant wrapper from a list of texts." 
+ + def build_config(self): + return { + "documents": {"display_name": "Documents"}, + "embedding": {"display_name": "Embedding"}, + "api_key": {"display_name": "API Key", "password": True}, + "collection_name": {"display_name": "Collection Name"}, + "content_payload_key": {"display_name": "Content Payload Key"}, + "distance_func": {"display_name": "Distance Function"}, + "grpc_port": {"display_name": "gRPC Port"}, + "host": {"display_name": "Host"}, + "https": {"display_name": "HTTPS"}, + "location": {"display_name": "Location"}, + "metadata_payload_key": {"display_name": "Metadata Payload Key"}, + "path": {"display_name": "Path"}, + "port": {"display_name": "Port"}, + "prefer_grpc": {"display_name": "Prefer gRPC"}, + "prefix": {"display_name": "Prefix"}, + "search_kwargs": {"display_name": "Search Kwargs"}, + "timeout": {"display_name": "Timeout"}, + "url": {"display_name": "URL"}, + } + + def build( + self, + embedding: Embeddings, + documents: Optional[List[Document]] = None, + api_key: Optional[str] = None, + collection_name: Optional[str] = None, + content_payload_key: str = "page_content", + distance_func: str = "Cosine", + grpc_port: int = 6334, + host: Optional[str] = None, + https: bool = False, + location: str = ":memory:", + metadata_payload_key: str = "metadata", + path: Optional[str] = None, + port: int = 6333, + prefer_grpc: bool = False, + prefix: Optional[str] = None, + search_kwargs: Optional[NestedDict] = None, + timeout: Optional[float] = None, + url: Optional[str] = None, + ) -> Qdrant: + return Qdrant( + documents=documents, + embedding=embedding, + api_key=api_key, + collection_name=collection_name, + content_payload_key=content_payload_key, + distance_func=distance_func, + grpc_port=grpc_port, + host=host, + https=https, + location=location, + metadata_payload_key=metadata_payload_key, + path=path, + port=port, + prefer_grpc=prefer_grpc, + prefix=prefix, + search_kwargs=search_kwargs, + timeout=timeout, + url=url, + ) diff --git 
a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py new file mode 100644 index 000000000..93b0d495f --- /dev/null +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -0,0 +1,44 @@ + +from langflow import CustomComponent +from typing import Optional, List +from langchain.vectorstores import SupabaseVectorStore +from langchain.field_typing import ( + Document, + Embeddings, + NestedDict, +) + +class SupabaseComponent(CustomComponent): + display_name = "Supabase" + description = "Return VectorStore initialized from texts and embeddings." + + def build_config(self): + return { + "documents": {"display_name": "Documents"}, + "embedding": {"display_name": "Embedding"}, + "query_name": {"display_name": "Query Name"}, + "search_kwargs": {"display_name": "Search Kwargs"}, + "supabase_service_key": {"display_name": "Supabase Service Key"}, + "supabase_url": {"display_name": "Supabase URL"}, + "table_name": {"display_name": "Table Name"}, + } + + def build( + self, + embedding: Embeddings, + documents: Optional[List[Document]] = None, + query_name: str = '', + search_kwargs: NestedDict = {}, + supabase_service_key: str = '', + supabase_url: str = '', + table_name: str = '', + ) -> SupabaseVectorStore: + return SupabaseVectorStore( + documents=documents, + embedding=embedding, + query_name=query_name, + search_kwargs=search_kwargs, + supabase_service_key=supabase_service_key, + supabase_url=supabase_url, + table_name=table_name, + ) From 4ee7a9c62d0b2e5379fbec7e37512f38fde8d1dd Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 9 Jan 2024 10:05:25 -0300 Subject: [PATCH 006/153] Update langchain version to ~0.0.345 --- poetry.lock | 175 +++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 90 insertions(+), 87 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5f4d111d2..5447b025f 100644 --- a/poetry.lock 
+++ b/poetry.lock @@ -321,13 +321,13 @@ files = [ [[package]] name = "bce-python-sdk" -version = "0.8.98" +version = "0.8.99" description = "BCE SDK for python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4" files = [ - {file = "bce-python-sdk-0.8.98.tar.gz", hash = "sha256:8d2a55541a5bf21228f248ed4faea9dd40dfcfc1df2c5bf1e9cdb69db6e0fab4"}, - {file = "bce_python_sdk-0.8.98-py3-none-any.whl", hash = "sha256:0e044fa2e4efd749642dfaf22f9e047a04618761780deeaa24ff06dc6ac55f34"}, + {file = "bce-python-sdk-0.8.99.tar.gz", hash = "sha256:0a6daf8bb529436ec683ed73e2b7e393ad9a12f4d127cf5ff96f3a018ddb1c05"}, + {file = "bce_python_sdk-0.8.99-py3-none-any.whl", hash = "sha256:fb8b174f03ce2493c517772fe615bdfd57708c88319fc5611e58d64138ab9335"}, ] [package.dependencies] @@ -411,17 +411,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.14" +version = "1.34.15" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.14-py3-none-any.whl", hash = "sha256:1f94042f4efb5133b6b9b8b3243afc01143a81d21b3197a3afadf5780f97b05d"}, - {file = "boto3-1.34.14.tar.gz", hash = "sha256:5c1bb487c68120aae236354d81b8a1a55d0aa3395d30748a01825ef90891921e"}, + {file = "boto3-1.34.15-py3-none-any.whl", hash = "sha256:f8f16c2d0ec1dca291857f1c138d5c30e01e40f653443cc2679e2f6ae71b05a6"}, + {file = "boto3-1.34.15.tar.gz", hash = "sha256:2b74c58f475ff0dcf2f3637da9367a9465d29fad971ff5d8dc54ac39554e9022"}, ] [package.dependencies] -botocore = ">=1.34.14,<1.35.0" +botocore = ">=1.34.15,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -430,13 +430,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.14" +version = "1.34.15" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.14-py3-none-any.whl", hash = "sha256:3b592f50f0406e236782a3a0a9ad1c3976060fdb2e04a23d18c3df5b7dfad3e0"}, - {file = "botocore-1.34.14.tar.gz", hash = "sha256:041bed0852649cab7e4dcd4d87f9d1cc084467fb846e5b60015e014761d96414"}, + {file = "botocore-1.34.15-py3-none-any.whl", hash = "sha256:16bcf871e67ef0177593f06e9e5bae4db51c9a9a2e953cb14feeb42d53441a85"}, + {file = "botocore-1.34.15.tar.gz", hash = "sha256:c3c3404962a6d9d5e1634bd70ed53b8eff1ff17ee9d7a6240e9e8c94db48ad6f"}, ] [package.dependencies] @@ -1581,42 +1581,42 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" [[package]] name = "fastavro" -version = "1.9.2" +version = "1.9.3" description = "Fast read/write of AVRO files" optional = false python-versions = ">=3.8" files = [ - {file = "fastavro-1.9.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:223cecf135fd29b83ca6a30035b15b8db169aeaf8dc4f9a5d34afadc4b31638a"}, - {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e08c9be8c6f7eed2cf30f8b64d50094cba38a81b751c7db9f9c4be2656715259"}, - {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394f06cc865c6fbae3bbca323633a28a5d914c55dc2c1cdefb75432456ef8f6f"}, - {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7a7caadd47bdd04bda534ff70b4b98d2823800c488fd911918115aec4c4dc09b"}, - {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:68478a1b8a583d83ad6550e9dceac6cbb148a99a52c3559a0413bf4c0b9c8786"}, - {file = "fastavro-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:b59a1123f1d534743af33fdbda80dd7b9146685bdd7931eae12bee6203065222"}, - {file = "fastavro-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:887c20dc527a549764c91f9e48ece071f2f26d217af66ebcaeb87bf29578fee5"}, - {file = 
"fastavro-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46458f78b481c12db62d3d8a81bae09cb0b5b521c0d066c6856fc2746908d00d"}, - {file = "fastavro-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f4a2a4bed0e829f79fa1e4f172d484b2179426e827bcc80c0069cc81328a5af"}, - {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6167f9bbe1c5a28fbc2db767f97dbbb4981065e6eeafd4e613f6fe76c576ffd4"}, - {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d574bc385f820da0404528157238de4e5fdd775d2cb3d05b3b0f1b475d493837"}, - {file = "fastavro-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ec600eb15b3ec931904c5bf8da62b3b725cb0f369add83ba47d7b5e9322f92a0"}, - {file = "fastavro-1.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c82b0761503420cd45f7f50bc31975ac1c75b5118e15434c1d724b751abcc249"}, - {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db62d9b8c944b8d9c481e5f980d5becfd034bdd58c72e27c9333bd504b06bda0"}, - {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65e61f040bc9494646f42a466e9cd428783b82d7161173f3296710723ba5a453"}, - {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6278b93cdd5bef1778c0232ce1f265137f90bc6be97a5c1dd7e0d99a406c0488"}, - {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cd003ddea5d89720194b6e57011c37221d9fc4ddc750e6f4723516eb659be686"}, - {file = "fastavro-1.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:43f09d100a26e8b59f30dde664d93e423b648e008abfc43132608a18fe8ddcc2"}, - {file = "fastavro-1.9.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:3ddffeff5394f285c69f9cd481f47b6cf62379840cdbe6e0dc74683bd589b56e"}, - {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4e75a2b2ec697d2058a7d96522e921f03f174cf9049ace007c24be7ab58c5370"}, - {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd2e8fd0567483eb0fdada1b979ad4d493305dfdd3f351c82a87df301f0ae1f"}, - {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c652dbe3f087c943a5b89f9a50a574e64f23790bfbec335ce2b91a2ae354a443"}, - {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba73e9a1822162f1b3a43de0362f29880014c5c4d49d63ad7fcce339ef73ea2"}, - {file = "fastavro-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:beeef2964bbfd09c539424808539b956d7425afbb7055b89e2aa311374748b56"}, - {file = "fastavro-1.9.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d5fa48266d75e057b27d8586b823d6d7d7c94593fd989d75033eb4c8078009fb"}, - {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b69aeb0d063f5955a0e412f9779444fc452568a49db75a90a8d372f9cb4a01c8"}, - {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce336c59fb40fdb8751bda8cc6076cfcdf9767c3c107f6049e049166b26c61f"}, - {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:581036e18661f045415a51ad528865e1d7ba5a9690a3dede9e6ea50f94ed6c4c"}, - {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b6b5c3cda569c0a130fd2d08d4c53a326ede7e05174a24eda08f7698f70eda"}, - {file = "fastavro-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:d33e40f246bf07f106f9d2da68d0234efcc62276b6e35bde00ff920ea7f871fd"}, - {file = "fastavro-1.9.2.tar.gz", hash = "sha256:5c1ffad986200496bd69b5c4748ae90b5d934d3b1456f33147bee3a0bb17f89b"}, + {file = "fastavro-1.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5e9b2e1427fb84c0754bc34923d10cabcf2ed23230201208a1371ab7b6027674"}, + {file = "fastavro-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c4ef82f86ae276309abc0072598474b6be68105a0b28f8d7cc0398d1d353d7de"}, + {file = "fastavro-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280ef7ab7232ecb2097038d6842416ec717d0e1c314b80ff245f85201f3396a4"}, + {file = "fastavro-1.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a36cfc0421ed7576ecb1c22de7bd1dedcce62aebbffcc597379d59171e5d76e"}, + {file = "fastavro-1.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d80f2e20199140eb8c036b4393e9bc9eff325543311b958c72318999499d4279"}, + {file = "fastavro-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:a435f7edd7c5b52cee3f23ca950cd9373ab35cf2aa3d269b3d6aca7e2fc1372c"}, + {file = "fastavro-1.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a7053ed10194ec53754f5337b57b3273a74b48505edcd6edb79fe3c4cd259c0"}, + {file = "fastavro-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853e01f13534d1baa0a3d493a8573e665e93ffa35b4bf1d125e21764d343af8e"}, + {file = "fastavro-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a279cda25d876e6f120950cadf184a307fd8998f9a22a90bb62e6749f88d1e"}, + {file = "fastavro-1.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:63d6f928840f3fb1f2e1fe20bc8b7d0e1a51ba4bb0e554ecb837a669fba31288"}, + {file = "fastavro-1.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8807046edc78f50b3ea5f55f6a534c87b2a13538e7c56fec3532ef802bcae333"}, + {file = "fastavro-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:e502579da4a51c5630eadbd811a1b3d262d6e783bf19998cfb33d2ea0cf6f516"}, + {file = "fastavro-1.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6b665efe442061df8d9608c2fb692847df85d52ad825b776c441802f0dfa6571"}, + {file = "fastavro-1.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b8c96d81f0115633489d7f1133a03832922629a61ca81c1d47b482ddcda3b94"}, + {file = 
"fastavro-1.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338c7ec94dd2474c4679e44d2560a1922cb6fa99acbb7b18957264baf8eadfc7"}, + {file = "fastavro-1.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a509b34c9af71a109c633631ac2f6d2209830e13200d0048f7e9c057fd563f8f"}, + {file = "fastavro-1.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:967edefab470987c024cd5a1fcd04744a50a91e740c7bdf325181043a47f1083"}, + {file = "fastavro-1.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:033c15e8ed02f80f01d58be1cd880b09fd444faf277263d563a727711d47a98a"}, + {file = "fastavro-1.9.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:6b38723327603d77080aec56628e13a739415f8596ca0cc41a905615977c6d6b"}, + {file = "fastavro-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046d75c4400941fd08f0a6855a34ae63bf02ea01f366b5b749942abe10640056"}, + {file = "fastavro-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ab312b8baf0e61ee717878d390022ee1b713d70b244d69efbf3325680f9749"}, + {file = "fastavro-1.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c562fcf8f5091a2446aafd0c2a0da590c24e0b53527a0100d33908e32f20eea8"}, + {file = "fastavro-1.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2aa0111e7ebd076d2a094862bbdf8ea175cebba148fcce6c89ff46b625e334b4"}, + {file = "fastavro-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:652072e0f455ca19a1ee502b527e603389783657c130d81f89df66775979d6f5"}, + {file = "fastavro-1.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0a57cdd4edaee36d4216faf801ebc7f53f45e4e1518bdd9832d6f6f1d6e2d88f"}, + {file = "fastavro-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b46a18ebed61573b0823c28eda2716485d283258a83659c7fe6ad3aaeacfed4"}, + {file = "fastavro-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f756f0723f3bd97db20437d0a8e45712839e6ccd7c82f4d82469533be48b4c7"}, + {file = 
"fastavro-1.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d98d5a08063f5b6d7ac5016a0dfe0698b50d9987cb74686f7dfa8288b7b09e0b"}, + {file = "fastavro-1.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:00698e60db58a2d52cb709df882d451fb7664ebb2f8cb37d9171697e060dc767"}, + {file = "fastavro-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:d021bbc135023194688e88a7431fb0b5e3ce20e27153bf258f2ce08ee1a0106b"}, + {file = "fastavro-1.9.3.tar.gz", hash = "sha256:a30d3d2353f6d3b4f6dcd6a97ae937b3775faddd63f5856fe11ba3b0dbb1756a"}, ] [package.extras] @@ -2126,13 +2126,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.112.0" +version = "2.113.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.112.0.tar.gz", hash = "sha256:c3bcb5fd70d57f4c94b30c0dbeade53c216febfbf1d771eeb1a2fa74bd0d6756"}, - {file = "google_api_python_client-2.112.0-py2.py3-none-any.whl", hash = "sha256:f5e45d9812376deb7e04cda8d8ca5233aa608038bdbf1253ad8f7edcb7f6d595"}, + {file = "google-api-python-client-2.113.0.tar.gz", hash = "sha256:bcffbc8ffbad631f699cf85aa91993f3dc03060b234ca9e6e2f9135028bd9b52"}, + {file = "google_api_python_client-2.113.0-py2.py3-none-any.whl", hash = "sha256:25659d488df6c8a69615b2a510af0e63b4c47ab2cb87d71c1e13b28715906e27"}, ] [package.dependencies] @@ -3498,13 +3498,13 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.7.0" +version = "5.7.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.7.0-py3-none-any.whl", hash = "sha256:16eea462f7dad23ba9f86542bdf17f830804e2028eb48d609b6134d91681e983"}, - {file = "jupyter_core-5.7.0.tar.gz", hash = "sha256:cb8d3ed92144d2463a3c5664fdd686a3f0c1442ea45df8babb1c1a9e6333fe03"}, + {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, + {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, ] [package.dependencies] @@ -3551,13 +3551,13 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "langchain" -version = "0.1.0" +version = "0.0.354" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-0.1.0-py3-none-any.whl", hash = "sha256:8652e74b039333a55c79faff4400b077ba1bd0ddce5255574e42d301c05c1733"}, - {file = "langchain-0.1.0.tar.gz", hash = "sha256:d43119f8d3fda2c8ddf8c3a19bd5b94b347e27d1867ff14a921b90bdbed0668a"}, + {file = "langchain-0.0.354-py3-none-any.whl", hash = "sha256:8d28283a2891422a685b0605dd23b5a1cd6a15ab57a8e359b37a3151a322bad4"}, + {file = "langchain-0.0.354.tar.gz", hash = "sha256:419c48735b803d70c0dee985e0afcfd7c88528b8c1cd918c57eb23e53d94ea87"}, ] [package.dependencies] @@ -3565,8 +3565,8 @@ aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">=0.5.7,<0.7" jsonpatch = ">=1.33,<2.0" -langchain-community = ">=0.0.9,<0.1" -langchain-core = ">=0.1.7,<0.2" +langchain-community = ">=0.0.8,<0.1" +langchain-core = ">=0.1.5,<0.2" langsmith = ">=0.0.77,<0.1.0" numpy = ">=1,<2" pydantic = ">=1,<3" @@ -3582,7 +3582,7 @@ cli = ["typer (>=0.9.0,<0.10.0)"] cohere = ["cohere (>=4,<5)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite 
(>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] 
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] javascript 
= ["esprima (>=4.0.1,<5.0.0)"] llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] @@ -3591,19 +3591,19 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] name = "langchain-community" -version = "0.0.9" +version = "0.0.10" description = "Community contributed LangChain integrations." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_community-0.0.9-py3-none-any.whl", hash = "sha256:21e1f96c776541255b7067f32aafbf065f78a33be8f0e2660080ddc3e9ed48b7"}, - {file = "langchain_community-0.0.9.tar.gz", hash = "sha256:b14f10b249fd61b0b8e3d2896f85c2d577eb4a5e2ae01291e2a4ebbe1bb3c370"}, + {file = "langchain_community-0.0.10-py3-none-any.whl", hash = "sha256:37123ce31018bc7ad3ffda8af73c46e16d568270527a546c34e8dbce713377af"}, + {file = "langchain_community-0.0.10.tar.gz", hash = "sha256:4d7b3510e04b80dfddace32fb5db0878e9bab7d4be7288f86112ed22dc5faf68"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" -langchain-core = ">=0.1.7,<0.2" +langchain-core = ">=0.1.8,<0.2" langsmith = ">=0.0.63,<0.1.0" numpy = ">=1,<2" PyYAML = ">=5.3" @@ -3617,13 +3617,13 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15. 
[[package]] name = "langchain-core" -version = "0.1.7" +version = "0.1.8" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_core-0.1.7-py3-none-any.whl", hash = "sha256:c66327dbb4b7d4ab911556aa0511ebf4f40801ad66d98778fb5566dba45b0091"}, - {file = "langchain_core-0.1.7.tar.gz", hash = "sha256:c05211a309721d67aa5a681c946a2f010e14632a2bea3728da0a30a2534efa9e"}, + {file = "langchain_core-0.1.8-py3-none-any.whl", hash = "sha256:f4d1837d6d814ed36528b642211933d1f0bd84e1eff361f4630a8c750acc27d0"}, + {file = "langchain_core-0.1.8.tar.gz", hash = "sha256:93ab72f5ab202526310fad389a45626501fd76ecf56d451111c0d4abe8183407"}, ] [package.dependencies] @@ -3641,20 +3641,21 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langchain-experimental" -version = "0.0.42" +version = "0.0.47" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_experimental-0.0.42-py3-none-any.whl", hash = "sha256:8e9190fa5ebdd03dfed6ca20846ebb26fc7e0c1fffbab070a12f1ce0cf5053d2"}, - {file = "langchain_experimental-0.0.42.tar.gz", hash = "sha256:1571ef536b056c46781d1de0fa926ab27c7d386da203ba61e0e0601d4cfc96be"}, + {file = "langchain_experimental-0.0.47-py3-none-any.whl", hash = "sha256:d5b6930c4e0a6f280cbb7d327d03d86f555f6168e185a1df64ba4a52b1059f65"}, + {file = "langchain_experimental-0.0.47.tar.gz", hash = "sha256:0fdba89a438287c14fa0632c6adf87acffd55158a2f66c7a12be7721a7882a0e"}, ] [package.dependencies] -langchain = ">=0.0.308" +langchain = ">=0.0.350,<0.1" +langchain-core = ">=0.1,<0.2" [package.extras] -extended-testing = ["faker (>=19.3.1,<20.0.0)", "presidio-analyzer (>=2.2.33,<3.0.0)", "presidio-anonymizer (>=2.2.33,<3.0.0)", "sentence-transformers (>=2,<3)", "vowpal-wabbit-next (==0.6.0)"] +extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "presidio-analyzer 
(>=2.2.33,<3.0.0)", "presidio-anonymizer (>=2.2.33,<3.0.0)", "sentence-transformers (>=2,<3)", "vowpal-wabbit-next (==0.6.0)"] [[package]] name = "langchain-google-genai" @@ -3730,13 +3731,13 @@ langchain = ["langchain (>=0.0.309)"] [[package]] name = "langsmith" -version = "0.0.77" +version = "0.0.78" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.77-py3-none-any.whl", hash = "sha256:750c0aa9177240c64e131d831e009ed08dd59038f7cabbd0bbcf62ccb7c8dcac"}, - {file = "langsmith-0.0.77.tar.gz", hash = "sha256:c4c8d3a96ad8671a41064f3ccc673e2e22a4153e823b19f915c9c9b8a4f33a2c"}, + {file = "langsmith-0.0.78-py3-none-any.whl", hash = "sha256:d7c8300700dde0cea87388177c2552187e87fb4ae789510712e7654db72b5c04"}, + {file = "langsmith-0.0.78.tar.gz", hash = "sha256:a7d7f1639072aeb12115a931eb6d4c53810a480a1fec90bc8744f232765f3c81"}, ] [package.dependencies] @@ -3783,13 +3784,13 @@ test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"] [[package]] name = "llama-index" -version = "0.9.26" +version = "0.9.27" description = "Interface between LLMs and your data" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "llama_index-0.9.26-py3-none-any.whl", hash = "sha256:2c865e6dd17f3f66bf8e51a8380033308e0eb053c5e76a6bbe6f0ac4e9da9edd"}, - {file = "llama_index-0.9.26.tar.gz", hash = "sha256:9ba0a63984abff6c0f57ae82ff55c32a7715bcb4c3a0970f7dea6ddf20f6de77"}, + {file = "llama_index-0.9.27-py3-none-any.whl", hash = "sha256:15c5215c7d6b1b64b4d3d91b0601a9e8ef64319e80e27520603bc5e1fa812563"}, + {file = "llama_index-0.9.27.tar.gz", hash = "sha256:6d605615d79abac5aff3e73830de389b3114df7982f7ff00ade79ce595c47929"}, ] [package.dependencies] @@ -3800,6 +3801,7 @@ deprecated = ">=1.2.9.3" fsspec = ">=2023.5.0" httpx = "*" nest-asyncio = ">=1.5.8,<2.0.0" +networkx = ">=3.0" nltk = ">=3.8.1,<4.0.0" numpy = "*" openai = ">=1.1.0" @@ -3815,7 
+3817,7 @@ typing-inspect = ">=0.8.0" gradientai = ["gradientai (>=1.4.0)"] langchain = ["langchain (>=0.0.303)"] local-models = ["optimum[onnxruntime] (>=1.13.2,<2.0.0)", "sentencepiece (>=0.1.99,<0.2.0)", "transformers[torch] (>=4.34.0,<5.0.0)"] -postgres = ["asyncpg (>=0.28.0,<0.29.0)", "pgvector (>=0.1.0,<0.2.0)", "psycopg-binary (>=3.1.12,<4.0.0)"] +postgres = ["asyncpg (>=0.28.0,<0.29.0)", "pgvector (>=0.1.0,<0.2.0)", "psycopg-binary (>=3.1.12,<4.0.0)", "psycopg2 (>=2.9.9,<3.0.0)"] query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "lm-format-enforcer (>=0.4.3,<0.5.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "scikit-learn", "spacy (>=3.7.1,<4.0.0)"] [[package]] @@ -4778,13 +4780,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.6.1" +version = "1.7.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.6.1-py3-none-any.whl", hash = "sha256:bc9f774838d67ac29fb24cdeb2d58faf57de8b311085dcd1348f7aa02a96c7ee"}, - {file = "openai-1.6.1.tar.gz", hash = "sha256:d553ca9dbf9486b08e75b09e8671e4f638462aaadccfced632bf490fc3d75fa2"}, + {file = "openai-1.7.0-py3-none-any.whl", hash = "sha256:2282e8e15acb05df79cccba330c025b8e84284c7ec1f3fa31f167a8479066333"}, + {file = "openai-1.7.0.tar.gz", hash = "sha256:f2a8dcb739e8620c9318a2c6304ea72aebb572ba02fa1d586344405e80d567d3"}, ] [package.dependencies] @@ -5493,13 +5495,13 @@ strenum = ">=0.4.9,<0.5.0" [[package]] name = "posthog" -version = "3.1.0" +version = "3.3.0" description = "Integrate PostHog into any python application." 
optional = false python-versions = "*" files = [ - {file = "posthog-3.1.0-py2.py3-none-any.whl", hash = "sha256:acd033530bdfc275dce5587f205f62378991ecb9b7cd5479e79c7f4ac575d319"}, - {file = "posthog-3.1.0.tar.gz", hash = "sha256:db17a2c511e18757aec12b6632ddcc1fa318743dad88a4666010467a3d9468da"}, + {file = "posthog-3.3.0-py2.py3-none-any.whl", hash = "sha256:0d8ba574a72bce8b635b6bb905da110d172413dc3f1c15cb6476e45b777d88c5"}, + {file = "posthog-3.3.0.tar.gz", hash = "sha256:130ba2bc5aa9ddfeb266acacee94c370223bb308a174d044f94c1f89a9fdb816"}, ] [package.dependencies] @@ -5951,13 +5953,13 @@ pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pyautogen" -version = "0.2.3" +version = "0.2.5" description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework" optional = false python-versions = ">=3.8, <3.12" files = [ - {file = "pyautogen-0.2.3-py3-none-any.whl", hash = "sha256:f7f8dea3bc5766ff24531638009e50e33aea174a456dbdb94b4ef24ac2c5a1c9"}, - {file = "pyautogen-0.2.3.tar.gz", hash = "sha256:9643f4772ea7f848ec4e2e0bf90b3fab7c16bb59b88328e129a4d9dde4c7c5bf"}, + {file = "pyautogen-0.2.5-py3-none-any.whl", hash = "sha256:df3f6fd2a1a53f4fa57a12eec0fb39f4403559d6ba907396f023a8e7b27a4154"}, + {file = "pyautogen-0.2.5.tar.gz", hash = "sha256:f655574b12e2a1266c18c62092e6aa09c0911d5ede3e5473f0695182a5ea93a9"}, ] [package.dependencies] @@ -5970,6 +5972,7 @@ termcolor = "*" tiktoken = "*" [package.extras] +autobuild = ["chromadb", "huggingface-hub", "sentence-transformers"] blendsearch = ["flaml[blendsearch]"] graphs = ["matplotlib (>=3.8.1,<3.9.0)", "networkx (>=3.2.1,<3.3.0)"] lmm = ["pillow", "replicate"] @@ -8600,13 +8603,13 @@ xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] [[package]] name = "unstructured-client" -version = "0.15.1" +version = "0.15.2" description = "Python Client SDK for Unstructured API" optional = false python-versions = ">=3.8" files = [ - {file = "unstructured-client-0.15.1.tar.gz", hash = 
"sha256:6769871507b80f8ab37114eb0e7289aec9b80204cc22e5de8bcbb07c02d43e19"}, - {file = "unstructured_client-0.15.1-py3-none-any.whl", hash = "sha256:c1a4f1dd8128eb08c2b07b404912111147f0135bdd2e6736b1f9b5a451b13c7d"}, + {file = "unstructured-client-0.15.2.tar.gz", hash = "sha256:47a80d90abf214c0b695812ec5ef982d08b8ed7ffa84bc76cd208f8ddec9ba4c"}, + {file = "unstructured_client-0.15.2-py3-none-any.whl", hash = "sha256:0476ecc86a26709c768c4b03116e446646b192942e5dc6620a0e3845e0e28efc"}, ] [package.dependencies] @@ -9294,4 +9297,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "447639bcb6b221b2c0a1f216c7c93a5ce104664791a149645556b0916c555272" +content-hash = "bc135f5941d37d61773f906f1b5e96e189ef5dcb35c9d92fbe271f863d169595" diff --git a/pyproject.toml b/pyproject.toml index a02ec9009..f024fecf8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ google-search-results = "^2.4.1" google-api-python-client = "^2.79.0" typer = "^0.9.0" gunicorn = "^21.2.0" -langchain = "~0.1.0" +langchain = "~0.0.345" openai = "^1.6.1" pandas = "2.0.3" chromadb = "^0.4.0" From 5677dd9ad346397378dfb62504d40336767b45ba Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 9 Jan 2024 10:17:10 -0300 Subject: [PATCH 007/153] Refactor component filtering in DirectoryReader class --- .../directory_reader/directory_reader.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/backend/langflow/interface/custom/directory_reader/directory_reader.py b/src/backend/langflow/interface/custom/directory_reader/directory_reader.py index 9c50e0181..d6cca5c83 100644 --- a/src/backend/langflow/interface/custom/directory_reader/directory_reader.py +++ b/src/backend/langflow/interface/custom/directory_reader/directory_reader.py @@ -66,18 +66,18 @@ class DirectoryReader: def filter_loaded_components(self, data: dict, with_errors: bool) -> 
dict: from langflow.interface.custom.utils import build_component - items = [ - { - "name": menu["name"], - "path": menu["path"], - "components": [ - (*build_component(component), component) - for component in menu["components"] - if (component["error"] if with_errors else not component["error"]) - ], - } - for menu in data["menu"] - ] + items = [] + for menu in data["menu"]: + components = [] + for component in menu["components"]: + try: + if component["error"] if with_errors else not component["error"]: + component_tuple = (*build_component(component), component) + components.append(component_tuple) + except Exception as e: + logger.error(f"Error while loading component: {e}") + continue + items.append({"name": menu["name"], "path": menu["path"], "components": components}) filtered = [menu for menu in items if menu["components"]] logger.debug(f'Filtered components {"with errors" if with_errors else ""}: {len(filtered)}') return {"menu": filtered} From 8e32b5e6eaa50a140047febdf27c117356d5b828 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 9 Jan 2024 10:17:21 -0300 Subject: [PATCH 008/153] Fix import statement in custom_component_full.ts --- src/frontend/tests/custom_component_full.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/tests/custom_component_full.ts b/src/frontend/tests/custom_component_full.ts index 443c3fd85..be24deba3 100644 --- a/src/frontend/tests/custom_component_full.ts +++ b/src/frontend/tests/custom_component_full.ts @@ -4,7 +4,7 @@ from langchain.llms.base import BaseLLM from langchain.chains import LLMChain from langchain.prompts import PromptTemplate from langchain.schema import Document -from langchain.field_typing import NestedDict +from langflow.field_typing import NestedDict import requests From 6ffb7cc233052eb511e217986b96d57510f5ebb6 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 9 Jan 2024 10:17:27 -0300 Subject: [PATCH 009/153] Fix import path in 
constants.py --- src/backend/langflow/interface/tools/constants.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index 0ac37a0a4..89ab047d7 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py @@ -2,6 +2,7 @@ from langchain import tools from langchain.agents import Tool from langchain.agents.load_tools import _BASE_TOOLS, _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS from langchain.tools.json.tool import JsonSpec + from langflow.interface.importing.utils import import_class from langflow.interface.tools.custom import PythonFunction, PythonFunctionTool @@ -12,7 +13,7 @@ CUSTOM_TOOLS = { "PythonFunction": PythonFunction, } -OTHER_TOOLS = {tool: import_class(f"langchain_community.tools.{tool}") for tool in tools.__all__} +OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") for tool in tools.__all__} ALL_TOOLS_NAMES = { **_BASE_TOOLS, From 15dd9e3ee1f2622dbbea6dc0def33812f8494617 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 9 Jan 2024 10:17:35 -0300 Subject: [PATCH 010/153] Update import statements in GitbookLoader, JsonToolkit, CSVAgent, UnstructuredHTMLLoader, IMSDbLoader, IFixitLoader, BSHTMLLoader, ReadTheDocsLoader, OpenAPIToolkit, Qdrant, EverNoteLoader, CoNLLULoader, and AirbyteJSONLoader components --- .../langflow/components/agents/CSVAgent.py | 4 ++-- .../documentloaders/AZLyricsLoader.py | 18 ++++-------------- .../documentloaders/AirbyteJSONLoader.py | 7 ++++--- .../components/documentloaders/BSHTMLLoader.py | 4 ++-- .../components/documentloaders/CoNLLULoader.py | 5 ++--- .../documentloaders/EverNoteLoader.py | 6 +++--- .../documentloaders/GitbookLoader.py | 3 +-- .../components/documentloaders/IFixitLoader.py | 4 ++-- .../components/documentloaders/IMSDbLoader.py | 4 ++-- .../documentloaders/ReadTheDocsLoader.py | 4 ++-- 
.../documentloaders/UnstructuredHTMLLoader.py | 3 +-- .../UnstructuredWordDocumentLoader.py | 13 +++++-------- .../embeddings/HuggingFaceEmbeddings.py | 8 +++++--- .../langflow/components/llms/Anthropic.py | 12 +++++++++--- .../langflow/components/llms/ChatOpenAI.py | 5 ++--- .../components/toolkits/JsonToolkit.py | 4 ++-- .../components/toolkits/OpenAPIToolkit.py | 4 ++-- .../components/vectorstores/Pinecone.py | 6 +++--- .../langflow/components/vectorstores/Qdrant.py | 4 ++-- .../vectorstores/SupabaseVectorStore.py | 12 ++++++------ 20 files changed, 61 insertions(+), 69 deletions(-) diff --git a/src/backend/langflow/components/agents/CSVAgent.py b/src/backend/langflow/components/agents/CSVAgent.py index e3b90527f..af3766c74 100644 --- a/src/backend/langflow/components/agents/CSVAgent.py +++ b/src/backend/langflow/components/agents/CSVAgent.py @@ -1,6 +1,6 @@ - from langflow import CustomComponent -from langchain.field_typing import BaseLanguageModel, AgentExecutor +from langflow.field_typing import BaseLanguageModel, AgentExecutor + class CSVAgentComponent(CustomComponent): display_name = "CSVAgent" diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py index f8e9b24e7..4f2d8e7a3 100644 --- a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py +++ b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict + class AZLyricsLoaderComponent(CustomComponent): display_name = "AZLyricsLoader" description = "Load `AZLyrics` webpages." 
@@ -10,18 +10,8 @@ class AZLyricsLoaderComponent(CustomComponent): def build_config(self): return { - "metadata": { - "display_name": "Metadata", - "type": "dict", - "default": {}, - "show": True - }, - "web_path": { - "display_name": "Web Page", - "type": "str", - "required": True, - "show": True - }, + "metadata": {"display_name": "Metadata", "type": "dict", "default": {}, "show": True}, + "web_path": {"display_name": "Web Page", "type": "str", "required": True, "show": True}, } def build(self, metadata: Optional[Dict] = None, web_path: str = "") -> Document: diff --git a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py index 5d8272d04..954638926 100644 --- a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py +++ b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py @@ -1,13 +1,14 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict class AirbyteJSONLoaderComponent(CustomComponent): display_name = "AirbyteJSONLoader" description = "Load local `Airbyte` json files." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json" + documentation = ( + "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json" + ) def build_config(self): return { diff --git a/src/backend/langflow/components/documentloaders/BSHTMLLoader.py b/src/backend/langflow/components/documentloaders/BSHTMLLoader.py index 8f9553183..ffbc45d74 100644 --- a/src/backend/langflow/components/documentloaders/BSHTMLLoader.py +++ b/src/backend/langflow/components/documentloaders/BSHTMLLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict + class BSHTMLLoaderComponent(CustomComponent): display_name = "BSHTMLLoader" description = "Load `HTML` files and parse them with `beautiful soup`." diff --git a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py index be2e3b8de..34151efe7 100644 --- a/src/backend/langflow/components/documentloaders/CoNLLULoader.py +++ b/src/backend/langflow/components/documentloaders/CoNLLULoader.py @@ -1,8 +1,7 @@ - from langflow import CustomComponent from langchain.documents import Document from typing import Optional, Dict -from langchain.field_typing import TemplateField +from langflow.field_typing import TemplateField class CoNLLULoaderComponent(CustomComponent): @@ -17,7 +16,7 @@ class CoNLLULoaderComponent(CustomComponent): required=True, type="file", file_types=["conllu"], - suffixes=['.conllu'], + suffixes=[".conllu"], ), "metadata": TemplateField( display_name="Metadata", diff --git a/src/backend/langflow/components/documentloaders/EverNoteLoader.py b/src/backend/langflow/components/documentloaders/EverNoteLoader.py index 5300c0ef9..fb9460a04 100644 --- a/src/backend/langflow/components/documentloaders/EverNoteLoader.py +++ 
b/src/backend/langflow/components/documentloaders/EverNoteLoader.py @@ -1,13 +1,13 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict + class EverNoteLoaderComponent(CustomComponent): display_name = "EverNoteLoader" description = "Load from `EverNote`." documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote" - + def build_config(self): return { "file_path": { diff --git a/src/backend/langflow/components/documentloaders/GitbookLoader.py b/src/backend/langflow/components/documentloaders/GitbookLoader.py index 7157f249e..7351fefff 100644 --- a/src/backend/langflow/components/documentloaders/GitbookLoader.py +++ b/src/backend/langflow/components/documentloaders/GitbookLoader.py @@ -1,6 +1,5 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict diff --git a/src/backend/langflow/components/documentloaders/IFixitLoader.py b/src/backend/langflow/components/documentloaders/IFixitLoader.py index 0666872a0..b6a439b3e 100644 --- a/src/backend/langflow/components/documentloaders/IFixitLoader.py +++ b/src/backend/langflow/components/documentloaders/IFixitLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict + class IFixitLoaderComponent(CustomComponent): display_name = "IFixitLoader" description = "Load `iFixit` repair guides, device wikis and answers." 
diff --git a/src/backend/langflow/components/documentloaders/IMSDbLoader.py b/src/backend/langflow/components/documentloaders/IMSDbLoader.py index 215f8585a..14859c3d1 100644 --- a/src/backend/langflow/components/documentloaders/IMSDbLoader.py +++ b/src/backend/langflow/components/documentloaders/IMSDbLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Dict, Optional + class IMSDbLoaderComponent(CustomComponent): display_name = "IMSDbLoader" description = "Load `IMSDb` webpages." diff --git a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py index b5a5007da..901148264 100644 --- a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py +++ b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Dict, Optional + class ReadTheDocsLoaderComponent(CustomComponent): display_name = "ReadTheDocsLoader" description = "Load `ReadTheDocs` documentation directory." 
diff --git a/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py index f6358324e..46dc5aa2e 100644 --- a/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py +++ b/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py @@ -1,6 +1,5 @@ - from langchain import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict diff --git a/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py index 5ff906482..bf57664ff 100644 --- a/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py +++ b/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py @@ -1,13 +1,14 @@ - from langchain import CustomComponent -from langchain.field_typing import Document +from langflow.field_typing import Document from typing import Optional, Dict class UnstructuredWordDocumentLoaderComponent(CustomComponent): display_name = "UnstructuredWordDocumentLoader" description = "Load `Microsoft Word` file using `Unstructured`." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word" + documentation = ( + "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word" + ) def build_config(self): return { @@ -17,11 +18,7 @@ class UnstructuredWordDocumentLoaderComponent(CustomComponent): "type": "file", "suffixes": [".docx", ".doc"], }, - "metadata": { - "display_name": "Metadata", - "required": False, - "type": "dict" - }, + "metadata": {"display_name": "Metadata", "required": False, "type": "dict"}, } def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: diff --git a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py index 6d9c6a0fe..4d8d73c4a 100644 --- a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py +++ b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py @@ -1,12 +1,14 @@ - from langflow import CustomComponent from typing import Optional, Any, Dict -from langchain.field_typing import Embeddings +from langflow.field_typing import Embeddings + class HuggingFaceEmbeddingsComponent(CustomComponent): display_name = "HuggingFaceEmbeddings" description = "HuggingFace sentence_transformers embedding models." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers" + documentation = ( + "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers" + ) def build_config(self): return { diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/langflow/components/llms/Anthropic.py index 114560337..327afc6e4 100644 --- a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent from pydantic import SecretStr from typing import Optional, Dict, Any -from langchain.field_typing import BaseLanguageModel +from langflow.field_typing import BaseLanguageModel + class AnthropicComponent(CustomComponent): display_name = "Anthropic" @@ -40,7 +40,13 @@ class AnthropicComponent(CustomComponent): # Here we are returning a placeholder class as the Anthropic class is not defined # This is to comply with the type hints required by the CustomComponent class Anthropic(BaseLanguageModel): - def __init__(self, api_key: Optional[SecretStr], api_url: Optional[str], model_kwargs: Optional[Dict[str, Any]] = None, temperature: Optional[float] = None): + def __init__( + self, + api_key: Optional[SecretStr], + api_url: Optional[str], + model_kwargs: Optional[Dict[str, Any]] = None, + temperature: Optional[float] = None, + ): # Initialize Anthropic model with the provided arguments super().__init__() self.api_key = api_key diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py index e9552f2b1..247f7c8ed 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent from langchain.llms import BaseLLM from typing import Optional, Dict, Union, Any -from langchain.field_typing import 
BaseLanguageModel +from langflow.field_typing import BaseLanguageModel + class ChatOpenAIComponent(CustomComponent): display_name = "ChatOpenAI" @@ -69,7 +69,6 @@ class ChatOpenAIComponent(CustomComponent): openai_api_key: Optional[str] = None, temperature: float = 0.7, ) -> Union[BaseLanguageModel, BaseLLM]: - # Assuming there is a class `ChatOpenAI` that takes these parameters # The `ChatOpenAI` class must be imported or defined elsewhere in the actual implementation return ChatOpenAI( diff --git a/src/backend/langflow/components/toolkits/JsonToolkit.py b/src/backend/langflow/components/toolkits/JsonToolkit.py index f0cf00a34..8ee73a4c8 100644 --- a/src/backend/langflow/components/toolkits/JsonToolkit.py +++ b/src/backend/langflow/components/toolkits/JsonToolkit.py @@ -1,6 +1,6 @@ - from langflow import CustomComponent -from langchain.field_typing import JsonSpec, Tool +from langflow.field_typing import JsonSpec, Tool + class JsonToolkitComponent(CustomComponent): display_name = "JsonToolkit" diff --git a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py index 49fd6f06d..b536fb006 100644 --- a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py +++ b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent -from langchain.field_typing import AgentExecutor, TextRequestsWrapper +from langflow.field_typing import AgentExecutor, TextRequestsWrapper from typing import Callable + class OpenAPIToolkitComponent(CustomComponent): display_name = "OpenAPIToolkit" description = "Toolkit for interacting with an OpenAPI API." 
diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index 0e72b65e5..bd1890fe7 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -1,13 +1,13 @@ - from langflow import CustomComponent from typing import Optional, List from langchain.vectorstores import Pinecone -from langchain.field_typing import ( +from langflow.field_typing import ( Document, Embeddings, NestedDict, ) + class PineconeComponent(CustomComponent): display_name = "Pinecone" description = "Construct Pinecone wrapper from raw documents." @@ -20,7 +20,7 @@ class PineconeComponent(CustomComponent): "namespace": {"display_name": "Namespace"}, "pinecone_api_key": {"display_name": "Pinecone API Key", "default": ""}, "pinecone_env": {"display_name": "Pinecone Environment", "default": ""}, - "search_kwargs": {"display_name": "Search Kwargs", "default": '{}'}, + "search_kwargs": {"display_name": "Search Kwargs", "default": "{}"}, } def build( diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/langflow/components/vectorstores/Qdrant.py index ac6a4cc09..e902cb353 100644 --- a/src/backend/langflow/components/vectorstores/Qdrant.py +++ b/src/backend/langflow/components/vectorstores/Qdrant.py @@ -1,9 +1,9 @@ - from langflow import CustomComponent -from langchain.vectorstores import Qdrant +from langchain_community.vectorstores.qdrant import Qdrant from typing import Optional, List from langflow.field_typing import Document, Embeddings, NestedDict + class QdrantComponent(CustomComponent): display_name = "Qdrant" description = "Construct Qdrant wrapper from a list of texts." 
diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py index 93b0d495f..e875fc06e 100644 --- a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -1,13 +1,13 @@ - from langflow import CustomComponent from typing import Optional, List from langchain.vectorstores import SupabaseVectorStore -from langchain.field_typing import ( +from langflow.field_typing import ( Document, Embeddings, NestedDict, ) + class SupabaseComponent(CustomComponent): display_name = "Supabase" description = "Return VectorStore initialized from texts and embeddings." @@ -27,11 +27,11 @@ class SupabaseComponent(CustomComponent): self, embedding: Embeddings, documents: Optional[List[Document]] = None, - query_name: str = '', + query_name: str = "", search_kwargs: NestedDict = {}, - supabase_service_key: str = '', - supabase_url: str = '', - table_name: str = '', + supabase_service_key: str = "", + supabase_url: str = "", + table_name: str = "", ) -> SupabaseVectorStore: return SupabaseVectorStore( documents=documents, From 5e0315ad0bc6f054160b4a5da514b41019350c1a Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 11:33:21 -0300 Subject: [PATCH 011/153] Update JsonAgent.py imports --- src/backend/langflow/components/agents/JsonAgent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/agents/JsonAgent.py b/src/backend/langflow/components/agents/JsonAgent.py index 935138142..71b276e26 100644 --- a/src/backend/langflow/components/agents/JsonAgent.py +++ b/src/backend/langflow/components/agents/JsonAgent.py @@ -4,8 +4,8 @@ from langchain.agents import AgentExecutor from typing import Callable from langflow.field_typing import ( BaseLanguageModel, - BaseToolkit, ) +from langchain_community.agent_toolkits.base import BaseToolkit class 
JsonAgentComponent(CustomComponent): display_name = "JsonAgent" From 102e0e0f932ab9699a95e6541ae9b0eeb335d827 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 11:51:50 -0300 Subject: [PATCH 012/153] Update imports in VectorStoreRouterAgent.py --- .../langflow/components/agents/VectorStoreRouterAgent.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py index 6aec821c8..d613f3080 100644 --- a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py +++ b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py @@ -1,7 +1,7 @@ from langflow import CustomComponent -from langchain.llms import BaseLanguageModel -from langchain.vectorstores import VectorStoreRouterToolkit +from langchain_core.language_models.base import BaseLanguageModel +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit from langchain.agents import AgentExecutor from typing import Callable From 411b4f78c08845158f17c55ec48a14ef34d751de Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 11:53:45 -0300 Subject: [PATCH 013/153] Update ZeroShotAgent import --- src/backend/langflow/components/agents/ZeroShotAgent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/agents/ZeroShotAgent.py b/src/backend/langflow/components/agents/ZeroShotAgent.py index fca29a9e0..bdc402059 100644 --- a/src/backend/langflow/components/agents/ZeroShotAgent.py +++ b/src/backend/langflow/components/agents/ZeroShotAgent.py @@ -1,10 +1,10 @@ from langflow import CustomComponent from langchain.agents import ZeroShotAgent +from langchain_core.tools import BaseTool from typing import List, Optional from langflow.field_typing import ( BaseLanguageModel, - BaseTool, ) class ZeroShotAgentComponent(CustomComponent): From 95b02ceeaef5cbd7a5e3e96a50f4aa7b9948fa13 Mon 
Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 18:34:50 -0300 Subject: [PATCH 014/153] Import WikipediaAPIWrapper from langchain_community.utilities --- src/backend/langflow/components/utilities/WikipediaAPIWrapper.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py index 77c68d238..4b22848ca 100644 --- a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py +++ b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py @@ -1,6 +1,7 @@ from langflow import CustomComponent from typing import Union, Callable +from langchain_community.utilities import WikipediaAPIWrapper # Assuming WikipediaAPIWrapper is a class that needs to be imported. # The import statement is not included as it is not provided in the JSON From 5cc8afc55761a20c957025ddc21dbe6f50daaf2f Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 18:42:45 -0300 Subject: [PATCH 015/153] Refactor SerpAPIWrapperComponent class in SerpAPIWrapper.py --- .../langflow/components/utilities/SerpAPIWrapper.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/components/utilities/SerpAPIWrapper.py b/src/backend/langflow/components/utilities/SerpAPIWrapper.py index 9fd37bb48..0d93dbc91 100644 --- a/src/backend/langflow/components/utilities/SerpAPIWrapper.py +++ b/src/backend/langflow/components/utilities/SerpAPIWrapper.py @@ -1,9 +1,6 @@ - from langflow import CustomComponent from typing import Callable, Union - -# Assuming SerpAPIWrapper is a predefined class within the langflow context. -# If it's not, it must be defined or imported from the appropriate module. 
+from langchain_community.utilities import SerpAPIWrapper class SerpAPIWrapperComponent(CustomComponent): display_name = "SerpAPIWrapper" @@ -17,7 +14,7 @@ class SerpAPIWrapperComponent(CustomComponent): def build( self, serpapi_api_key: str, - ) -> Union['SerpAPIWrapper', Callable]: + ) -> Union[SerpAPIWrapper, Callable]: # Removed quotes around SerpAPIWrapper # Default parameters as defined in the JSON template. default_params = { "engine": "google", @@ -29,4 +26,4 @@ class SerpAPIWrapperComponent(CustomComponent): return SerpAPIWrapper( serpapi_api_key=serpapi_api_key, params=default_params - ) + ) \ No newline at end of file From 7611ec7f9492d81c9f3fd434f96a4095bf5f226e Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 18:56:47 -0300 Subject: [PATCH 016/153] Add field type for headers in SearxSearchWrapperComponent --- .../langflow/components/utilities/SearxSearchWrapper.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/utilities/SearxSearchWrapper.py b/src/backend/langflow/components/utilities/SearxSearchWrapper.py index deaefe1a7..391e50401 100644 --- a/src/backend/langflow/components/utilities/SearxSearchWrapper.py +++ b/src/backend/langflow/components/utilities/SearxSearchWrapper.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from typing import Optional, Dict @@ -9,9 +8,10 @@ class SearxSearchWrapperComponent(CustomComponent): def build_config(self): return { "headers": { + "field_type":"dict", "display_name": "Headers", "multiline": True, - "default": '{"Authorization": "Bearer "}' + "value": '{"Authorization": "Bearer "}' }, } @@ -24,4 +24,4 @@ class SearxSearchWrapperComponent(CustomComponent): # Placeholder for actual SearxSearchWrapper instantiation # Since the actual SearxSearchWrapper class is not available, # it is assumed that it would be instantiated here with headers as an argument. 
- pass + pass \ No newline at end of file From 5c7ee7e8b18d3b6f7fe143276be87e80539bd12e Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 18:58:43 -0300 Subject: [PATCH 017/153] Update import statement for GoogleSerperAPIWrapper in GoogleSerperAPIWrapper.py --- .../langflow/components/utilities/GoogleSerperAPIWrapper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py index f48470d40..96fa82ac4 100644 --- a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py +++ b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py @@ -4,7 +4,7 @@ from typing import Dict, Optional # Assuming the existence of GoogleSerperAPIWrapper class in the serper module # If this class does not exist, you would need to create it or import the appropriate class from another module -from serper import GoogleSerperAPIWrapper +from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper class GoogleSerperAPIWrapperComponent(CustomComponent): @@ -22,7 +22,7 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): "advanced": False, "dynamic": False, "info": '', - "type": "dict", + "field_type": "dict", "list": False, "value": { "news": "news", From 0737d2a790b490729fe638ea81bff1349d45aa8a Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:00:30 -0300 Subject: [PATCH 018/153] Update import path for GoogleSearchAPIWrapper --- .../langflow/components/utilities/GoogleSearchAPIWrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py index 110d6ea11..329358f52 100644 --- a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py @@ 
-5,7 +5,7 @@ from typing import Optional, Union, Callable # Assuming GoogleSearchAPIWrapper is a valid import based on JSON # and it exists in some module that should be imported here. # The import path should be replaced with the correct one once available. -from some_module import GoogleSearchAPIWrapper +from langchain_community.utilities.google_search import GoogleSearchAPIWrapper class GoogleSearchAPIWrapperComponent(CustomComponent): From e13eeeaea6e93e828558f1f472d7e32e076e1369 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:10:04 -0300 Subject: [PATCH 019/153] Fix password field in GoogleSerperAPIWrapperComponent --- .../langflow/components/utilities/GoogleSerperAPIWrapper.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py index 96fa82ac4..724ef6e98 100644 --- a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py +++ b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py @@ -35,14 +35,13 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): "display_name": "Serper API Key", "show": True, "multiline": False, - "password": False, # corrected based on error message + "password": True, "name": "serper_api_key", "advanced": False, "dynamic": False, "info": '', "type": "str", "list": False, - "value": "" # assuming empty string as default, needs to be set by user } } From ba4f355fbfc1f3db13fc959bfb472f4278524fb3 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:11:02 -0300 Subject: [PATCH 020/153] Update Google CSE ID field to hide password --- .../langflow/components/utilities/GoogleSearchAPIWrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py index 329358f52..37c5ec33e 100644 --- 
a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py @@ -15,7 +15,7 @@ class GoogleSearchAPIWrapperComponent(CustomComponent): def build_config(self): return { "google_api_key": {"display_name": "Google API Key", "password": True}, - "google_cse_id": {"display_name": "Google CSE ID"}, + "google_cse_id": {"display_name": "Google CSE ID","password":True}, # Fields with "show": False are omitted based on the rules } From 5e94bfb8b27ad5d1f06200c0bd379106e2bad57e Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:16:55 -0300 Subject: [PATCH 021/153] Add VectorStoreInfo import to VectorStoreAgent.py --- src/backend/langflow/components/agents/VectorStoreAgent.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/agents/VectorStoreAgent.py b/src/backend/langflow/components/agents/VectorStoreAgent.py index e554e0d96..339e3f713 100644 --- a/src/backend/langflow/components/agents/VectorStoreAgent.py +++ b/src/backend/langflow/components/agents/VectorStoreAgent.py @@ -1,8 +1,9 @@ from langflow import CustomComponent from langchain.agents import AgentExecutor +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo from typing import Union, Callable -from langflow.field_typing import BaseLanguageModel, VectorStore +from langflow.field_typing import BaseLanguageModel class VectorStoreAgentComponent(CustomComponent): display_name = "VectorStoreAgent" @@ -17,6 +18,6 @@ class VectorStoreAgentComponent(CustomComponent): def build( self, llm: BaseLanguageModel, - vectorstoreinfo: VectorStore, + vectorstoreinfo: VectorStoreInfo, ) -> Union[AgentExecutor, Callable]: return AgentExecutor(llm=llm, vectorstore=vectorstoreinfo) From 7ecb0b6ec4bfa47b7cfff5b099d4b7dca3d7644b Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:23:02 -0300 Subject: [PATCH 022/153] Update DirectoryLoaderComponent 
configuration --- .../documentloaders/DirectoryLoader.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/DirectoryLoader.py b/src/backend/langflow/components/documentloaders/DirectoryLoader.py index 3e27458f8..41ff19b7b 100644 --- a/src/backend/langflow/components/documentloaders/DirectoryLoader.py +++ b/src/backend/langflow/components/documentloaders/DirectoryLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from langchain.data_connections import Document +from langchain.docstore.document import Document from typing import Optional, Dict, Any class DirectoryLoaderComponent(CustomComponent): @@ -9,14 +9,14 @@ class DirectoryLoaderComponent(CustomComponent): def build_config(self) -> Dict[str, Any]: return { - "glob": {"display_name": "Glob Pattern", "default": "**/*.txt"}, - "load_hidden": {"display_name": "Load Hidden Files", "default": False, "advanced": True}, - "max_concurrency": {"display_name": "Max Concurrency", "default": 10, "advanced": True}, - "metadata": {"display_name": "Metadata", "default": {}}, + "glob": {"display_name": "Glob Pattern", "value": "**/*.txt"}, + "load_hidden": {"display_name": "Load Hidden Files", "value": False, "advanced": True}, + "max_concurrency": {"display_name": "Max Concurrency", "value": 10, "advanced": True}, + "metadata": {"display_name": "Metadata", "value": {}}, "path": {"display_name": "Local Directory"}, - "recursive": {"display_name": "Recursive", "default": True, "advanced": True}, - "silent_errors": {"display_name": "Silent Errors", "default": False, "advanced": True}, - "use_multithreading": {"display_name": "Use Multithreading", "default": True, "advanced": True}, + "recursive": {"display_name": "Recursive", "value": True, "advanced": True}, + "silent_errors": {"display_name": "Silent Errors", "value": False, "advanced": True}, + "use_multithreading": {"display_name": "Use Multithreading", "value": True, "advanced": 
True}, } def build( @@ -25,7 +25,7 @@ class DirectoryLoaderComponent(CustomComponent): path: str, load_hidden: Optional[bool] = False, max_concurrency: Optional[int] = 10, - metadata: Optional[Dict[str, Any]] = None, + metadata: Optional[dict] = {}, recursive: Optional[bool] = True, silent_errors: Optional[bool] = False, use_multithreading: Optional[bool] = True, From eff7dbc009b2f402d4ed43222b16360bec8018ac Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:24:23 -0300 Subject: [PATCH 023/153] Update import statement for BingSearchAPIWrapper --- .../langflow/components/utilities/BingSearchAPIWrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py index dc2bf051d..abe10d5bf 100644 --- a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py @@ -4,7 +4,7 @@ from langflow import CustomComponent # Assuming `BingSearchAPIWrapper` is a class that exists in the context # and has the appropriate methods and attributes. # We need to make sure this class is importable from the context where this code will be running. 
-from your_module import BingSearchAPIWrapper +from langchain_community.utilities.bing_search import BingSearchAPIWrapper class BingSearchAPIWrapperComponent(CustomComponent): From 0ebf7d35e478cbc9f1596f6bea4bf3151bfed41c Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:27:04 -0300 Subject: [PATCH 024/153] Update VectorStoreToolkit imports --- .../langflow/components/toolkits/VectorStoreToolkit.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py index b83246b07..236e2146f 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py @@ -1,8 +1,8 @@ from langflow import CustomComponent -from langchain.toolkits import VectorStoreToolkit +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo from langflow.field_typing import ( - VectorStore, Tool, ) @@ -17,6 +17,6 @@ class VectorStoreToolkitComponent(CustomComponent): def build( self, - vectorstore_info: VectorStore, + vectorstore_info: VectorStoreInfo, ) -> Tool: return VectorStoreToolkit(vectorstore_info=vectorstore_info) From 1b3fa78ac70ec332f8117355b78e8a526634c80a Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:27:53 -0300 Subject: [PATCH 025/153] Update metadata field type in AZLyricsLoader.py --- .../langflow/components/documentloaders/AZLyricsLoader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py index 4f2d8e7a3..66e8e86d9 100644 --- a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py +++ b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py @@ -10,7 
+10,7 @@ class AZLyricsLoaderComponent(CustomComponent): def build_config(self): return { - "metadata": {"display_name": "Metadata", "type": "dict", "default": {}, "show": True}, + "metadata": {"display_name": "Metadata", "field_type": "dict", "value": {}, "show": True}, "web_path": {"display_name": "Web Page", "type": "str", "required": True, "show": True}, } From 65ba59cc2e271c5dd06770b9202debef2b5c6e0b Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:30:04 -0300 Subject: [PATCH 026/153] Update VectorStoreRouterToolkit to use VectorStoreInfo --- .../langflow/components/toolkits/VectorStoreRouterToolkit.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py index 14e79f088..11ff2a6cc 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py @@ -1,7 +1,8 @@ from langflow import CustomComponent from typing import List -from langchain.vectorstores import VectorStore +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo class VectorStoreRouterToolkitComponent(CustomComponent): display_name = "VectorStoreRouterToolkit" @@ -14,7 +15,7 @@ class VectorStoreRouterToolkitComponent(CustomComponent): def build( self, - vectorstores: List[VectorStore], + vectorstores: List[VectorStoreInfo], ): # Assuming the class `VectorStoreRouterToolkit` exists within a module, but since there # is no further information provided about the module structure, I will assume it is From 069cbd7dcca96a9e10b6e472b80dbf5094353ec1 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:31:55 -0300 Subject: [PATCH 027/153] Update AirbyteJSONLoaderComponent configuration --- 
.../langflow/components/documentloaders/AirbyteJSONLoader.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py index 954638926..f1f22e45c 100644 --- a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py +++ b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py @@ -17,10 +17,11 @@ class AirbyteJSONLoaderComponent(CustomComponent): "type": "file", "fileTypes": ["json"], "required": True, + "field_type": "file", }, "metadata": { "display_name": "Metadata", - "type": "dict", + "field_type": "dict", "required": False, }, } From 5087a503f21213ca4f3d113b93a1621ccfa40a40 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:34:30 -0300 Subject: [PATCH 028/153] Refactor BSHTMLLoaderComponent class in BSHTMLLoader.py --- .../langflow/components/documentloaders/BSHTMLLoader.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/BSHTMLLoader.py b/src/backend/langflow/components/documentloaders/BSHTMLLoader.py index ffbc45d74..672e1e24d 100644 --- a/src/backend/langflow/components/documentloaders/BSHTMLLoader.py +++ b/src/backend/langflow/components/documentloaders/BSHTMLLoader.py @@ -17,12 +17,13 @@ class BSHTMLLoaderComponent(CustomComponent): "type": "file", "suffixes": [".html"], "file_types": ["html"], + "field_type": "file", }, "metadata": { "display_name": "Metadata", "required": False, "show": True, - "type": "dict", + "field_type": "dict", }, } From 2d2fd9a2aef1c9df70157bb0996c17d6fa5a859b Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:36:49 -0300 Subject: [PATCH 029/153] Update CSVLoader imports and metadata parameter --- .../langflow/components/documentloaders/CSVLoader.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git 
a/src/backend/langflow/components/documentloaders/CSVLoader.py b/src/backend/langflow/components/documentloaders/CSVLoader.py index 64f1f5527..dce3f5708 100644 --- a/src/backend/langflow/components/documentloaders/CSVLoader.py +++ b/src/backend/langflow/components/documentloaders/CSVLoader.py @@ -1,8 +1,8 @@ -from langchain import CustomComponent +from langflow import CustomComponent from typing import Optional, Dict, List -from langchain.loaders import CSVLoader -from langchain.documents import Document +from langchain_community.document_loaders.csv_loader import CSVLoader +from langchain.docstore.document import Document class CSVLoaderComponent(CustomComponent): display_name = "CSVLoader" @@ -15,6 +15,7 @@ class CSVLoaderComponent(CustomComponent): "required": True, "suffixes": [".csv"], "file_types": ["csv"], + "field_type": "file", }, "metadata": { "display_name": "Metadata", @@ -25,6 +26,6 @@ class CSVLoaderComponent(CustomComponent): def build( self, file_path: str, - metadata: Optional[Dict[str, str]] = None, + metadata: dict ) -> List[Document]: return CSVLoader(file_path=file_path, metadata=metadata).load() From a29d7e515c3c29286c5d36876017d43513f10ae2 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:37:53 -0300 Subject: [PATCH 030/153] Update imports in OpenAPIToolkit.py --- src/backend/langflow/components/toolkits/OpenAPIToolkit.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py index b536fb006..4f3309117 100644 --- a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py +++ b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py @@ -1,6 +1,8 @@ from langflow import CustomComponent -from langflow.field_typing import AgentExecutor, TextRequestsWrapper +from langflow.field_typing import AgentExecutor from typing import Callable +from langchain_community.utilities.requests import 
TextRequestsWrapper +from langchain_community.agent_toolkits.openapi.toolkit import OpenAPIToolkit class OpenAPIToolkitComponent(CustomComponent): From cbacf559555835da901dc63f887cf89a88d8e925 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:38:16 -0300 Subject: [PATCH 031/153] Update CoNLLULoaderComponent configuration --- .../components/documentloaders/CoNLLULoader.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py index 34151efe7..653919bd7 100644 --- a/src/backend/langflow/components/documentloaders/CoNLLULoader.py +++ b/src/backend/langflow/components/documentloaders/CoNLLULoader.py @@ -1,5 +1,5 @@ from langflow import CustomComponent -from langchain.documents import Document +from langchain.docstore.document import Document from typing import Optional, Dict from langflow.field_typing import TemplateField @@ -11,13 +11,13 @@ class CoNLLULoaderComponent(CustomComponent): def build_config(self): return { - "file_path": TemplateField( - display_name="File Path", - required=True, - type="file", - file_types=["conllu"], - suffixes=[".conllu"], - ), + "file_path": { + "display_name": "File Path", + "required": True, + "suffixes": [".conllu"], + "file_types": ["conllu"], + "field_type": "file", + }, "metadata": TemplateField( display_name="Metadata", required=False, @@ -25,7 +25,7 @@ class CoNLLULoaderComponent(CustomComponent): ), } - def build(self, file_path: str, metadata: Optional[Dict[str, str]] = None) -> Document: + def build(self, file_path: str, metadata: dict) -> Document: # Here, you would use the actual class that loads CoNLL-U files. # As I don't have the specific class, I'm returning an instance of Document. # In a real scenario, you should replace the below Document with the actual loader class. 
From 920f092ea1b1f59af508d39a45d420631577a5c9 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:40:27 -0300 Subject: [PATCH 032/153] Update CollegeConfidentialLoader.py --- .../components/documentloaders/CollegeConfidentialLoader.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py index 163e43afa..9c2b2cfaf 100644 --- a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py +++ b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from langchain.document_loaders import Document +from langchain.docstore.document import Document from typing import Optional, Dict class CollegeConfidentialLoaderComponent(CustomComponent): @@ -10,14 +10,14 @@ class CollegeConfidentialLoaderComponent(CustomComponent): def build_config(self): return { - "metadata": {"display_name": "Metadata", "default": {}}, + "metadata": {"display_name": "Metadata", "values": {}}, "web_path": {"display_name": "Web Page", "required": True}, } def build( self, web_path: str, - metadata: Optional[Dict] = None, + metadata: Optional[dict] = {} ) -> Document: # Assuming there is a loader class `CollegeConfidentialLoader` that takes `metadata` and `web_path` as arguments # Replace `CollegeConfidentialLoader` with the actual class name if different From 4370cbb33494c3fa544196200635b643d104a7f2 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:42:30 -0300 Subject: [PATCH 033/153] Update JsonToolkit imports --- src/backend/langflow/components/toolkits/JsonToolkit.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/toolkits/JsonToolkit.py b/src/backend/langflow/components/toolkits/JsonToolkit.py index 8ee73a4c8..d6d1710bb 100644 --- 
a/src/backend/langflow/components/toolkits/JsonToolkit.py +++ b/src/backend/langflow/components/toolkits/JsonToolkit.py @@ -1,5 +1,7 @@ from langflow import CustomComponent -from langflow.field_typing import JsonSpec, Tool +from langflow.field_typing import Tool +from langchain_community.tools.json.tool import JsonSpec +from langchain_community.agent_toolkits.json.toolkit import JsonToolkit class JsonToolkitComponent(CustomComponent): From ddd613d1fc46324d71f8d39919c292a7389ac6bb Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 9 Jan 2024 19:45:27 -0300 Subject: [PATCH 034/153] Update imports in CharacterTextSplitter.py --- .../components/textsplitters/CharacterTextSplitter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py index de8a31848..ba64b0f32 100644 --- a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py +++ b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py @@ -1,7 +1,7 @@ from langflow import CustomComponent -from langchain.document_transformers import TextSplitter -from langchain.documents import Document +from langchain.text_splitter import TextSplitter +from langchain_core.documents.base import Document from typing import List From 94eaaed95dea7a230b9c6338e03f6fad290a4a99 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:49:24 -0300 Subject: [PATCH 035/153] Update EverNoteLoader.py: Added "field_type" attribute to "file" and "dict" types --- .../langflow/components/documentloaders/EverNoteLoader.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/EverNoteLoader.py b/src/backend/langflow/components/documentloaders/EverNoteLoader.py index fb9460a04..1d8c95b34 100644 --- a/src/backend/langflow/components/documentloaders/EverNoteLoader.py +++ 
b/src/backend/langflow/components/documentloaders/EverNoteLoader.py @@ -17,12 +17,13 @@ class EverNoteLoaderComponent(CustomComponent): "show": True, "type": "file", "file_types": ["xml"], + "field_type": "file", }, "metadata": { "display_name": "Metadata", "required": False, "show": True, - "type": "dict", + "field_type": "dict", }, } From aa2aa1da517f7f595d00a634400ff11601fb4e9e Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 19:52:23 -0300 Subject: [PATCH 036/153] Update imports and field types in FacebookChatLoader.py --- .../components/documentloaders/FacebookChatLoader.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py index cf0ea2380..d5b3c1dd1 100644 --- a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py +++ b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from langchain.documents import Document +from langchain.docstore.document import Document from typing import Optional, Dict class FacebookChatLoaderComponent(CustomComponent): @@ -15,10 +15,12 @@ class FacebookChatLoaderComponent(CustomComponent): "required": True, "suffixes": [".json"], "file_types": ["json"], + "field_type": "file", }, "metadata": { "display_name": "Metadata", "required": False, + "field_type": "dict", }, } @@ -27,4 +29,4 @@ class FacebookChatLoaderComponent(CustomComponent): # and returns a Document object. Replace 'FacebookChatLoader' with the actual class name. # As per the JSON, the output type is 'Document', which is part of langchain.documents. # Therefore, the 'FacebookChatLoader' should be imported or defined elsewhere in the codebase. 
- return FacebookChatLoader(file_path=file_path, metadata=metadata) + return FacebookChatLoader(file_path=file_path, metadata=metadata) \ No newline at end of file From 2fb287525a23d4a930ab72fb8b0a9ba5f875d3b4 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Tue, 9 Jan 2024 20:13:53 -0300 Subject: [PATCH 037/153] Update VertexAIEmbeddingsComponent configuration --- .../embeddings/VertexAIEmbeddings.py | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py index d0e4cc811..a736e063b 100644 --- a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py @@ -9,25 +9,25 @@ class VertexAIEmbeddingsComponent(CustomComponent): def build_config(self): return { - "client": {"display_name": "Client", "advanced": True}, - "credentials": {"display_name": "Credentials", "default": '', "file_types": ['json']}, - "location": {"display_name": "Location", "default": 'us-central1', "advanced": True}, - "max_output_tokens": {"display_name": "Max Output Tokens", "default": 128}, - "max_retries": {"display_name": "Max Retries", "default": 6, "advanced": True}, - "model_name": {"display_name": "Model Name", "default": 'textembedding-gecko'}, - "n": {"display_name": "N", "default": 1, "advanced": True}, + "credentials": {"display_name": "Credentials", "value": '', "file_types": ['json'],"field_type": "file"}, + "instance": {"display_name": "instance", "advanced": True, "field_type": "dict"}, + "location": {"display_name": "Location", "value": 'us-central1', "advanced": True}, + "max_output_tokens": {"display_name": "Max Output Tokens", "value": 128}, + "max_retries": {"display_name": "Max Retries", "value": 6, "advanced": True}, + "model_name": {"display_name": "Model Name", "value": 'textembedding-gecko'}, + "n": {"display_name": "N", "value": 1, 
"advanced": True}, "project": {"display_name": "Project", "advanced": True}, - "request_parallelism": {"display_name": "Request Parallelism", "default": 5, "advanced": True}, + "request_parallelism": {"display_name": "Request Parallelism", "value": 5, "advanced": True}, "stop": {"display_name": "Stop", "advanced": True}, - "streaming": {"display_name": "Streaming", "default": False, "advanced": True}, - "temperature": {"display_name": "Temperature", "default": 0.0}, - "top_k": {"display_name": "Top K", "default": 40, "advanced": True}, - "top_p": {"display_name": "Top P", "default": 0.95, "advanced": True}, + "streaming": {"display_name": "Streaming", "value": False, "advanced": True}, + "temperature": {"display_name": "Temperature", "value": 0.0}, + "top_k": {"display_name": "Top K", "value": 40, "advanced": True}, + "top_p": {"display_name": "Top P", "value": 0.95, "advanced": True}, } def build( self, - client: Optional[str] = None, + instance: Optional[str] = None, credentials: Optional[str] = None, location: str = 'us-central1', max_output_tokens: int = 128, @@ -43,7 +43,7 @@ class VertexAIEmbeddingsComponent(CustomComponent): top_p: float = 0.95, ) -> VertexAIEmbeddings: return VertexAIEmbeddings( - client=client, + instance=instance, credentials=credentials, location=location, max_output_tokens=max_output_tokens, From a291564b4e1f059d3ebdf02c241e25dfe54be041 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 09:34:13 -0300 Subject: [PATCH 038/153] Update VertexAIComponent configuration --- .../langflow/components/llms/VertexAI.py | 63 ++++++++++++------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/src/backend/langflow/components/llms/VertexAI.py b/src/backend/langflow/components/llms/VertexAI.py index 65bd2cacd..c019f64e4 100644 --- a/src/backend/langflow/components/llms/VertexAI.py +++ b/src/backend/langflow/components/llms/VertexAI.py @@ -11,45 +11,49 @@ class VertexAIComponent(CustomComponent): return { "credentials": { 
"display_name": "Credentials", - "type": "file", + "field_type": "file", "file_types": ["json"], "required": False, - "default": None, + "value": None, }, "location": { "display_name": "Location", "type": "str", - "default": "us-central1", + "advanced":True, + "value": "us-central1", "required": False, }, "max_output_tokens": { "display_name": "Max Output Tokens", - "type": "int", - "default": 128, + "field_type": "int", + "value": 128, "required": False, + "advanced":True }, "max_retries": { "display_name": "Max Retries", "type": "int", - "default": 6, + "value": 6, "required": False, + "advanced":True }, "metadata": { "display_name": "Metadata", - "type": "dict", + "field_type": "dict", "required": False, "default": {}, }, "model_name": { "display_name": "Model Name", "type": "str", - "default": "text-bison", + "value": "text-bison", "required": False, }, "n": { + "advanced":True, "display_name": "N", - "type": "int", - "default": 1, + "field_type": "int", + "value": 1, "required": False, }, "project": { @@ -60,57 +64,72 @@ class VertexAIComponent(CustomComponent): }, "request_parallelism": { "display_name": "Request Parallelism", - "type": "int", - "default": 5, + "field_type": "int", + "value": 5, "required": False, + "advanced":True }, "streaming": { "display_name": "Streaming", - "type": "bool", - "default": False, + "field_type": "bool", + "value": False, "required": False, + "advanced":True }, "temperature": { "display_name": "Temperature", - "type": "float", - "default": 0.0, + "field_type": "float", + "value": 0.0, "required": False, + "advanced":True }, "top_k": { "display_name": "Top K", "type": "int", "default": 40, "required": False, + "advanced":True }, "top_p": { "display_name": "Top P", - "type": "float", - "default": 0.95, + "field_type": "float", + "value": 0.95, "required": False, + "advanced":True }, "tuned_model_name": { "display_name": "Tuned Model Name", "type": "str", "required": False, - "default": None, + "value": None, + "advanced":True 
}, "verbose": { "display_name": "Verbose", - "type": "bool", - "default": False, + "field_type": "bool", + "value": False, "required": False, }, + "name":{ + "display_name":"Name", + "field_type":"str" + }, + "client_preview":{ + "display_name":"client_preview" + } } def build( self, credentials: Optional[str] = None, + client_preview: Optional[any]= None, location: str = "us-central1", max_output_tokens: int = 128, max_retries: int = 6, metadata: Dict = None, model_name: str = "text-bison", n: int = 1, + name:Optional[str] = None, project: Optional[str] = None, request_parallelism: int = 5, streaming: bool = False, @@ -127,6 +146,7 @@ class VertexAIComponent(CustomComponent): from langchain.llms import VertexAI return VertexAI( + client_preview=client_preview, credentials=credentials, location=location, max_output_tokens=max_output_tokens, @@ -134,6 +154,7 @@ class VertexAIComponent(CustomComponent): metadata=metadata, model_name=model_name, n=n, + name=name, project=project, request_parallelism=request_parallelism, streaming=streaming, From 6f54fe657942a92fcea448b7e48ff2fc7f5dfaea Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 09:34:26 -0300 Subject: [PATCH 039/153] Update type hints in document loaders --- src/backend/langflow/components/documentloaders/CSVLoader.py | 2 +- src/backend/langflow/components/documentloaders/CoNLLULoader.py | 1 - .../components/documentloaders/CollegeConfidentialLoader.py | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/CSVLoader.py b/src/backend/langflow/components/documentloaders/CSVLoader.py index dce3f5708..63ce29ba1 100644 --- a/src/backend/langflow/components/documentloaders/CSVLoader.py +++ b/src/backend/langflow/components/documentloaders/CSVLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from typing import Optional, Dict, List +from typing import List from langchain_community.document_loaders.csv_loader import CSVLoader from 
langchain.docstore.document import Document diff --git a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py index 653919bd7..7159ba707 100644 --- a/src/backend/langflow/components/documentloaders/CoNLLULoader.py +++ b/src/backend/langflow/components/documentloaders/CoNLLULoader.py @@ -1,6 +1,5 @@ from langflow import CustomComponent from langchain.docstore.document import Document -from typing import Optional, Dict from langflow.field_typing import TemplateField diff --git a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py index 9c2b2cfaf..654932fd5 100644 --- a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py +++ b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py @@ -1,7 +1,7 @@ from langflow import CustomComponent from langchain.docstore.document import Document -from typing import Optional, Dict +from typing import Optional class CollegeConfidentialLoaderComponent(CustomComponent): display_name = "CollegeConfidentialLoader" From 914b8ddf6a716f83c54300616ee37c1405eedcf3 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 10:14:03 -0300 Subject: [PATCH 040/153] Refactor OpenAIComponent class in OpenAI.py --- .../langflow/components/llms/OpenAI.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/src/backend/langflow/components/llms/OpenAI.py b/src/backend/langflow/components/llms/OpenAI.py index 4e31eb207..5c8d38730 100644 --- a/src/backend/langflow/components/llms/OpenAI.py +++ b/src/backend/langflow/components/llms/OpenAI.py @@ -1,8 +1,7 @@ from langflow import CustomComponent -from langchain.llms import BaseLLM from typing import Optional, Dict - +from langchain_openai.llms.base import OpenAI class OpenAIComponent(CustomComponent): display_name = "OpenAI" @@ -14,7 +13,7 @@ 
class OpenAIComponent(CustomComponent): "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, "model_name": { "display_name": "Model Name", - "default": "text-davinci-003", + "value": "text-davinci-003", "options": [ "text-davinci-003", "text-davinci-002", @@ -32,22 +31,22 @@ class OpenAIComponent(CustomComponent): }, "openai_api_key": { "display_name": "OpenAI API Key", - "default": "", + "value": "", "password": True, }, - "temperature": {"display_name": "Temperature", "default": 0.7}, + "temperature": {"display_name": "Temperature", "value": 0.7}, } def build( self, - max_tokens: int = 256, + max_tokens: Optional[int] = 256, model_kwargs: Optional[Dict] = None, - model_name: str = "text-davinci-003", - openai_api_base: str = "https://api.openai.com/v1", + model_name: Optional[str] = "text-davinci-003", + openai_api_base: Optional[str] = "https://api.openai.com/v1", openai_api_key: str = "", - temperature: float = 0.7, - ) -> BaseLLM: - return BaseLLM( + temperature: Optional[float] = 0.7, + ) -> OpenAI: + return OpenAI( max_tokens=max_tokens, model_kwargs=model_kwargs or {}, model_name=model_name, From a8bf2ccf663d464c21925e49e5cdbeee869ce2e3 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 10:19:38 -0300 Subject: [PATCH 041/153] Update LlamaCpp import and return type --- src/backend/langflow/components/llms/LlamaCpp.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/llms/LlamaCpp.py b/src/backend/langflow/components/llms/LlamaCpp.py index eaf40f1d8..d8c917ba2 100644 --- a/src/backend/langflow/components/llms/LlamaCpp.py +++ b/src/backend/langflow/components/llms/LlamaCpp.py @@ -1,7 +1,7 @@ from typing import Optional, List, Dict, Any from langflow import CustomComponent -from langchain.llms import BaseLanguageModel +from langchain_community.llms.llamacpp import LlamaCpp class LlamaCppComponent(CustomComponent): display_name = "LlamaCpp" @@ -84,10 +84,10 @@ class 
LlamaCppComponent(CustomComponent): use_mmap: Optional[bool] = True, verbose: Optional[bool] = True, vocab_only: Optional[bool] = False, - ) -> BaseLanguageModel: + ) -> LlamaCpp: # Here you would instantiate the LlamaCpp model with the provided parameters # Since the actual implementation of LlamaCpp is not provided, this is a placeholder - return BaseLanguageModel( + return LlamaCpp( model_path=model_path, grammar=grammar, cache=cache, From 55e7d23f554366aead77366ab34216e9fba0e4e6 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 10:29:26 -0300 Subject: [PATCH 042/153] Update CTransformersComponent to include default config values --- src/backend/langflow/components/llms/CTransformers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/llms/CTransformers.py b/src/backend/langflow/components/llms/CTransformers.py index a6b34b1e4..881953639 100644 --- a/src/backend/langflow/components/llms/CTransformers.py +++ b/src/backend/langflow/components/llms/CTransformers.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from langchain.llms import BaseLanguageModel +from langchain_community.llms.ctransformers import CTransformers from typing import Optional, Dict class CTransformersComponent(CustomComponent): @@ -13,7 +13,7 @@ class CTransformersComponent(CustomComponent): "model": {"display_name": "Model", "required": True}, "model_file": {"display_name": "Model File", "required": False}, "model_type": {"display_name": "Model Type", "required": False}, - "config": {"display_name": "Config", "advanced": True, "required": False}, + "config": {"display_name": "Config", "advanced": True, "required": False,"field_type":"dict","value":'{"top_k":40,"top_p":0.95,"temperature":0.8,"repetition_penalty":1.1,"last_n_tokens":64,"seed":-1,"max_new_tokens":256,"stop":"","stream":"False","reset":"True","batch_size":8,"threads":-1,"context_length":-1,"gpu_layers":0}'} } def build( @@ -22,7 +22,7 @@ class 
CTransformersComponent(CustomComponent): model_file: Optional[str] = None, model_type: Optional[str] = None, config: Optional[Dict] = None - ) -> BaseLanguageModel: + ) -> CTransformers: # Default config values default_config = { "top_k": 40, From ebe9cfc8a4a33932e3661b8237ce6981fd3c5720 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Wed, 10 Jan 2024 11:13:04 -0300 Subject: [PATCH 043/153] Update AnthropicComponent configuration --- src/backend/langflow/components/llms/Anthropic.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/langflow/components/llms/Anthropic.py index 327afc6e4..e6327dbab 100644 --- a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -12,7 +12,8 @@ class AnthropicComponent(CustomComponent): return { "anthropic_api_key": { "display_name": "Anthropic API Key", - "type": SecretStr, + "type": str, + "password": True, }, "anthropic_api_url": { "display_name": "Anthropic API URL", @@ -20,7 +21,7 @@ class AnthropicComponent(CustomComponent): }, "model_kwargs": { "display_name": "Model Kwargs", - "type": Dict[str, Any], + "field_type": 'dict', "advanced": True, }, "temperature": { @@ -31,7 +32,7 @@ class AnthropicComponent(CustomComponent): def build( self, - anthropic_api_key: Optional[SecretStr], + anthropic_api_key: Optional[str], anthropic_api_url: Optional[str], model_kwargs: Optional[Dict[str, Any]], temperature: Optional[float] = None, @@ -42,7 +43,7 @@ class AnthropicComponent(CustomComponent): class Anthropic(BaseLanguageModel): def __init__( self, - api_key: Optional[SecretStr], + api_key: Optional[str], api_url: Optional[str], model_kwargs: Optional[Dict[str, Any]] = None, temperature: Optional[float] = None, @@ -64,4 +65,4 @@ class AnthropicComponent(CustomComponent): api_url=anthropic_api_url, model_kwargs=model_kwargs, temperature=temperature, - ) + ) \ No newline at end of file From 
44a17f6f25b1c5453770bb34eacc6e76425d128f Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 11:31:22 -0300 Subject: [PATCH 044/153] Update ChatVertexAIComponent parameters --- .../langflow/components/llms/ChatVertexAI.py | 41 ++++++++++--------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/src/backend/langflow/components/llms/ChatVertexAI.py b/src/backend/langflow/components/llms/ChatVertexAI.py index 63fe99f09..310d65e2f 100644 --- a/src/backend/langflow/components/llms/ChatVertexAI.py +++ b/src/backend/langflow/components/llms/ChatVertexAI.py @@ -1,7 +1,8 @@ from langflow import CustomComponent -from typing import List -from langchain.messages import BaseMessage +from typing import List, Optional +from langchain_core.messages.base import BaseMessage +from langchain_community.chat_models.vertexai import ChatVertexAI class ChatVertexAIComponent(CustomComponent): display_name = "ChatVertexAI" @@ -11,7 +12,7 @@ class ChatVertexAIComponent(CustomComponent): return { "credentials": { "display_name": "Credentials", - "type": "file", + "field_type": "file", "fileTypes": ["json"], "file_path": None, }, @@ -21,53 +22,53 @@ class ChatVertexAIComponent(CustomComponent): }, "location": { "display_name": "Location", - "default": "us-central1", + "value": "us-central1", }, "max_output_tokens": { "display_name": "Max Output Tokens", - "default": 128, + "value": 128, "advanced": True, }, "model_name": { "display_name": "Model Name", - "default": "chat-bison", + "value": "chat-bison", }, "project": { "display_name": "Project", }, "temperature": { "display_name": "Temperature", - "default": 0.0, + "value": 0.0, }, "top_k": { "display_name": "Top K", - "default": 40, + "value": 40, "advanced": True, }, "top_p": { "display_name": "Top P", - "default": 0.95, + "value": 0.95, "advanced": True, }, "verbose": { "display_name": "Verbose", - "default": False, + "value": False, "advanced": True, }, } def build( self, - credentials: str, - examples: 
List[BaseMessage], - project: str, - location: str = "us-central1", - max_output_tokens: int = 128, - model_name: str = "chat-bison", - temperature: float = 0.0, - top_k: int = 40, - top_p: float = 0.95, - verbose: bool = False, + credentials: Optional[str], + examples: Optional[List[BaseMessage]], + project: Optional[str], + location: Optional[str] = "us-central1", + max_output_tokens: Optional[int] = 128, + model_name: Optional[str] = "chat-bison", + temperature: Optional[float] = 0.0, + top_k: Optional[int] = 40, + top_p: Optional[float] = 0.95, + verbose: Optional[bool] = False, ): # Assuming there is a ChatVertexAI class that takes these parameters return ChatVertexAI( From a24e0314bcf2204dec0d6689f50ff3d8159ad7f8 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Wed, 10 Jan 2024 11:39:11 -0300 Subject: [PATCH 045/153] Update import statement in Cohere.py --- src/backend/langflow/components/llms/Cohere.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/llms/Cohere.py b/src/backend/langflow/components/llms/Cohere.py index 15895013e..0b56e9c88 100644 --- a/src/backend/langflow/components/llms/Cohere.py +++ b/src/backend/langflow/components/llms/Cohere.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from langchain.llms import BaseLanguageModel +from langchain_core.language_models.base import BaseLanguageModel from typing import Optional class CohereComponent(CustomComponent): @@ -13,7 +13,7 @@ class CohereComponent(CustomComponent): "cohere_api_key": { "display_name": "Cohere API Key", "type": "password", - "show": True + "password": True }, "max_tokens": { "display_name": "Max Tokens", From 11c5a3e001182ae293dc247a2bc044ed6f31eb4c Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 11:47:54 -0300 Subject: [PATCH 046/153] Add ChatOpenAI model and update field types --- .../langflow/components/llms/ChatOpenAI.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff 
--git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py index 247f7c8ed..e6b1c2bd0 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -1,6 +1,7 @@ from langflow import CustomComponent from langchain.llms import BaseLLM from typing import Optional, Dict, Union, Any +from langchain_community.chat_models.openai import ChatOpenAI from langflow.field_typing import BaseLanguageModel @@ -12,19 +13,19 @@ class ChatOpenAIComponent(CustomComponent): return { "max_tokens": { "display_name": "Max Tokens", - "type": "int", + "field_type": "int", "advanced": False, "required": False, }, "model_kwargs": { "display_name": "Model Kwargs", - "type": "dict", + "field_type": "dict", "advanced": True, "required": False, }, "model_name": { "display_name": "Model Name", - "type": "str", + "field_type": "str", "advanced": False, "required": False, "options": [ @@ -37,7 +38,7 @@ class ChatOpenAIComponent(CustomComponent): }, "openai_api_base": { "display_name": "OpenAI API Base", - "type": "str", + "field_type": "str", "advanced": False, "required": False, "info": ( @@ -47,16 +48,16 @@ class ChatOpenAIComponent(CustomComponent): }, "openai_api_key": { "display_name": "OpenAI API Key", - "type": "str", + "field_type": "str", "advanced": False, "required": False, }, "temperature": { "display_name": "Temperature", - "type": "float", + "field_type": "float", "advanced": False, "required": False, - "default": 0.7, + "value": 0.7, }, } From c2ebeeda9c565c849260dff7cf0d22394f65df59 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 11:48:02 -0300 Subject: [PATCH 047/153] Remove unused import statement --- src/backend/langflow/components/llms/Anthropic.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/langflow/components/llms/Anthropic.py index e6327dbab..de69c2d08 100644 --- 
a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -1,5 +1,4 @@ from langflow import CustomComponent -from pydantic import SecretStr from typing import Optional, Dict, Any from langflow.field_typing import BaseLanguageModel From ee4a0da65dbeedce2b939f4702f5de439dc266d8 Mon Sep 17 00:00:00 2001 From: carlosrcoelho Date: Wed, 10 Jan 2024 11:52:39 -0300 Subject: [PATCH 048/153] Update ChatAnthropicComponent configuration --- src/backend/langflow/components/llms/ChatAnthropic.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/components/llms/ChatAnthropic.py b/src/backend/langflow/components/llms/ChatAnthropic.py index fbc41c3e9..0ee5b1804 100644 --- a/src/backend/langflow/components/llms/ChatAnthropic.py +++ b/src/backend/langflow/components/llms/ChatAnthropic.py @@ -1,6 +1,5 @@ from langflow import CustomComponent -from langchain.tools import SecretStr from typing import Optional, Dict, Union, Callable from langflow.field_typing import BaseLanguageModel @@ -13,7 +12,8 @@ class ChatAnthropicComponent(CustomComponent): return { "anthropic_api_key": { "display_name": "Anthropic API Key", - "type": SecretStr, + "type": str, + "password": True, }, "anthropic_api_url": { "display_name": "Anthropic API URL", @@ -21,7 +21,7 @@ class ChatAnthropicComponent(CustomComponent): }, "model_kwargs": { "display_name": "Model Kwargs", - "type": Dict[str, Union[str, int, float, bool]], + "field_type": 'dict', "advanced": True, }, "temperature": { @@ -32,9 +32,9 @@ class ChatAnthropicComponent(CustomComponent): def build( self, - anthropic_api_key: Optional[SecretStr] = None, + anthropic_api_key: Optional[str] = None, anthropic_api_url: Optional[str] = None, - model_kwargs: Optional[Dict[str, Union[str, int, float, bool]]] = None, + model_kwargs: dict = {}, temperature: Optional[float] = None, ) -> Union[BaseLanguageModel, Callable]: from 
langchain.model_io.models.chat.integrations import ChatAnthropic # Importing here due to potential local scope requirements From 2375dd31b229b8ba379ea8754d93e45371828679 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 15:30:34 -0300 Subject: [PATCH 049/153] Add GitbookLoader import and update metadata field type --- .../langflow/components/documentloaders/GitbookLoader.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/GitbookLoader.py b/src/backend/langflow/components/documentloaders/GitbookLoader.py index 7351fefff..5811403bb 100644 --- a/src/backend/langflow/components/documentloaders/GitbookLoader.py +++ b/src/backend/langflow/components/documentloaders/GitbookLoader.py @@ -1,6 +1,7 @@ from langflow import CustomComponent from langflow.field_typing import Document from typing import Optional, Dict +from langchain_community.document_loaders.gitbook import GitbookLoader class GitbookLoaderComponent(CustomComponent): @@ -11,7 +12,8 @@ class GitbookLoaderComponent(CustomComponent): return { "metadata": { "display_name": "Metadata", - "default": {}, + "field_type":"dict", + "value": {}, }, "web_page": { "display_name": "Web Page", From 1f1a1fc009bf835b17ef2088a6ded5e9b2855c30 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 15:37:47 -0300 Subject: [PATCH 050/153] Refactor HNLoaderComponent to use HNLoader instead of BaseLoader This commit refactors the HNLoaderComponent in the langflow backend to use the HNLoader class instead of the BaseLoader class. The HNLoader class is imported from the langchain_community.document_loaders.hn module. The commit also updates the return type annotation of the `load` method in HNLoaderComponent to HNLoader. Additionally, the metadata field in the component's configuration schema is updated to have a field_type of "dict". This change improves the specificity and functionality of the HNLoaderComponent. 
--- .../components/documentloaders/HNLoader.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/HNLoader.py b/src/backend/langflow/components/documentloaders/HNLoader.py index eed9bb957..5d3cb768e 100644 --- a/src/backend/langflow/components/documentloaders/HNLoader.py +++ b/src/backend/langflow/components/documentloaders/HNLoader.py @@ -1,7 +1,8 @@ -from langchain import CustomComponent -from langchain.document_loaders import BaseLoader +from langflow import CustomComponent from typing import Optional, Dict +from langchain_community.document_loaders.hn import HNLoader + class HNLoaderComponent(CustomComponent): display_name = "HNLoader" @@ -11,8 +12,9 @@ class HNLoaderComponent(CustomComponent): return { "metadata": { "display_name": "Metadata", - "default": {}, - "required": False + "value": {}, + "required": False, + "field_type": "dict" }, "web_path": { "display_name": "Web Page", @@ -24,8 +26,8 @@ class HNLoaderComponent(CustomComponent): self, web_path: str, metadata: Optional[Dict] = None, - ) -> BaseLoader: + ) -> HNLoader: # Assuming that there's a specific loader for Hacker News - # as BaseLoader does not take a web_path argument + # as HNloader does not take a web_path argument # The HackerNewsLoader needs to be defined somewhere in the actual implementation - return HackerNewsLoader(metadata=metadata, web_path=web_path) + return HNLoader(metadata=metadata, web_path=web_path) From 5437edc05f4b3d429009067b45ba29c4808132e1 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 15:38:01 -0300 Subject: [PATCH 051/153] Remove unused import in ChatAnthropic.py --- src/backend/langflow/components/llms/ChatAnthropic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/llms/ChatAnthropic.py b/src/backend/langflow/components/llms/ChatAnthropic.py index 0ee5b1804..33f9838fb 100644 --- 
a/src/backend/langflow/components/llms/ChatAnthropic.py +++ b/src/backend/langflow/components/llms/ChatAnthropic.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from typing import Optional, Dict, Union, Callable +from typing import Optional, Union, Callable from langflow.field_typing import BaseLanguageModel class ChatAnthropicComponent(CustomComponent): From 8650fde69e5d60e7e41afeafa06fcc140eed5757 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 15:42:04 -0300 Subject: [PATCH 052/153] Refactor IMSDbLoaderComponent configuration --- .../langflow/components/documentloaders/IMSDbLoader.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/IMSDbLoader.py b/src/backend/langflow/components/documentloaders/IMSDbLoader.py index 14859c3d1..99c3b75d3 100644 --- a/src/backend/langflow/components/documentloaders/IMSDbLoader.py +++ b/src/backend/langflow/components/documentloaders/IMSDbLoader.py @@ -1,5 +1,7 @@ from langflow import CustomComponent from langflow.field_typing import Document +from langchain_community.document_loaders.imsdb import IMSDbLoader + from typing import Dict, Optional @@ -9,8 +11,8 @@ class IMSDbLoaderComponent(CustomComponent): def build_config(self): return { - "metadata": {"display_name": "Metadata", "type": "dict"}, - "web_path": {"display_name": "Web Page", "type": "str"}, + "metadata": {"display_name": "Metadata", "field_type": "dict"}, + "web_path": {"display_name": "Web Page", "field_type": "str"}, } def build( @@ -18,6 +20,4 @@ class IMSDbLoaderComponent(CustomComponent): metadata: Optional[Dict] = None, web_path: str = "", ) -> Document: - # Assuming there is a class or function named `IMSDbLoader` that takes these parameters - # and returns a Document object. Replace `IMSDbLoader` with the actual class or function name. 
return IMSDbLoader(metadata=metadata, web_path=web_path) From 1425714ce93dd4525c5495522e3f5066f939210e Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 16:34:29 -0300 Subject: [PATCH 053/153] Update OpenAIEmbeddingsComponent configuration --- .../components/embeddings/OpenAIEmbeddings.py | 76 ++++++++++++++----- 1 file changed, 58 insertions(+), 18 deletions(-) diff --git a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py index 7faf4fd07..6303389c8 100644 --- a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py @@ -1,7 +1,8 @@ - from langflow import CustomComponent -from typing import Optional, Set, Dict, Any, Union, Callable -from langchain.embeddings import OpenAIEmbeddings +from langflow.field_typing import NestedDict +from typing import List, Optional, Dict, Any, Union, Callable +from langchain_openai.embeddings.base import OpenAIEmbeddings + class OpenAIEmbeddingsComponent(CustomComponent): display_name = "OpenAIEmbeddings" @@ -9,40 +10,75 @@ class OpenAIEmbeddingsComponent(CustomComponent): def build_config(self): return { - "allowed_special": {"display_name": "Allowed Special", "advanced": True}, - "disallowed_special": {"display_name": "Disallowed Special", "advanced": True}, + "allowed_special": { + "display_name": "Allowed Special", + "advanced": True, + "field_type": "str", + "is_list": True, + }, + "default_headers": { + "display_name": "Default Headers", + "advanced": True, + "field_type": "dict", + }, + "default_query": { + "display_name": "Default Query", + "advanced": True, + "field_type": "NestedDict", + }, + "disallowed_special": { + "display_name": "Disallowed Special", + "advanced": True, + "field_type": "str", + "is_list": True, + }, "chunk_size": {"display_name": "Chunk Size", "advanced": True}, "client": {"display_name": "Client", "advanced": True}, "deployment": 
{"display_name": "Deployment", "advanced": True}, - "embedding_ctx_length": {"display_name": "Embedding Context Length", "advanced": True}, + "embedding_ctx_length": { + "display_name": "Embedding Context Length", + "advanced": True, + }, "max_retries": {"display_name": "Max Retries", "advanced": True}, "model": {"display_name": "Model", "advanced": True}, "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, - "openai_api_base": {"display_name": "OpenAI API Base", "advanced": True}, - "openai_api_key": {"display_name": "OpenAI API Key"}, - "openai_api_type": {"display_name": "OpenAI API Type", "advanced": True}, - "openai_api_version": {"display_name": "OpenAI API Version", "advanced": True}, - "openai_organization": {"display_name": "OpenAI Organization", "advanced": True}, + "openai_api_base": {"display_name": "OpenAI API Base","password":True, "advanced": True}, + "openai_api_key": {"display_name": "OpenAI API Key","password":True}, + "openai_api_type": {"display_name": "OpenAI API Type", "advanced": True,"password":True}, + "openai_api_version": { + "display_name": "OpenAI API Version", + "advanced": True, + }, + "openai_organization": { + "display_name": "OpenAI Organization", + "advanced": True, + }, "openai_proxy": {"display_name": "OpenAI Proxy", "advanced": True}, "request_timeout": {"display_name": "Request Timeout", "advanced": True}, - "show_progress_bar": {"display_name": "Show Progress Bar", "advanced": True}, + "show_progress_bar": { + "display_name": "Show Progress Bar", + "advanced": True, + }, "skip_empty": {"display_name": "Skip Empty", "advanced": True}, "tiktoken_model_name": {"display_name": "TikToken Model Name"}, + "tikToken_enable": {"display_name": "TikToken Enable"}, } def build( self, - allowed_special: Optional[Set[str]] = set(), - disallowed_special: str = "all", + default_headers: Optional[Dict[str, str]] = None, + default_query: Optional[NestedDict] = {}, + allowed_special: Optional[List[str]] = [], + 
disallowed_special: List[str] = ["all"], chunk_size: Optional[int] = 1000, client: Optional[Any] = None, deployment: str = "text-embedding-ada-002", embedding_ctx_length: Optional[int] = 8191, max_retries: Optional[int] = 6, model: str = "text-embedding-ada-002", - model_kwargs: Optional[Dict[str, Any]] = None, + model_kwargs: NestedDict = {}, openai_api_base: Optional[str] = None, - openai_api_key: Optional[str] = '', + openai_api_key: Optional[str] = "", openai_api_type: Optional[str] = None, openai_api_version: Optional[str] = None, openai_organization: Optional[str] = None, @@ -50,11 +86,15 @@ class OpenAIEmbeddingsComponent(CustomComponent): request_timeout: Optional[float] = None, show_progress_bar: Optional[bool] = False, skip_empty: Optional[bool] = False, + tikToken_enable: Optional[bool] = True, tiktoken_model_name: Optional[str] = None, ) -> Union[OpenAIEmbeddings, Callable]: return OpenAIEmbeddings( - allowed_special=allowed_special, - disallowed_special=disallowed_special, + tiktoken_enabled=tikToken_enable, + default_headers=default_headers, + default_query=default_query, + allowed_special=set(allowed_special), + disallowed_special=set(disallowed_special), chunk_size=chunk_size, client=client, deployment=deployment, From f086f31e79ddd30b2e61ed5388532dc559aa151a Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 16:43:50 -0300 Subject: [PATCH 054/153] Add field type for metadata in ReadTheDocsLoaderComponent --- .../components/documentloaders/ReadTheDocsLoader.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py index 901148264..47fa35489 100644 --- a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py +++ b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from langflow.field_typing import 
Document from typing import Dict, Optional +from langchain_community.document_loaders.readthedocs import ReadTheDocsLoader class ReadTheDocsLoaderComponent(CustomComponent): @@ -9,7 +9,7 @@ class ReadTheDocsLoaderComponent(CustomComponent): def build_config(self): return { - "metadata": {"display_name": "Metadata", "default": {}}, + "metadata": {"display_name": "Metadata", "default": {},"field_type": "dict"}, "path": {"display_name": "Local directory", "required": True}, } @@ -17,5 +17,5 @@ class ReadTheDocsLoaderComponent(CustomComponent): self, path: str, metadata: Optional[Dict] = None, - ) -> Document: - return Document(path=path, metadata=metadata or {}) + ) -> ReadTheDocsLoader: + return ReadTheDocsLoader(path=path, metadata=metadata or {}).load() From 24931545f923dd54aea579009525395a28d2a867 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 16:45:31 -0300 Subject: [PATCH 055/153] Add Document type to ReadTheDocsLoaderComponent --- .../langflow/components/documentloaders/ReadTheDocsLoader.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py index 47fa35489..6f1022696 100644 --- a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py +++ b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py @@ -1,5 +1,6 @@ from langflow import CustomComponent -from typing import Dict, Optional +from typing import Dict, Optional,List +from langchain_core.documents import Document from langchain_community.document_loaders.readthedocs import ReadTheDocsLoader @@ -17,5 +18,5 @@ class ReadTheDocsLoaderComponent(CustomComponent): self, path: str, metadata: Optional[Dict] = None, - ) -> ReadTheDocsLoader: + ) -> List[Document]: return ReadTheDocsLoader(path=path, metadata=metadata or {}).load() From 737b29e89170fb48d35ecc9b68a5c4ec5c2d0e30 Mon Sep 17 00:00:00 2001 From: anovazzi1 
Date: Wed, 10 Jan 2024 16:51:43 -0300 Subject: [PATCH 056/153] Add type hints and import necessary modules in SlackDirectoryLoaderComponent --- .../documentloaders/SlackDirectoryLoader.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py index 5c4c92421..1f176ca11 100644 --- a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py +++ b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py @@ -1,7 +1,8 @@ from langflow import CustomComponent -from typing import Optional, Dict - +from typing import Optional, Dict, List +from langchain_core.documents import Document +from langchain_community.document_loaders.slack_directory import SlackDirectoryLoader class SlackDirectoryLoaderComponent(CustomComponent): display_name = "SlackDirectoryLoader" description = "Load from a `Slack` directory dump." 
@@ -9,8 +10,8 @@ class SlackDirectoryLoaderComponent(CustomComponent): def build_config(self): return { - "zip_path": {"display_name": "Path to zip file"}, - "metadata": {"display_name": "Metadata"}, + "zip_path": {"display_name": "Path to zip file","field_type": "file","file_types":[".zip"]}, + "metadata": {"display_name": "Metadata", "field_type": "dict"}, "workspace_url": {"display_name": "Workspace URL"}, } @@ -19,8 +20,5 @@ class SlackDirectoryLoaderComponent(CustomComponent): zip_path: str, metadata: Optional[Dict] = None, workspace_url: Optional[str] = None, - ) -> 'Document': - # Assuming there is a SlackDirectoryLoader class that takes these parameters - # Since the actual implementation details are not provided, this is a placeholder - # Replace SlackDirectoryLoader with the actual class that should be instantiated - return SlackDirectoryLoader(zip_path=zip_path, metadata=metadata, workspace_url=workspace_url) + ) -> List[Document]: + return SlackDirectoryLoader(zip_path=zip_path, metadata=metadata, workspace_url=workspace_url).load() From 32be1ccbc1e210954e2a11383f2e1f4b3cf3fb20 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 17:03:51 -0300 Subject: [PATCH 057/153] Update CohereEmbeddings import and add new configuration options --- .../components/embeddings/CohereEmbeddings.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/backend/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/langflow/components/embeddings/CohereEmbeddings.py index 3c5c88db9..f6b4ee1c1 100644 --- a/src/backend/langflow/components/embeddings/CohereEmbeddings.py +++ b/src/backend/langflow/components/embeddings/CohereEmbeddings.py @@ -1,7 +1,7 @@ from langflow import CustomComponent -from langchain.embeddings import CohereEmbeddings -from typing import Optional, Any +from langchain_community.embeddings.cohere import CohereEmbeddings +from typing import Optional class CohereEmbeddingsComponent(CustomComponent): @@ 
-10,24 +10,24 @@ class CohereEmbeddingsComponent(CustomComponent): def build_config(self): return { - "async_client": {"display_name": "Async Client", "advanced": True}, - "client": {"display_name": "Client", "advanced": True}, - "cohere_api_key": {"display_name": "Cohere API Key"}, + "cohere_api_key": {"display_name": "Cohere API Key","password":True}, "model": {"display_name": "Model", "default": "embed-english-v2.0", "advanced": True}, "truncate": {"display_name": "Truncate", "advanced": True}, + "max_retries": {"display_name": "Max Retries", "advanced": True}, + "user_agent": {"display_name": "User Agent", "advanced": True}, } def build( self, - async_client: Optional[Any] = None, - client: Optional[Any] = None, + request_timeout: Optional[float] = None, cohere_api_key: Optional[str] = None, model: str = "embed-english-v2.0", truncate: Optional[str] = None, + user_agent: Optional[str] = "langchain", ) -> CohereEmbeddings: return CohereEmbeddings( - async_client=async_client, - client=client, + user_agent=user_agent, + request_timeout=request_timeout, cohere_api_key=cohere_api_key, model=model, truncate=truncate, From 8fc03d06ed0f1f5e4b8015825f98c72b86527ae2 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 17:22:41 -0300 Subject: [PATCH 058/153] Refactor HuggingFaceEmbeddingsComponent to use langchain_community embeddings --- .../embeddings/HuggingFaceEmbeddings.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py index 4d8d73c4a..c3dc04ec8 100644 --- a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py +++ b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py @@ -1,7 +1,6 @@ from langflow import CustomComponent -from typing import Optional, Any, Dict -from langflow.field_typing import Embeddings - +from typing import Optional, Dict +from 
langchain_community.embeddings.huggingface import HuggingFaceEmbeddings class HuggingFaceEmbeddingsComponent(CustomComponent): display_name = "HuggingFaceEmbeddings" @@ -13,9 +12,8 @@ class HuggingFaceEmbeddingsComponent(CustomComponent): def build_config(self): return { "cache_folder": {"display_name": "Cache Folder", "advanced": True}, - "client": {"display_name": "Client", "advanced": True}, - "encode_kwargs": {"display_name": "Encode Kwargs", "advanced": True}, - "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, + "encode_kwargs": {"display_name": "Encode Kwargs", "advanced": True,"field_type":"dict"}, + "model_kwargs": {"display_name": "Model Kwargs","field_type":"dict", "advanced": True}, "model_name": {"display_name": "Model Name"}, "multi_process": {"display_name": "Multi Process", "advanced": True}, } @@ -23,15 +21,13 @@ class HuggingFaceEmbeddingsComponent(CustomComponent): def build( self, cache_folder: Optional[str] = None, - client: Optional[Any] = None, encode_kwargs: Optional[Dict] = None, model_kwargs: Optional[Dict] = None, model_name: str = "sentence-transformers/all-mpnet-base-v2", multi_process: bool = False, - ) -> Embeddings: - return Embeddings( + ) -> HuggingFaceEmbeddings: + return HuggingFaceEmbeddings( cache_folder=cache_folder, - client=client, encode_kwargs=encode_kwargs, model_kwargs=model_kwargs, model_name=model_name, From f9aba724b941ca04689657cafed961d80ca099d4 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 17:35:35 -0300 Subject: [PATCH 059/153] Update RetrievalQAComponent parameters --- .../langflow/components/chains/RetrievalQA.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/components/chains/RetrievalQA.py b/src/backend/langflow/components/chains/RetrievalQA.py index 9ed188bf5..4f0abb264 100644 --- a/src/backend/langflow/components/chains/RetrievalQA.py +++ b/src/backend/langflow/components/chains/RetrievalQA.py @@ -1,13 +1,11 @@ 
from langflow import CustomComponent -from langchain.chains import BaseRetrievalQA from typing import Optional, Union, Callable from langflow.field_typing import ( - BaseCombineDocumentsChain, BaseMemory, - BaseRetriever, -) - + BaseRetriever) +from langchain.chains.retrieval_qa.base import BaseRetrievalQA +from langchain.chains.combine_documents.base import BaseCombineDocumentsChain class RetrievalQAComponent(CustomComponent): display_name = "RetrievalQA" description = "Chain for question-answering against an index." @@ -17,8 +15,8 @@ class RetrievalQAComponent(CustomComponent): "combine_documents_chain": {"display_name": "Combine Documents Chain"}, "retriever": {"display_name": "Retriever"}, "memory": {"display_name": "Memory", "required": False}, - "input_key": {"display_name": "Input Key"}, - "output_key": {"display_name": "Output Key"}, + "input_key": {"display_name": "Input Key","advanced":True}, + "output_key": {"display_name": "Output Key","advanced":True}, "return_source_documents": {"display_name": "Return Source Documents"}, } @@ -27,9 +25,9 @@ class RetrievalQAComponent(CustomComponent): combine_documents_chain: BaseCombineDocumentsChain, retriever: BaseRetriever, memory: Optional[BaseMemory] = None, - input_key: str = "query", - output_key: str = "result", - return_source_documents: bool = True, + input_key: Optional[str] = "query", + output_key: Optional[str] = "result", + return_source_documents: Optional[bool] = True, ) -> Union[BaseRetrievalQA, Callable]: return BaseRetrievalQA( combine_documents_chain=combine_documents_chain, From db7eef597b719b095887e3e9d9c7aa40c86c1776 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 17:46:51 -0300 Subject: [PATCH 060/153] Update SQLDatabaseChain.py to use the specific chain class for SQLDatabase --- .../langflow/components/chains/SQLDatabaseChain.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/components/chains/SQLDatabaseChain.py 
b/src/backend/langflow/components/chains/SQLDatabaseChain.py index 3ea43dc75..a86555b35 100644 --- a/src/backend/langflow/components/chains/SQLDatabaseChain.py +++ b/src/backend/langflow/components/chains/SQLDatabaseChain.py @@ -1,15 +1,12 @@ from langflow import CustomComponent -from langchain.chains import Chain from typing import Callable, Union from langflow.field_typing import ( BasePromptTemplate, BaseLanguageModel, + Chain ) - -# Placeholder SQLDatabase class. In practice, replace this with the actual class or import it if available. -class SQLDatabase: - pass +from langchain_community.utilities.sql_database import SQLDatabase class SQLDatabaseChainComponent(CustomComponent): display_name = "SQLDatabaseChain" @@ -28,6 +25,4 @@ class SQLDatabaseChainComponent(CustomComponent): llm: BaseLanguageModel, prompt: BasePromptTemplate, ) -> Union[Chain, Callable]: - # Assuming there's a specific chain for SQLDatabase in the langchain library: - # Replace `Chain` with the specific chain class that interfaces with the SQLDatabase. 
return Chain(db=db, llm=llm, prompt=prompt) From 369ca587fa63406479897f5aa24da514cc3043af Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Wed, 10 Jan 2024 18:34:14 -0300 Subject: [PATCH 061/153] Replace SQLAgent with AgentExecutor in SQLAgentComponent constructor --- src/backend/langflow/components/agents/SQLAgent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/agents/SQLAgent.py b/src/backend/langflow/components/agents/SQLAgent.py index 6f96b6757..8b5781dd9 100644 --- a/src/backend/langflow/components/agents/SQLAgent.py +++ b/src/backend/langflow/components/agents/SQLAgent.py @@ -22,4 +22,4 @@ class SQLAgentComponent(CustomComponent): # Assuming there is a constructor for SQLAgent that takes these parameters # Since the actual implementation is not provided, this is a placeholder # Replace SQLAgent with the actual class name if different - return SQLAgent(llm=llm, database_uri=database_uri) + return AgentExecutor(llm=llm, database_uri=database_uri) From 030eed78045fedd788cb9e925469329ca2a67982 Mon Sep 17 00:00:00 2001 From: cristhianzl Date: Thu, 11 Jan 2024 10:02:13 -0300 Subject: [PATCH 062/153] fix(authContext.tsx): add check for isAuthenticated before attempting to logout to prevent unnecessary logout calls --- src/frontend/src/contexts/authContext.tsx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/frontend/src/contexts/authContext.tsx b/src/frontend/src/contexts/authContext.tsx index be2f279c9..18145b0af 100644 --- a/src/frontend/src/contexts/authContext.tsx +++ b/src/frontend/src/contexts/authContext.tsx @@ -104,6 +104,9 @@ export function AuthProvider({ children }): React.ReactElement { } async function logout() { + if (!isAuthenticated) { + return; + } try { await requestLogout(); cookies.remove("apikey_tkn_lflw", { path: "/" }); From 76a0f5162c26434822fa95dada77325b87f2515f Mon Sep 17 00:00:00 2001 From: cristhianzl Date: Thu, 11 Jan 2024 10:03:07 -0300 Subject: [PATCH 063/153] 
fix(authContext.tsx): fix condition in logout function to check for autoLogin instead of isAuthenticated to prevent unnecessary logout --- src/frontend/src/contexts/authContext.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/src/contexts/authContext.tsx b/src/frontend/src/contexts/authContext.tsx index 18145b0af..256712303 100644 --- a/src/frontend/src/contexts/authContext.tsx +++ b/src/frontend/src/contexts/authContext.tsx @@ -104,7 +104,7 @@ export function AuthProvider({ children }): React.ReactElement { } async function logout() { - if (!isAuthenticated) { + if (autoLogin) { return; } try { From 8a1be061de9f096507a8b865d9e61e8abb172676 Mon Sep 17 00:00:00 2001 From: cristhianzl Date: Thu, 11 Jan 2024 17:47:34 -0300 Subject: [PATCH 064/153] chore(playwright-report): fix indentation and formatting in index.html file style(App.css): add overflow: hidden to body and fix indentation and formatting --- src/frontend/playwright-report/index.html | 26 +++++++++++------------ src/frontend/src/App.css | 12 +++++++---- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/src/frontend/playwright-report/index.html b/src/frontend/playwright-report/index.html index d65e584e4..e634446de 100644 --- a/src/frontend/playwright-report/index.html +++ b/src/frontend/playwright-report/index.html @@ -1,18 +1,18 @@ - - - + + + Document - - + + - - \ No newline at end of file + + diff --git a/src/frontend/src/App.css b/src/frontend/src/App.css index a7a8bad51..c4cc99fd4 100644 --- a/src/frontend/src/App.css +++ b/src/frontend/src/App.css @@ -2,15 +2,19 @@ @tailwind components; @tailwind utilities; +body { + overflow: hidden; +} + .App { text-align: center; } .react-flow__node { - width: auto; - height: auto; - border-radius: auto; - min-width: inherit; + width: auto; + height: auto; + border-radius: auto; + min-width: inherit; } .App-logo { From 6e01053f6fbdac21a1b09882731e20c23f93422d Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 
2024 16:11:56 -0300 Subject: [PATCH 065/153] fix(App.tsx): increase the timer interval for fetching data to 20 seconds for better performance feat(App.tsx): add cleanup function to clean up flow state when component unmounts feat(PageComponent/index.tsx): add cleanFlow function to reset flow state when component unmounts fix(PageComponent/index.tsx): fix bug where newEdges were not being set correctly in onConnect function fix(PageComponent/index.tsx): remove unnecessary dependencies from onConnect function fix(PageComponent/index.tsx): fix bug where setEdges was not returning the newEdges array correctly feat(flowStore.ts): add cleanFlow function to reset flow state to initial values feat(flowStore.ts): add cleanFlow function to reset flow state to initial values in flowStore type --- src/frontend/src/App.tsx | 2 +- .../components/PageComponent/index.tsx | 53 +++++++++++-------- src/frontend/src/stores/flowStore.ts | 9 ++++ src/frontend/src/types/zustand/flow/index.ts | 1 + 4 files changed, 43 insertions(+), 22 deletions(-) diff --git a/src/frontend/src/App.tsx b/src/frontend/src/App.tsx index 3b88ee14c..2bbab91f7 100644 --- a/src/frontend/src/App.tsx +++ b/src/frontend/src/App.tsx @@ -149,7 +149,7 @@ export default function App() { .catch(() => { setFetchError(true); }); - }, 5000); + }, 20000); // Clean up the timer on component unmount return () => { diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index ae030e0b7..5852d6999 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -64,6 +64,7 @@ export default function Page({ const onEdgesChange = useFlowStore((state) => state.onEdgesChange); const setNodes = useFlowStore((state) => state.setNodes); const setEdges = useFlowStore((state) => state.setEdges); + const cleanFlow = useFlowStore((state) => 
state.cleanFlow); const deleteNode = useFlowStore((state) => state.deleteNode); const deleteEdge = useFlowStore((state) => state.deleteEdge); const undo = useFlowsManagerStore((state) => state.undo); @@ -170,6 +171,12 @@ export default function Page({ } }, [currentFlowId, reactFlowInstance]); + useEffect(() => { + return () => { + cleanFlow(); + } + }, []) + const onConnectMod = useCallback( (params: Connection) => { takeSnapshot(); @@ -210,26 +217,30 @@ export default function Page({ const onConnect = useCallback( (connection: Connection) => { - const newEdges = addEdge( - { - ...connection, - data: { - targetHandle: scapeJSONParse(connection.targetHandle!), - sourceHandle: scapeJSONParse(connection.sourceHandle!), + let newEdges:Edge[] = [] + setEdges((oldEdges) => { + newEdges = addEdge( + { + ...connection, + data: { + targetHandle: scapeJSONParse(connection.targetHandle!), + sourceHandle: scapeJSONParse(connection.sourceHandle!), + }, + style: { stroke: "#555" }, + className: + ((scapeJSONParse(connection.targetHandle!) as targetHandleType) + .type === "Text" + ? "stroke-foreground " + : "stroke-foreground ") + " stroke-connection", + animated: + (scapeJSONParse(connection.targetHandle!) as targetHandleType) + .type === "Text", }, - style: { stroke: "#555" }, - className: - ((scapeJSONParse(connection.targetHandle!) as targetHandleType) - .type === "Text" - ? "stroke-foreground " - : "stroke-foreground ") + " stroke-connection", - animated: - (scapeJSONParse(connection.targetHandle!) as targetHandleType) - .type === "Text", - }, - edges - ); - setEdges(newEdges); + oldEdges + ); + return newEdges; + + }) useFlowsManagerStore .getState() .autoSaveCurrentFlow( @@ -238,7 +249,7 @@ export default function Page({ reactFlowInstance?.getViewport() ?? { x: 0, y: 0, zoom: 1 } ); }, - [nodes, edges, setEdges, reactFlowInstance, addEdge] + [nodes, setEdges, reactFlowInstance, addEdge] ); const onDrop = useCallback( @@ -357,7 +368,7 @@ export default function Page({
{Object.keys(templates).length > 0 && - Object.keys(types).length > 0 ? ( + Object.keys(types).length > 0 ? (
((set, get) => ({ setLastCopiedSelection: (newSelection) => { set({ lastCopiedSelection: newSelection }); }, + cleanFlow: () => { + set({ + nodes: [], + edges: [], + flowState: undefined, + sseData: {}, + isBuilt: false, + }); + }, })); export default useFlowStore; diff --git a/src/frontend/src/types/zustand/flow/index.ts b/src/frontend/src/types/zustand/flow/index.ts index 9c331892f..50d2fd45b 100644 --- a/src/frontend/src/types/zustand/flow/index.ts +++ b/src/frontend/src/types/zustand/flow/index.ts @@ -49,4 +49,5 @@ export type FlowStoreType = { ) => void; isBuilt: boolean; setIsBuilt: (isBuilt: boolean) => void; + cleanFlow: () => void; }; From 6e8c00ca75dc443f9ed54c8b91d1b573031a6eed Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 16:20:15 -0300 Subject: [PATCH 066/153] Update setFilterEdge references in code --- .../GenericNode/components/parameterComponent/index.tsx | 2 +- .../src/pages/FlowPage/components/PageComponent/index.tsx | 2 +- .../FlowPage/components/extraSidebarComponent/index.tsx | 4 ++-- src/frontend/src/stores/flowStore.ts | 5 +++++ src/frontend/src/stores/typesStore.ts | 5 +---- src/frontend/src/types/zustand/flow/index.ts | 2 ++ src/frontend/src/types/zustand/types/index.ts | 2 -- 7 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index a76a15d17..9c1b4bbf1 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -71,7 +71,7 @@ export default function ParameterComponent({ const groupedEdge = useRef(null); - const setFilterEdge = useTypesStore((state) => state.setFilterEdge); + const setFilterEdge = useFlowStore((state) => state.setFilterEdge); let disabled = edges.some( diff --git 
a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index 5852d6999..2e2c2bac7 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -50,7 +50,7 @@ export default function Page({ ); const types = useTypesStore((state) => state.types); const templates = useTypesStore((state) => state.templates); - const setFilterEdge = useTypesStore((state) => state.setFilterEdge); + const setFilterEdge = useFlowStore((state) => state.setFilterEdge); const reactFlowWrapper = useRef(null); const reactFlowInstance = useFlowStore((state) => state.reactFlowInstance); diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index b610663ac..f9e904581 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -29,8 +29,8 @@ import SidebarDraggableComponent from "./sideBarDraggableComponent"; export default function ExtraSidebar(): JSX.Element { const data = useTypesStore((state) => state.data); const templates = useTypesStore((state) => state.templates); - const getFilterEdge = useTypesStore((state) => state.getFilterEdge); - const setFilterEdge = useTypesStore((state) => state.setFilterEdge); + const getFilterEdge = useFlowStore((state) => state.getFilterEdge); + const setFilterEdge = useFlowStore((state) => state.setFilterEdge); const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow); const currentFlow = useFlowsManagerStore((state) => state.currentFlow); const hasStore = useStoreStore((state) => state.hasStore); diff --git a/src/frontend/src/stores/flowStore.ts b/src/frontend/src/stores/flowStore.ts index 76929cba1..cf041ee0f 100644 --- 
a/src/frontend/src/stores/flowStore.ts +++ b/src/frontend/src/stores/flowStore.ts @@ -257,8 +257,13 @@ const useFlowStore = create((set, get) => ({ flowState: undefined, sseData: {}, isBuilt: false, + getFilterEdge: [], }); }, + setFilterEdge: (newState) => { + set({ getFilterEdge: newState }); + }, + getFilterEdge: [] })); export default useFlowStore; diff --git a/src/frontend/src/stores/typesStore.ts b/src/frontend/src/stores/typesStore.ts index e2ab3dfc4..90f2c5b9b 100644 --- a/src/frontend/src/stores/typesStore.ts +++ b/src/frontend/src/stores/typesStore.ts @@ -9,7 +9,6 @@ export const useTypesStore = create((set, get) => ({ types: {}, templates: {}, data: {}, - getFilterEdge: [], getTypes: () => { return new Promise(async (resolve, reject) => { getAll() @@ -44,7 +43,5 @@ export const useTypesStore = create((set, get) => ({ let newChange = typeof change === "function" ? change(get().data) : change; set({ data: newChange }); }, - setFilterEdge: (newState) => { - set({ getFilterEdge: newState }); - }, + })); diff --git a/src/frontend/src/types/zustand/flow/index.ts b/src/frontend/src/types/zustand/flow/index.ts index 50d2fd45b..9b76a5347 100644 --- a/src/frontend/src/types/zustand/flow/index.ts +++ b/src/frontend/src/types/zustand/flow/index.ts @@ -50,4 +50,6 @@ export type FlowStoreType = { isBuilt: boolean; setIsBuilt: (isBuilt: boolean) => void; cleanFlow: () => void; + setFilterEdge: (newState) => void; + getFilterEdge: any[]; }; diff --git a/src/frontend/src/types/zustand/types/index.ts b/src/frontend/src/types/zustand/types/index.ts index f18d8b22e..133afbda1 100644 --- a/src/frontend/src/types/zustand/types/index.ts +++ b/src/frontend/src/types/zustand/types/index.ts @@ -8,6 +8,4 @@ export type TypesStoreType = { data: APIDataType; setData: (newState: {}) => void; getTypes: () => Promise; - setFilterEdge: (newState) => void; - getFilterEdge: any[]; }; From 88118ef5c52b56740addf22ddf691e2e229e68e6 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 
2024 16:37:59 -0300 Subject: [PATCH 067/153] Update ChatOpenAIComponent with NestedDict support --- src/backend/langflow/components/llms/ChatOpenAI.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py index e6b1c2bd0..60521d483 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -1,8 +1,8 @@ from langflow import CustomComponent from langchain.llms import BaseLLM -from typing import Optional, Dict, Union, Any +from typing import Optional, Union from langchain_community.chat_models.openai import ChatOpenAI -from langflow.field_typing import BaseLanguageModel +from langflow.field_typing import BaseLanguageModel, NestedDict class ChatOpenAIComponent(CustomComponent): @@ -13,7 +13,7 @@ class ChatOpenAIComponent(CustomComponent): return { "max_tokens": { "display_name": "Max Tokens", - "field_type": "int", + "field_type": "NestedDict", "advanced": False, "required": False, }, @@ -64,14 +64,12 @@ class ChatOpenAIComponent(CustomComponent): def build( self, max_tokens: Optional[int] = None, - model_kwargs: Optional[Dict[str, Any]] = None, + model_kwargs: Optional[NestedDict] = None, model_name: Optional[str] = "gpt-4-1106-preview", openai_api_base: Optional[str] = None, openai_api_key: Optional[str] = None, temperature: float = 0.7, ) -> Union[BaseLanguageModel, BaseLLM]: - # Assuming there is a class `ChatOpenAI` that takes these parameters - # The `ChatOpenAI` class must be imported or defined elsewhere in the actual implementation return ChatOpenAI( max_tokens=max_tokens, model_kwargs=model_kwargs, From 1afaf587afc400a97c2a60f0e3c7e029901bfc8d Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 16:53:22 -0300 Subject: [PATCH 068/153] Update ChatOpenAIComponent parameters --- src/backend/langflow/components/llms/ChatOpenAI.py | 10 ++++++---- 1 file changed, 6 
insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py index 60521d483..8663747bf 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -13,13 +13,13 @@ class ChatOpenAIComponent(CustomComponent): return { "max_tokens": { "display_name": "Max Tokens", - "field_type": "NestedDict", + "field_type": "int", "advanced": False, "required": False, }, "model_kwargs": { "display_name": "Model Kwargs", - "field_type": "dict", + "field_type": "NestedDict", "advanced": True, "required": False, }, @@ -63,13 +63,15 @@ class ChatOpenAIComponent(CustomComponent): def build( self, - max_tokens: Optional[int] = None, - model_kwargs: Optional[NestedDict] = None, + max_tokens: Optional[int] = 256, + model_kwargs: Optional[NestedDict] = {}, model_name: Optional[str] = "gpt-4-1106-preview", openai_api_base: Optional[str] = None, openai_api_key: Optional[str] = None, temperature: float = 0.7, ) -> Union[BaseLanguageModel, BaseLLM]: + if(not openai_api_base): + openai_api_base = "https://api.openai.com/v1" return ChatOpenAI( max_tokens=max_tokens, model_kwargs=model_kwargs, From 6290837d15727d294ea84b09efc3425cab911820 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 17:25:33 -0300 Subject: [PATCH 069/153] Refactor CSVAgent to use create_csv_agent function --- src/backend/langflow/components/agents/CSVAgent.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/agents/CSVAgent.py b/src/backend/langflow/components/agents/CSVAgent.py index af3766c74..30037dff2 100644 --- a/src/backend/langflow/components/agents/CSVAgent.py +++ b/src/backend/langflow/components/agents/CSVAgent.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, AgentExecutor - +from langchain_experimental.agents.agent_toolkits.csv.base import 
create_csv_agent class CSVAgentComponent(CustomComponent): display_name = "CSVAgent" @@ -10,7 +10,7 @@ class CSVAgentComponent(CustomComponent): def build_config(self): return { "llm": {"display_name": "LLM", "type": BaseLanguageModel}, - "path": {"display_name": "Path", "type": "file", "suffixes": [".csv"], "file_types": ["csv"]}, + "path": {"display_name": "Path", "field_type": "file", "suffixes": [".csv"], "file_types": [".csv"]}, } def build( @@ -19,4 +19,4 @@ class CSVAgentComponent(CustomComponent): path: str, ) -> AgentExecutor: # Instantiate and return the CSV agent class with the provided llm and path - return AgentExecutor(llm=llm, path=path) + return create_csv_agent(llm=llm, path=path) From 0101a5fde9b6bb81320bd8fddb64c2246eb37c4d Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Mon, 15 Jan 2024 17:46:24 -0300 Subject: [PATCH 070/153] Refactor: Change onConnection function to zustand store --- .../components/PageComponent/index.tsx | 38 +------------------ src/frontend/src/stores/flowStore.ts | 35 ++++++++++++++++- src/frontend/src/types/zustand/flow/index.ts | 2 + 3 files changed, 37 insertions(+), 38 deletions(-) diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index 2e2c2bac7..57716b5b5 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -78,6 +78,7 @@ export default function Page({ const setLastCopiedSelection = useFlowStore( (state) => state.setLastCopiedSelection ); + const onConnect = useFlowStore((state) => state.onConnect); const position = useRef({ x: 0, y: 0 }); const [lastSelection, setLastSelection] = @@ -215,43 +216,6 @@ export default function Page({ } }, []); - const onConnect = useCallback( - (connection: Connection) => { - let newEdges:Edge[] = [] - setEdges((oldEdges) => { - newEdges = addEdge( - { - ...connection, - data: { - 
targetHandle: scapeJSONParse(connection.targetHandle!), - sourceHandle: scapeJSONParse(connection.sourceHandle!), - }, - style: { stroke: "#555" }, - className: - ((scapeJSONParse(connection.targetHandle!) as targetHandleType) - .type === "Text" - ? "stroke-foreground " - : "stroke-foreground ") + " stroke-connection", - animated: - (scapeJSONParse(connection.targetHandle!) as targetHandleType) - .type === "Text", - }, - oldEdges - ); - return newEdges; - - }) - useFlowsManagerStore - .getState() - .autoSaveCurrentFlow( - nodes, - newEdges, - reactFlowInstance?.getViewport() ?? { x: 0, y: 0, zoom: 1 } - ); - }, - [nodes, setEdges, reactFlowInstance, addEdge] - ); - const onDrop = useCallback( (event: React.DragEvent) => { event.preventDefault(); diff --git a/src/frontend/src/stores/flowStore.ts b/src/frontend/src/stores/flowStore.ts index cf041ee0f..d8338428b 100644 --- a/src/frontend/src/stores/flowStore.ts +++ b/src/frontend/src/stores/flowStore.ts @@ -263,7 +263,40 @@ const useFlowStore = create((set, get) => ({ setFilterEdge: (newState) => { set({ getFilterEdge: newState }); }, - getFilterEdge: [] + getFilterEdge: [], + onConnect: (connection) => { + let newEdges: Edge[] = [] + get().setEdges((oldEdges) => { + newEdges = addEdge( + { + ...connection, + data: { + targetHandle: scapeJSONParse(connection.targetHandle!), + sourceHandle: scapeJSONParse(connection.sourceHandle!), + }, + style: { stroke: "#555" }, + className: + ((scapeJSONParse(connection.targetHandle!) as targetHandleType) + .type === "Text" + ? "stroke-foreground " + : "stroke-foreground ") + " stroke-connection", + animated: + (scapeJSONParse(connection.targetHandle!) as targetHandleType) + .type === "Text", + }, + oldEdges + ); + return newEdges; + + }) + useFlowsManagerStore + .getState() + .autoSaveCurrentFlow( + get().nodes, + newEdges, + get().reactFlowInstance?.getViewport() ?? 
{ x: 0, y: 0, zoom: 1 } + ); + }, })); export default useFlowStore; diff --git a/src/frontend/src/types/zustand/flow/index.ts b/src/frontend/src/types/zustand/flow/index.ts index 9b76a5347..273a1958f 100644 --- a/src/frontend/src/types/zustand/flow/index.ts +++ b/src/frontend/src/types/zustand/flow/index.ts @@ -1,4 +1,5 @@ import { + Connection, Edge, Node, OnEdgesChange, @@ -52,4 +53,5 @@ export type FlowStoreType = { cleanFlow: () => void; setFilterEdge: (newState) => void; getFilterEdge: any[]; + onConnect: (connection: Connection) => void; }; From f322c77c5c3ff30a9cedc283e7fd2ebf71f0d15e Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 17:52:10 -0300 Subject: [PATCH 071/153] Refactor JsonAgentComponent to use create_json_agent function --- src/backend/langflow/components/agents/JsonAgent.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/components/agents/JsonAgent.py b/src/backend/langflow/components/agents/JsonAgent.py index 71b276e26..9f11c5088 100644 --- a/src/backend/langflow/components/agents/JsonAgent.py +++ b/src/backend/langflow/components/agents/JsonAgent.py @@ -1,12 +1,10 @@ from langflow import CustomComponent -from langchain.agents import AgentExecutor -from typing import Callable +from langchain.agents import AgentExecutor,create_json_agent from langflow.field_typing import ( BaseLanguageModel, ) from langchain_community.agent_toolkits.base import BaseToolkit - class JsonAgentComponent(CustomComponent): display_name = "JsonAgent" description = "Construct a json agent from an LLM and tools." 
@@ -21,5 +19,5 @@ class JsonAgentComponent(CustomComponent): self, llm: BaseLanguageModel, toolkit: BaseToolkit, - ) -> Callable: - return AgentExecutor(llm=llm, toolkit=toolkit) + ) -> AgentExecutor: + return create_json_agent(llm=llm, toolkit=toolkit[0]) \ No newline at end of file From a9086a4e41b9b4756b37d09f0fe6d79edccdfe97 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 17:54:40 -0300 Subject: [PATCH 072/153] Fix parameter in create_json_agent function --- src/backend/langflow/components/agents/JsonAgent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/agents/JsonAgent.py b/src/backend/langflow/components/agents/JsonAgent.py index 9f11c5088..2f45b037d 100644 --- a/src/backend/langflow/components/agents/JsonAgent.py +++ b/src/backend/langflow/components/agents/JsonAgent.py @@ -20,4 +20,4 @@ class JsonAgentComponent(CustomComponent): llm: BaseLanguageModel, toolkit: BaseToolkit, ) -> AgentExecutor: - return create_json_agent(llm=llm, toolkit=toolkit[0]) \ No newline at end of file + return create_json_agent(llm=llm, toolkit=toolkit) \ No newline at end of file From 3659b401ffb95c1bed40476a97edf9b1b1604dfd Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 18:02:32 -0300 Subject: [PATCH 073/153] Add AZLyricsLoader import --- .../langflow/components/documentloaders/AZLyricsLoader.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py index 66e8e86d9..cef344234 100644 --- a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py +++ b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py @@ -1,6 +1,7 @@ from langflow import CustomComponent from langflow.field_typing import Document from typing import Optional, Dict +from langchain_community.document_loaders.azlyrics import AZLyricsLoader class 
AZLyricsLoaderComponent(CustomComponent): From d0daf5ad8397666020e235bb14c6ce4effd7ded2 Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Mon, 15 Jan 2024 22:12:55 +0100 Subject: [PATCH 074/153] Fixed handles not showing when changing the output of the custom code --- .../components/parameterComponent/index.tsx | 23 ++++++++++++++----- .../components/PageComponent/index.tsx | 2 +- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 9c1b4bbf1..09af96c6f 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -1,6 +1,6 @@ import { cloneDeep } from "lodash"; import React, { ReactNode, useEffect, useRef, useState } from "react"; -import { Handle, Position } from "reactflow"; +import { Handle, Position, useUpdateNodeInternals } from "reactflow"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import CodeAreaComponent from "../../../../components/codeAreaComponent"; import DictComponent from "../../../../components/dictComponent"; @@ -124,27 +124,32 @@ export default function ParameterComponent({ renderTooltips(); }; + const updateNodeInternals = useUpdateNodeInternals(); + const handleNodeClass = (newNodeClass: APIClassType, code?: string): void => { if (!data.node) return; if (data.node!.template[name].value !== code) { takeSnapshot(); } - + + setNode(data.id, (oldNode) => { let newNode = cloneDeep(oldNode); - + newNode.data = { ...newNode.data, node: newNodeClass, description: newNodeClass.description ?? data.node!.description, display_name: newNodeClass.display_name ?? 
data.node!.display_name, }; - + newNode.data.node.template[name].value = code; - + return newNode; }); - + + updateNodeInternals(data.id); + renderTooltips(); }; @@ -268,6 +273,9 @@ export default function ParameterComponent({ { From 59a1370c2d4bf6c358de59f9de63a0919724da00 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 18:34:22 -0300 Subject: [PATCH 075/153] Refactor document loaders to support metadata --- .../components/documentloaders/AZLyricsLoader.py | 12 ++++++++---- .../documentloaders/AirbyteJSONLoader.py | 16 ++++++++++------ 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py index cef344234..f0a9236c1 100644 --- a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py +++ b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py @@ -16,7 +16,11 @@ class AZLyricsLoaderComponent(CustomComponent): } def build(self, metadata: Optional[Dict] = None, web_path: str = "") -> Document: - # Assuming there is a class AZLyricsLoader that takes metadata and web_path as parameters - # and returns a Document object. Replace AZLyricsLoader with the actual class name if different. - # The import statement for AZLyricsLoader is assumed to be added above. 
- return AZLyricsLoader(metadata=metadata, web_path=web_path) + documents = AZLyricsLoader(web_path=web_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents \ No newline at end of file diff --git a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py index f1f22e45c..89a271945 100644 --- a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py +++ b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py @@ -1,6 +1,7 @@ from langflow import CustomComponent from langflow.field_typing import Document from typing import Optional, Dict +from langchain_community.document_loaders.airbyte_json import AirbyteJSONLoader class AirbyteJSONLoaderComponent(CustomComponent): @@ -14,8 +15,7 @@ class AirbyteJSONLoaderComponent(CustomComponent): return { "file_path": { "display_name": "File Path", - "type": "file", - "fileTypes": ["json"], + "file_types": [".json"], "required": True, "field_type": "file", }, @@ -27,7 +27,11 @@ class AirbyteJSONLoaderComponent(CustomComponent): } def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - # Assuming there is a function or class named AirbyteJSONLoader that takes file_path and metadata as parameters - # and returns a Document object. Replace AirbyteJSONLoader with the actual class or function name. - # The actual implementation here is a placeholder and should be adapted to the real AirbyteJSONLoader class or function. 
- return AirbyteJSONLoader(file_path=file_path, metadata=metadata) + documents = AirbyteJSONLoader(file_path=file_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents From b874fca246af1b0207a6ce5692b131839510cc19 Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Mon, 15 Jan 2024 18:59:46 -0300 Subject: [PATCH 076/153] Fix: CSVLoader CC not working --- .../langflow/components/documentloaders/CSVLoader.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/CSVLoader.py b/src/backend/langflow/components/documentloaders/CSVLoader.py index 63ce29ba1..f7d49da77 100644 --- a/src/backend/langflow/components/documentloaders/CSVLoader.py +++ b/src/backend/langflow/components/documentloaders/CSVLoader.py @@ -28,4 +28,11 @@ class CSVLoaderComponent(CustomComponent): file_path: str, metadata: dict ) -> List[Document]: - return CSVLoader(file_path=file_path, metadata=metadata).load() + documents = CSVLoader(file_path=file_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents From 69f2f511617f463ba3e4f258c15c835c78d926ac Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 19:34:47 -0300 Subject: [PATCH 077/153] Refactor document loaders to update metadata --- .../components/documentloaders/EverNoteLoader.py | 16 ++++++++++------ .../components/documentloaders/GitbookLoader.py | 11 ++++++++--- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/EverNoteLoader.py b/src/backend/langflow/components/documentloaders/EverNoteLoader.py index 1d8c95b34..02d8882d3 100644 --- a/src/backend/langflow/components/documentloaders/EverNoteLoader.py +++ 
b/src/backend/langflow/components/documentloaders/EverNoteLoader.py @@ -1,7 +1,7 @@ from langflow import CustomComponent from langflow.field_typing import Document from typing import Optional, Dict - +from langchain_community.document_loaders.evernote import EverNoteLoader class EverNoteLoaderComponent(CustomComponent): display_name = "EverNoteLoader" @@ -13,10 +13,9 @@ class EverNoteLoaderComponent(CustomComponent): "file_path": { "display_name": "File Path", "required": True, - "suffixes": [".xml"], "show": True, "type": "file", - "file_types": ["xml"], + "file_types": [".xml"], "field_type": "file", }, "metadata": { @@ -28,6 +27,11 @@ class EverNoteLoaderComponent(CustomComponent): } def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - # Assuming there is a function or class named `EverNoteLoader` that takes these parameters - # and returns a `Document` object. Replace `EverNoteLoader` with the actual implementation. - return EverNoteLoader(file_path=file_path, metadata=metadata) + documents = EverNoteLoader(file_path=file_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents diff --git a/src/backend/langflow/components/documentloaders/GitbookLoader.py b/src/backend/langflow/components/documentloaders/GitbookLoader.py index 5811403bb..e859eccf0 100644 --- a/src/backend/langflow/components/documentloaders/GitbookLoader.py +++ b/src/backend/langflow/components/documentloaders/GitbookLoader.py @@ -22,6 +22,11 @@ class GitbookLoaderComponent(CustomComponent): } def build(self, metadata: Optional[Dict] = None, web_page: str = "") -> Document: - # Assuming there is a GitbookLoader class that takes metadata and web_page as parameters - # Replace 'GitbookLoader' with the actual class name if different - return GitbookLoader(metadata=metadata, web_page=web_page) + documents = GitbookLoader(web_page=web_page).load() + 
if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents From 64bcb971ad850516ffbfc2345ffef10e27a29041 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 19:38:59 -0300 Subject: [PATCH 078/153] Refactor HNLoaderComponent load method to update document metadata --- .../langflow/components/documentloaders/HNLoader.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/HNLoader.py b/src/backend/langflow/components/documentloaders/HNLoader.py index 5d3cb768e..d54591d46 100644 --- a/src/backend/langflow/components/documentloaders/HNLoader.py +++ b/src/backend/langflow/components/documentloaders/HNLoader.py @@ -27,7 +27,11 @@ class HNLoaderComponent(CustomComponent): web_path: str, metadata: Optional[Dict] = None, ) -> HNLoader: - # Assuming that there's a specific loader for Hacker News - # as HNloader does not take a web_path argument - # The HackerNewsLoader needs to be defined somewhere in the actual implementation - return HNLoader(metadata=metadata, web_path=web_path) + documents = HNLoader(web_path=web_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents From 310886b0ba182cc729a9de99196cd93433acb2f7 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 19:49:04 -0300 Subject: [PATCH 079/153] Refactor IMSDbLoaderComponent to update document metadata --- .../langflow/components/documentloaders/IMSDbLoader.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/IMSDbLoader.py b/src/backend/langflow/components/documentloaders/IMSDbLoader.py index 99c3b75d3..37c2c542c 100644 --- a/src/backend/langflow/components/documentloaders/IMSDbLoader.py +++ 
b/src/backend/langflow/components/documentloaders/IMSDbLoader.py @@ -20,4 +20,11 @@ class IMSDbLoaderComponent(CustomComponent): metadata: Optional[Dict] = None, web_path: str = "", ) -> Document: - return IMSDbLoader(metadata=metadata, web_path=web_path) + documents = IMSDbLoader(web_path=web_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents From 0881f0600d7e760cd0a19dcddb67d5ce8ee5aee6 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 19:57:39 -0300 Subject: [PATCH 080/153] Refactor document loaders to support metadata --- .../CollegeConfidentialLoader.py | 12 +++++++++--- .../documentloaders/FacebookChatLoader.py | 18 ++++++++++-------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py index 654932fd5..44fc8c9ab 100644 --- a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py +++ b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py @@ -2,6 +2,7 @@ from langflow import CustomComponent from langchain.docstore.document import Document from typing import Optional +from langchain_community.document_loaders.college_confidential import CollegeConfidentialLoader class CollegeConfidentialLoaderComponent(CustomComponent): display_name = "CollegeConfidentialLoader" @@ -19,6 +20,11 @@ class CollegeConfidentialLoaderComponent(CustomComponent): web_path: str, metadata: Optional[dict] = {} ) -> Document: - # Assuming there is a loader class `CollegeConfidentialLoader` that takes `metadata` and `web_path` as arguments - # Replace `CollegeConfidentialLoader` with the actual class name if different - return CollegeConfidentialLoader(web_path=web_path, metadata=metadata) + documents = 
CollegeConfidentialLoader(web_path=web_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents diff --git a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py index d5b3c1dd1..deb636eac 100644 --- a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py +++ b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py @@ -2,7 +2,7 @@ from langflow import CustomComponent from langchain.docstore.document import Document from typing import Optional, Dict - +from langchain_community.document_loaders.facebook_chat import FacebookChatLoader class FacebookChatLoaderComponent(CustomComponent): display_name = "FacebookChatLoader" description = "Load `Facebook Chat` messages directory dump." @@ -13,8 +13,7 @@ class FacebookChatLoaderComponent(CustomComponent): "file_path": { "display_name": "File Path", "required": True, - "suffixes": [".json"], - "file_types": ["json"], + "file_types": [".json"], "field_type": "file", }, "metadata": { @@ -25,8 +24,11 @@ class FacebookChatLoaderComponent(CustomComponent): } def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - # Assuming there is a class named FacebookChatLoader that takes file_path and metadata as parameters - # and returns a Document object. Replace 'FacebookChatLoader' with the actual class name. - # As per the JSON, the output type is 'Document', which is part of langchain.documents. - # Therefore, the 'FacebookChatLoader' should be imported or defined elsewhere in the codebase. 
- return FacebookChatLoader(file_path=file_path, metadata=metadata) \ No newline at end of file + documents = FacebookChatLoader(file_path=file_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents \ No newline at end of file From 184b66f5ab2ee166651838b7b97c0d91f7f8772a Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 20:06:29 -0300 Subject: [PATCH 081/153] Refactor ReadTheDocsLoaderComponent to update document metadata --- .../components/documentloaders/ReadTheDocsLoader.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py index 6f1022696..1c78b7e34 100644 --- a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py +++ b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py @@ -19,4 +19,11 @@ class ReadTheDocsLoaderComponent(CustomComponent): path: str, metadata: Optional[Dict] = None, ) -> List[Document]: - return ReadTheDocsLoader(path=path, metadata=metadata or {}).load() + documents = ReadTheDocsLoader(path=path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents \ No newline at end of file From 26a9d5d670368f3a052d3241914335f60d60edde Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 20:08:43 -0300 Subject: [PATCH 082/153] Refactor CoNLLULoaderComponent to use CoNLLULoader from langchain_community --- .../documentloaders/CoNLLULoader.py | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py index 7159ba707..ccc912c0c 100644 --- 
a/src/backend/langflow/components/documentloaders/CoNLLULoader.py +++ b/src/backend/langflow/components/documentloaders/CoNLLULoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langchain.docstore.document import Document -from langflow.field_typing import TemplateField +from langchain_community.document_loaders.conllu import CoNLLULoader class CoNLLULoaderComponent(CustomComponent): @@ -17,15 +17,19 @@ class CoNLLULoaderComponent(CustomComponent): "file_types": ["conllu"], "field_type": "file", }, - "metadata": TemplateField( - display_name="Metadata", - required=False, - type="dict", - ), + "metadata": { + "display_name": "Metadata", + "field_type": "dict", + "required": False, + }, } def build(self, file_path: str, metadata: dict) -> Document: - # Here, you would use the actual class that loads CoNLL-U files. - # As I don't have the specific class, I'm returning an instance of Document. - # In a real scenario, you should replace the below Document with the actual loader class. 
- return Document(file_path=file_path, metadata=metadata) + documents = CoNLLULoader(file_path=file_path).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents From fbd64033dc955940b091ddd97bad30325798363e Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 20:25:43 -0300 Subject: [PATCH 083/153] Update ZeroShotAgent and OpenAIComponent --- src/backend/langflow/components/agents/ZeroShotAgent.py | 8 +++----- src/backend/langflow/components/llms/OpenAI.py | 4 +++- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/components/agents/ZeroShotAgent.py b/src/backend/langflow/components/agents/ZeroShotAgent.py index bdc402059..2ec37fa7d 100644 --- a/src/backend/langflow/components/agents/ZeroShotAgent.py +++ b/src/backend/langflow/components/agents/ZeroShotAgent.py @@ -3,9 +3,7 @@ from langflow import CustomComponent from langchain.agents import ZeroShotAgent from langchain_core.tools import BaseTool from typing import List, Optional -from langflow.field_typing import ( - BaseLanguageModel, -) +from langflow.components.chains.LLMChain import LLMChain class ZeroShotAgentComponent(CustomComponent): display_name = "ZeroShotAgent" @@ -21,9 +19,9 @@ class ZeroShotAgentComponent(CustomComponent): def build( self, - llm: BaseLanguageModel, + llm: LLMChain, tools: List[BaseTool], prefix: Optional[str] = "Answer the following questions as best you can. 
You have access to the following tools:", suffix: Optional[str] = "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}", ) -> ZeroShotAgent: - return ZeroShotAgent(llm=llm, tools=tools, prefix=prefix, suffix=suffix) + return ZeroShotAgent(llm_chain=llm, tools=tools, prefix=prefix, suffix=suffix) diff --git a/src/backend/langflow/components/llms/OpenAI.py b/src/backend/langflow/components/llms/OpenAI.py index 5c8d38730..15091f6e7 100644 --- a/src/backend/langflow/components/llms/OpenAI.py +++ b/src/backend/langflow/components/llms/OpenAI.py @@ -42,10 +42,12 @@ class OpenAIComponent(CustomComponent): max_tokens: Optional[int] = 256, model_kwargs: Optional[Dict] = None, model_name: Optional[str] = "text-davinci-003", - openai_api_base: Optional[str] = "https://api.openai.com/v1", + openai_api_base: Optional[str] = "", openai_api_key: str = "", temperature: Optional[float] = 0.7, ) -> OpenAI: + if(not openai_api_base): + openai_api_base = "https://api.openai.com/v1" return OpenAI( max_tokens=max_tokens, model_kwargs=model_kwargs or {}, From 6fe02442dd50a65d389c234f8f95034172dbeb67 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 20:47:06 -0300 Subject: [PATCH 084/153] Add SQLAgentComponent build method to create SQL agent with SQLDatabaseToolkit --- src/backend/langflow/components/agents/SQLAgent.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/components/agents/SQLAgent.py b/src/backend/langflow/components/agents/SQLAgent.py index 8b5781dd9..7b1865637 100644 --- a/src/backend/langflow/components/agents/SQLAgent.py +++ b/src/backend/langflow/components/agents/SQLAgent.py @@ -3,7 +3,10 @@ from langflow import CustomComponent from typing import Union, Callable from langchain.agents import AgentExecutor from langflow.field_typing import BaseLanguageModel - +from langchain_community.agent_toolkits.sql.base import create_sql_agent +from langchain.sql_database import SQLDatabase +from 
langchain_community.agent_toolkits import SQLDatabaseToolkit + class SQLAgentComponent(CustomComponent): display_name = "SQLAgent" description = "Construct an SQL agent from an LLM and tools." @@ -12,14 +15,15 @@ class SQLAgentComponent(CustomComponent): return { "llm": {"display_name": "LLM"}, "database_uri": {"display_name": "Database URI"}, + "verbose": {"display_name": "Verbose", "value": False,"advanced": True}, } def build( self, llm: BaseLanguageModel, database_uri: str, + verbose: bool = False, ) -> Union[AgentExecutor, Callable]: - # Assuming there is a constructor for SQLAgent that takes these parameters - # Since the actual implementation is not provided, this is a placeholder - # Replace SQLAgent with the actual class name if different - return AgentExecutor(llm=llm, database_uri=database_uri) + db = SQLDatabase.from_uri(database_uri) + toolkit = SQLDatabaseToolkit(db=db, llm=llm) + return create_sql_agent(llm=llm, toolkit=toolkit) From 3d4a4c1fb5ac8f3e2a9e2a6e56c518e906fc32fc Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 22:12:36 -0300 Subject: [PATCH 085/153] Refactor SlackDirectoryLoaderComponent to update document metadata --- .../components/documentloaders/SlackDirectoryLoader.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py index 1f176ca11..6a38d1488 100644 --- a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py +++ b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py @@ -21,4 +21,11 @@ class SlackDirectoryLoaderComponent(CustomComponent): metadata: Optional[Dict] = None, workspace_url: Optional[str] = None, ) -> List[Document]: - return SlackDirectoryLoader(zip_path=zip_path, metadata=metadata, workspace_url=workspace_url).load() + documents = 
SlackDirectoryLoader(zip_path=zip_path,workspace_url=workspace_url).load() + if(metadata): + for document in documents: + if not document.metadata: + document.metadata = metadata + else: + document.metadata.update(metadata) + return documents From 6d217fb81eeae230f33f3e9c3ea329e1c3c7767c Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 22:16:36 -0300 Subject: [PATCH 086/153] Add optional parameter 'k' to BingSearchAPIWrapperComponent constructor --- .../langflow/components/utilities/BingSearchAPIWrapper.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py index abe10d5bf..22443d021 100644 --- a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py @@ -1,4 +1,5 @@ +from typing import Optional from langflow import CustomComponent # Assuming `BingSearchAPIWrapper` is a class that exists in the context @@ -18,6 +19,7 @@ class BingSearchAPIWrapperComponent(CustomComponent): "display_name": "Bing Subscription Key", "password": True, }, + "k": {"display_name": "Number of results", "advanced": True}, # 'k' is not included as it is not shown (show=False) } @@ -25,10 +27,11 @@ class BingSearchAPIWrapperComponent(CustomComponent): self, bing_search_url: str, bing_subscription_key: str, + k: Optional[int] = 10, ) -> BingSearchAPIWrapper: # 'k' has a default value and is not shown (show=False), so it is hardcoded here return BingSearchAPIWrapper( bing_search_url=bing_search_url, bing_subscription_key=bing_subscription_key, - k=10 + k=k ) From ab35550240a9367581683e309243819dde8e051f Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 22:24:47 -0300 Subject: [PATCH 087/153] Update GoogleSearchAPIWrapperComponent parameters --- .../components/utilities/GoogleSearchAPIWrapper.py | 11 +++-------- 1 file changed, 3 
insertions(+), 8 deletions(-) diff --git a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py index 37c5ec33e..205087076 100644 --- a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py @@ -1,10 +1,6 @@ from langflow import CustomComponent -from typing import Optional, Union, Callable - -# Assuming GoogleSearchAPIWrapper is a valid import based on JSON -# and it exists in some module that should be imported here. -# The import path should be replaced with the correct one once available. +from typing import Union, Callable from langchain_community.utilities.google_search import GoogleSearchAPIWrapper @@ -16,12 +12,11 @@ class GoogleSearchAPIWrapperComponent(CustomComponent): return { "google_api_key": {"display_name": "Google API Key", "password": True}, "google_cse_id": {"display_name": "Google CSE ID","password":True}, - # Fields with "show": False are omitted based on the rules } def build( self, - google_api_key: Optional[str] = None, - google_cse_id: Optional[str] = None, + google_api_key: str, + google_cse_id: str, ) -> Union[GoogleSearchAPIWrapper, Callable]: return GoogleSearchAPIWrapper(google_api_key=google_api_key, google_cse_id=google_cse_id) From a7196ec77408593e62bb31f76e529176f0ee90c5 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 22:29:48 -0300 Subject: [PATCH 088/153] Fix password field in GoogleSerperAPIWrapperComponent --- .../langflow/components/utilities/GoogleSerperAPIWrapper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py index 724ef6e98..0434dec53 100644 --- a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py +++ 
b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py @@ -17,7 +17,7 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): "display_name": "Result Key for Type", "show": True, "multiline": False, - "password": False, # corrected based on error message + "password": False, "name": "result_key_for_type", "advanced": False, "dynamic": False, @@ -47,8 +47,8 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): def build( self, + serper_api_key: str, result_key_for_type: Optional[Dict[str, str]] = None, - serper_api_key: Optional[str] = None, ) -> GoogleSerperAPIWrapper: return GoogleSerperAPIWrapper( result_key_for_type=result_key_for_type, From 01b386fe5b15b726773038f4fd03e7a38fb99e40 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 22:46:12 -0300 Subject: [PATCH 089/153] Add k parameter to SearxSearchWrapper build method --- .../components/utilities/SearxSearchWrapper.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/components/utilities/SearxSearchWrapper.py b/src/backend/langflow/components/utilities/SearxSearchWrapper.py index 391e50401..d3f755aa5 100644 --- a/src/backend/langflow/components/utilities/SearxSearchWrapper.py +++ b/src/backend/langflow/components/utilities/SearxSearchWrapper.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from typing import Optional, Dict - +from langchain_community.utilities.searx_search import SearxSearchWrapper class SearxSearchWrapperComponent(CustomComponent): display_name = "SearxSearchWrapper" description = "Wrapper for Searx API." 
@@ -13,15 +13,17 @@ class SearxSearchWrapperComponent(CustomComponent): "multiline": True, "value": '{"Authorization": "Bearer "}' }, + "k": { + "display_name": "k", + "advanced": True, + "field_type": "int", + "value": 10 + }, } def build( self, + k: Optional[int] = 10, headers: Optional[Dict[str, str]] = None, ): - if headers is None: - headers = {"Authorization": "Bearer "} - # Placeholder for actual SearxSearchWrapper instantiation - # Since the actual SearxSearchWrapper class is not available, - # it is assumed that it would be instantiated here with headers as an argument. - pass \ No newline at end of file + return SearxSearchWrapper(headers=headers,k=k) From e7899480afd6cb41b86e388e70866bac1ec4e6eb Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 15 Jan 2024 22:48:13 -0300 Subject: [PATCH 090/153] Add searx_host parameter to SearxSearchWrapperComponent --- .../components/utilities/SearxSearchWrapper.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/utilities/SearxSearchWrapper.py b/src/backend/langflow/components/utilities/SearxSearchWrapper.py index d3f755aa5..51464cd0b 100644 --- a/src/backend/langflow/components/utilities/SearxSearchWrapper.py +++ b/src/backend/langflow/components/utilities/SearxSearchWrapper.py @@ -19,11 +19,18 @@ class SearxSearchWrapperComponent(CustomComponent): "field_type": "int", "value": 10 }, + "searx_host": { + "display_name": "Searx Host", + "field_type": "str", + "value": "https://searx.example.com", + "advanced": True, + }, } def build( self, k: Optional[int] = 10, headers: Optional[Dict[str, str]] = None, - ): - return SearxSearchWrapper(headers=headers,k=k) + searx_host: Optional[str] = None, + )->SearxSearchWrapper: + return SearxSearchWrapper(headers=headers,k=k,searx_host=searx_host) From c2154915497887e27b1b9433efb6f1505c7e9c07 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 00:16:16 -0300 Subject: [PATCH 091/153] Update SerpAPIWrapper 
import and parameters --- .../components/utilities/SerpAPIWrapper.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/src/backend/langflow/components/utilities/SerpAPIWrapper.py b/src/backend/langflow/components/utilities/SerpAPIWrapper.py index 0d93dbc91..79f806957 100644 --- a/src/backend/langflow/components/utilities/SerpAPIWrapper.py +++ b/src/backend/langflow/components/utilities/SerpAPIWrapper.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from typing import Callable, Union -from langchain_community.utilities import SerpAPIWrapper +from langchain_community.utilities.serpapi import SerpAPIWrapper class SerpAPIWrapperComponent(CustomComponent): display_name = "SerpAPIWrapper" @@ -8,22 +8,16 @@ class SerpAPIWrapperComponent(CustomComponent): def build_config(self): return { - "serpapi_api_key": {"display_name": "SerpAPI API Key", "type": "password"}, + "serpapi_api_key": {"display_name": "SerpAPI API Key", "type": "str", "password": True}, + "params": {"display_name": "Parameters", "type": "dict","advanced":True, "multiline": True,"value": '{"engine": "google","google_domain": "google.com","gl": "us","hl": "en"}'}, } def build( self, serpapi_api_key: str, + params: dict, ) -> Union[SerpAPIWrapper, Callable]: # Removed quotes around SerpAPIWrapper - # Default parameters as defined in the JSON template. 
- default_params = { - "engine": "google", - "google_domain": "google.com", - "gl": "us", - "hl": "en" - } - return SerpAPIWrapper( serpapi_api_key=serpapi_api_key, - params=default_params + params=params ) \ No newline at end of file From d4d3bcd7ef12f43ed6ca22e1420d18b894a4c016 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 00:22:43 -0300 Subject: [PATCH 092/153] Update import statement for WikipediaAPIWrapper --- .../langflow/components/utilities/WikipediaAPIWrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py index 4b22848ca..f099d78c3 100644 --- a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py +++ b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py @@ -1,7 +1,7 @@ from langflow import CustomComponent from typing import Union, Callable -from langchain_community.utilities import WikipediaAPIWrapper +from langchain_community.utilities.wikipedia import WikipediaAPIWrapper # Assuming WikipediaAPIWrapper is a class that needs to be imported. 
# The import statement is not included as it is not provided in the JSON From 1c7eb2cf0f3b6a0570da851b259259c6bda55f0e Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 00:28:22 -0300 Subject: [PATCH 093/153] Add WolframAlphaAPIWrapper with appid configuration --- .../utilities/WolframAlphaAPIWrapper.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py index 5456c6f50..025998b05 100644 --- a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py +++ b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py @@ -1,7 +1,7 @@ from langflow import CustomComponent from typing import Callable, Union - +from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper # Since all the fields in the JSON have show=False, we will only create a basic component # without any configurable fields. @@ -10,13 +10,9 @@ class WolframAlphaAPIWrapperComponent(CustomComponent): description = "Wrapper for Wolfram Alpha." def build_config(self): - # No fields with show=True are available according to the JSON configuration, - # so we return an empty config. - return {} + return { + "appid": {"display_name": "App ID", "type": "str", "password": True} + } - def build(self) -> Union[Callable, object]: - # Since we are not given any specific implementation details or associated classes, - # we will simply return an object that represents the WolframAlphaAPIWrapper without - # initializing any specific fields. In a real scenario, this would be replaced with - # the actual instantiation of the WolframAlphaAPIWrapper class. - return object() # Placeholder for actual WolframAlphaAPIWrapper class instantiation. 
+ def build(self,appid:str) -> Union[Callable, WolframAlphaAPIWrapper]: + return WolframAlphaAPIWrapper(wolfram_alpha_appid=appid) \ No newline at end of file From d4b0706580210da03bec0d5e57b0242fa764433c Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 11:40:35 -0300 Subject: [PATCH 094/153] Update CohereEmbeddingsComponent and ChatOpenAIComponent configurations --- .../langflow/components/embeddings/CohereEmbeddings.py | 4 +++- src/backend/langflow/components/llms/ChatOpenAI.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/langflow/components/embeddings/CohereEmbeddings.py index f6b4ee1c1..cac7c1d75 100644 --- a/src/backend/langflow/components/embeddings/CohereEmbeddings.py +++ b/src/backend/langflow/components/embeddings/CohereEmbeddings.py @@ -20,12 +20,14 @@ class CohereEmbeddingsComponent(CustomComponent): def build( self, request_timeout: Optional[float] = None, - cohere_api_key: Optional[str] = None, + cohere_api_key: str = None, + max_retries: Optional[int] = None, model: str = "embed-english-v2.0", truncate: Optional[str] = None, user_agent: Optional[str] = "langchain", ) -> CohereEmbeddings: return CohereEmbeddings( + max_retries=max_retries, user_agent=user_agent, request_timeout=request_timeout, cohere_api_key=cohere_api_key, diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py index 8663747bf..44fd8dd40 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -51,6 +51,7 @@ class ChatOpenAIComponent(CustomComponent): "field_type": "str", "advanced": False, "required": False, + "password": True, }, "temperature": { "display_name": "Temperature", From 7dcbe3a52576282ce9d9bb505c1d388af9b4eb24 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 11:58:15 -0300 Subject: [PATCH 095/153] Update 
HuggingFaceEmbeddings.py with default encode_kwargs and model_kwargs --- .../langflow/components/embeddings/HuggingFaceEmbeddings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py index c3dc04ec8..44ad25212 100644 --- a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py +++ b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py @@ -21,8 +21,8 @@ class HuggingFaceEmbeddingsComponent(CustomComponent): def build( self, cache_folder: Optional[str] = None, - encode_kwargs: Optional[Dict] = None, - model_kwargs: Optional[Dict] = None, + encode_kwargs: Optional[Dict] = {}, + model_kwargs: Optional[Dict] = {}, model_name: str = "sentence-transformers/all-mpnet-base-v2", multi_process: bool = False, ) -> HuggingFaceEmbeddings: From 63c030133c67deb487ce3c3d79f720983cf66337 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 12:51:36 -0300 Subject: [PATCH 096/153] Fix file_types in credentials field --- .../langflow/components/embeddings/VertexAIEmbeddings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py index a736e063b..542582a2e 100644 --- a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py @@ -9,7 +9,7 @@ class VertexAIEmbeddingsComponent(CustomComponent): def build_config(self): return { - "credentials": {"display_name": "Credentials", "value": '', "file_types": ['json'],"field_type": "file"}, + "credentials": {"display_name": "Credentials", "value": '', "file_types": ['.json'],"field_type": "file"}, "instance": {"display_name": "instance", "advanced": True, "field_type": "dict"}, "location": {"display_name": "Location", "value": 
'us-central1', "advanced": True}, "max_output_tokens": {"display_name": "Max Output Tokens", "value": 128}, From bd4a6e9d3ad61441792a997335adea31d833b871 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 12:54:31 -0300 Subject: [PATCH 097/153] Refactor CohereComponent instantiation in Cohere.py --- src/backend/langflow/components/llms/Cohere.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/llms/Cohere.py b/src/backend/langflow/components/llms/Cohere.py index 0b56e9c88..1e0fea8fb 100644 --- a/src/backend/langflow/components/llms/Cohere.py +++ b/src/backend/langflow/components/llms/Cohere.py @@ -2,6 +2,7 @@ from langflow import CustomComponent from langchain_core.language_models.base import BaseLanguageModel from typing import Optional +from langchain_community.llms.cohere import Cohere class CohereComponent(CustomComponent): display_name = "Cohere" @@ -35,6 +36,4 @@ class CohereComponent(CustomComponent): max_tokens: Optional[int] = 256, temperature: Optional[float] = 0.75, ) -> BaseLanguageModel: - # Assuming there is a Cohere class that takes these parameters to initialize - # Please replace `Cohere` with the actual class name that should be instantiated - return Cohere(api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) + return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) From 239e4bcb7c9f5eac558e23ed851301bb93158ce4 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 13:30:31 -0300 Subject: [PATCH 098/153] Update Anthropic and ChatAnthropic components --- .../langflow/components/llms/Anthropic.py | 42 +++++-------------- .../langflow/components/llms/ChatAnthropic.py | 11 +++-- 2 files changed, 15 insertions(+), 38 deletions(-) diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/langflow/components/llms/Anthropic.py index de69c2d08..a588fa3a8 100644 --- 
a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -1,6 +1,7 @@ from langflow import CustomComponent -from typing import Optional, Dict, Any -from langflow.field_typing import BaseLanguageModel +from typing import Optional +from langflow.field_typing import BaseLanguageModel,NestedDict +from langchain_community.llms.anthropic import Anthropic class AnthropicComponent(CustomComponent): @@ -20,48 +21,25 @@ class AnthropicComponent(CustomComponent): }, "model_kwargs": { "display_name": "Model Kwargs", - "field_type": 'dict', + "field_type": 'NestedDict', "advanced": True, }, "temperature": { "display_name": "Temperature", - "type": float, + "field_type": "float", }, } def build( self, - anthropic_api_key: Optional[str], - anthropic_api_url: Optional[str], - model_kwargs: Optional[Dict[str, Any]], + anthropic_api_key: str, + anthropic_api_url: str, + model_kwargs: Optional[NestedDict], temperature: Optional[float] = None, ) -> BaseLanguageModel: - # The actual builder method should return an instance of the Anthropic class - # Here we are returning a placeholder class as the Anthropic class is not defined - # This is to comply with the type hints required by the CustomComponent - class Anthropic(BaseLanguageModel): - def __init__( - self, - api_key: Optional[str], - api_url: Optional[str], - model_kwargs: Optional[Dict[str, Any]] = None, - temperature: Optional[float] = None, - ): - # Initialize Anthropic model with the provided arguments - super().__init__() - self.api_key = api_key - self.api_url = api_url - self.model_kwargs = model_kwargs - self.temperature = temperature - - def __call__(self, prompt: str) -> str: - # The logic to call the Anthropic model would go here - # This is a placeholder implementation - return "This is a simulated response from the Anthropic model." 
- return Anthropic( - api_key=anthropic_api_key, - api_url=anthropic_api_url, + anthropic_api_key=anthropic_api_key, + anthropic_api_url=anthropic_api_url, model_kwargs=model_kwargs, temperature=temperature, ) \ No newline at end of file diff --git a/src/backend/langflow/components/llms/ChatAnthropic.py b/src/backend/langflow/components/llms/ChatAnthropic.py index 33f9838fb..a6e222ded 100644 --- a/src/backend/langflow/components/llms/ChatAnthropic.py +++ b/src/backend/langflow/components/llms/ChatAnthropic.py @@ -2,7 +2,7 @@ from langflow import CustomComponent from typing import Optional, Union, Callable from langflow.field_typing import BaseLanguageModel - +from langchain_community.chat_models.anthropic import ChatAnthropic class ChatAnthropicComponent(CustomComponent): display_name = "ChatAnthropic" description = "`Anthropic` chat large language models." @@ -12,12 +12,12 @@ class ChatAnthropicComponent(CustomComponent): return { "anthropic_api_key": { "display_name": "Anthropic API Key", - "type": str, + "field_type": "str", "password": True, }, "anthropic_api_url": { "display_name": "Anthropic API URL", - "type": str, + "field_type": "str", }, "model_kwargs": { "display_name": "Model Kwargs", @@ -26,7 +26,7 @@ class ChatAnthropicComponent(CustomComponent): }, "temperature": { "display_name": "Temperature", - "type": float, + "field_type": "float", }, } @@ -37,10 +37,9 @@ class ChatAnthropicComponent(CustomComponent): model_kwargs: dict = {}, temperature: Optional[float] = None, ) -> Union[BaseLanguageModel, Callable]: - from langchain.model_io.models.chat.integrations import ChatAnthropic # Importing here due to potential local scope requirements return ChatAnthropic( - anthropic_api_key=anthropic_api_key.get_secret_value() if anthropic_api_key else None, + anthropic_api_key=anthropic_api_key, anthropic_api_url=anthropic_api_url, model_kwargs=model_kwargs, temperature=temperature, From 306292c36fddac5e0c31b0609e8fd898b0509171 Mon Sep 17 00:00:00 2001 From: 
anovazzi1 Date: Tue, 16 Jan 2024 13:43:12 -0300 Subject: [PATCH 099/153] Update CTransformersComponent build_config() method --- .../langflow/components/llms/CTransformers.py | 40 +++---------------- 1 file changed, 5 insertions(+), 35 deletions(-) diff --git a/src/backend/langflow/components/llms/CTransformers.py b/src/backend/langflow/components/llms/CTransformers.py index 881953639..46b1c021e 100644 --- a/src/backend/langflow/components/llms/CTransformers.py +++ b/src/backend/langflow/components/llms/CTransformers.py @@ -11,46 +11,16 @@ class CTransformersComponent(CustomComponent): def build_config(self): return { "model": {"display_name": "Model", "required": True}, - "model_file": {"display_name": "Model File", "required": False}, - "model_type": {"display_name": "Model Type", "required": False}, + "model_file": {"display_name": "Model File", "required": False,"field_type":"file", "file_types":[".bin"]}, + "model_type": {"display_name": "Model Type", "required": True}, "config": {"display_name": "Config", "advanced": True, "required": False,"field_type":"dict","value":'{"top_k":40,"top_p":0.95,"temperature":0.8,"repetition_penalty":1.1,"last_n_tokens":64,"seed":-1,"max_new_tokens":256,"stop":"","stream":"False","reset":"True","batch_size":8,"threads":-1,"context_length":-1,"gpu_layers":0}'} } def build( self, model: str, - model_file: Optional[str] = None, - model_type: Optional[str] = None, + model_file: str, + model_type: str, config: Optional[Dict] = None ) -> CTransformers: - # Default config values - default_config = { - "top_k": 40, - "top_p": 0.95, - "temperature": 0.8, - "repetition_penalty": 1.1, - "last_n_tokens": 64, - "seed": -1, - "max_new_tokens": 256, - "stop": None, - "stream": False, - "reset": True, - "batch_size": 8, - "threads": -1, - "context_length": -1, - "gpu_layers": 0 - } - - # If there is a custom config, update the default config with it - if config: - default_config.update(config) - - # Assuming the import below is correct and 
CTransformers is a class within the langchain library - # that inherits from BaseLanguageModel. The following import statement is required: - # from langchain.llms.integration_module import CTransformers - - return CTransformers(model=model, model_file=model_file, model_type=model_type, config=default_config) - -# Note: The actual CTransformers class needs to be imported from the correct module inside the langchain library. -# The `integration_module` in the import statement is just a placeholder and should be replaced with -# the actual module where the CTransformers class is located. + return CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) From 69c6ede6149ad1e6609384ddc9d5cc97b2865400 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 14:01:22 -0300 Subject: [PATCH 100/153] Update LlamaCppComponent parameters --- src/backend/langflow/components/llms/LlamaCpp.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/backend/langflow/components/llms/LlamaCpp.py b/src/backend/langflow/components/llms/LlamaCpp.py index d8c917ba2..109bb1a7b 100644 --- a/src/backend/langflow/components/llms/LlamaCpp.py +++ b/src/backend/langflow/components/llms/LlamaCpp.py @@ -24,7 +24,7 @@ class LlamaCppComponent(CustomComponent): "max_tokens": {"display_name": "Max Tokens", "advanced": True}, "metadata": {"display_name": "Metadata", "advanced": True}, "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, - "model_path": {"display_name": "Model Path"}, + "model_path": {"display_name": "Model Path","field_type":"file", "file_types":[".bin"],"required":True}, "n_batch": {"display_name": "N Batch", "advanced": True}, "n_ctx": {"display_name": "N Ctx", "advanced": True}, "n_gpu_layers": {"display_name": "N GPU Layers", "advanced": True}, @@ -63,20 +63,20 @@ class LlamaCppComponent(CustomComponent): lora_path: Optional[str] = None, max_tokens: Optional[int] = 256, metadata: Optional[Dict] = 
None, - model_kwargs: Optional[Dict] = None, + model_kwargs: Optional[Dict] = {}, n_batch: Optional[int] = 8, n_ctx: Optional[int] = 512, - n_gpu_layers: Optional[int] = None, + n_gpu_layers: Optional[int] = 1, n_parts: Optional[int] = -1, - n_threads: Optional[int] = None, + n_threads: Optional[int] = 1, repeat_penalty: Optional[float] = 1.1, rope_freq_base: Optional[float] = 10000.0, rope_freq_scale: Optional[float] = 1.0, seed: Optional[int] = -1, - stop: Optional[List[str]] = None, + stop: Optional[List[str]] = [], streaming: Optional[bool] = True, - suffix: Optional[str] = None, - tags: Optional[List[str]] = None, + suffix: Optional[str] = "", + tags: Optional[List[str]] = [], temperature: Optional[float] = 0.8, top_k: Optional[int] = 40, top_p: Optional[float] = 0.95, @@ -85,8 +85,6 @@ class LlamaCppComponent(CustomComponent): verbose: Optional[bool] = True, vocab_only: Optional[bool] = False, ) -> LlamaCpp: - # Here you would instantiate the LlamaCpp model with the provided parameters - # Since the actual implementation of LlamaCpp is not provided, this is a placeholder return LlamaCpp( model_path=model_path, grammar=grammar, From ffb0dc0012df958a659c19aa166246d62f4d98db Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 15:07:19 -0300 Subject: [PATCH 101/153] Add VertexAI import and update file_types in VertexAIComponent --- src/backend/langflow/components/llms/VertexAI.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/components/llms/VertexAI.py b/src/backend/langflow/components/llms/VertexAI.py index c019f64e4..84f36330b 100644 --- a/src/backend/langflow/components/llms/VertexAI.py +++ b/src/backend/langflow/components/llms/VertexAI.py @@ -2,6 +2,7 @@ from langflow import CustomComponent from langchain.llms import BaseLLM from typing import Optional, Union, Callable, Dict +from langchain_community.llms.vertexai import VertexAI class VertexAIComponent(CustomComponent): display_name = 
"VertexAI" @@ -12,7 +13,7 @@ class VertexAIComponent(CustomComponent): "credentials": { "display_name": "Credentials", "field_type": "file", - "file_types": ["json"], + "file_types": [".json"], "required": False, "value": None, }, @@ -114,15 +115,11 @@ class VertexAIComponent(CustomComponent): "display_name":"Name", "field_type":"str" }, - "client_preview":{ - "display_name":"client_preview" - } } def build( self, credentials: Optional[str] = None, - client_preview: Optional[any]= None, location: str = "us-central1", max_output_tokens: int = 128, max_retries: int = 6, @@ -141,12 +138,7 @@ class VertexAIComponent(CustomComponent): ) -> Union[BaseLLM, Callable]: if metadata is None: metadata = {} - - # Import the appropriate VertexAI class from the langchain.llms module - from langchain.llms import VertexAI - return VertexAI( - client_preview=client_preview, credentials=credentials, location=location, max_output_tokens=max_output_tokens, From 5065234a1daf7f67f7df31cd551006a9c1bc7d4a Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 15:20:57 -0300 Subject: [PATCH 102/153] Update ChatVertexAIComponent parameters and imports --- .../langflow/components/llms/ChatVertexAI.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/components/llms/ChatVertexAI.py b/src/backend/langflow/components/llms/ChatVertexAI.py index 310d65e2f..072cc0a7f 100644 --- a/src/backend/langflow/components/llms/ChatVertexAI.py +++ b/src/backend/langflow/components/llms/ChatVertexAI.py @@ -1,8 +1,10 @@ - from langflow import CustomComponent -from typing import List, Optional +from typing import List, Optional, Union from langchain_core.messages.base import BaseMessage from langchain_community.chat_models.vertexai import ChatVertexAI +from langflow.field_typing import BaseLanguageModel +from langchain.llms import BaseLLM + class ChatVertexAIComponent(CustomComponent): display_name = "ChatVertexAI" @@ -13,7 +15,7 @@ class 
ChatVertexAIComponent(CustomComponent): "credentials": { "display_name": "Credentials", "field_type": "file", - "fileTypes": ["json"], + "file_types": [".json"], "file_path": None, }, "examples": { @@ -60,8 +62,8 @@ class ChatVertexAIComponent(CustomComponent): def build( self, credentials: Optional[str], - examples: Optional[List[BaseMessage]], - project: Optional[str], + project: str, + examples: Optional[List[BaseMessage]]=[], location: Optional[str] = "us-central1", max_output_tokens: Optional[int] = 128, model_name: Optional[str] = "chat-bison", @@ -69,8 +71,7 @@ class ChatVertexAIComponent(CustomComponent): top_k: Optional[int] = 40, top_p: Optional[float] = 0.95, verbose: Optional[bool] = False, - ): - # Assuming there is a ChatVertexAI class that takes these parameters + ) -> Union[BaseLanguageModel, BaseLLM]: return ChatVertexAI( credentials=credentials, examples=examples, From ef4a195e7a729e978a2d731c713086c6c69a8ab3 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 15:50:19 -0300 Subject: [PATCH 103/153] Update text splitter and vector stores --- .../textsplitters/CharacterTextSplitter.py | 9 ++++---- .../components/vectorstores/Chroma.py | 2 +- .../langflow/components/vectorstores/FAISS.py | 23 +++++-------------- 3 files changed, 11 insertions(+), 23 deletions(-) diff --git a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py index ba64b0f32..aa8fc77f2 100644 --- a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py +++ b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from langchain.text_splitter import TextSplitter +from langchain.text_splitter import CharacterTextSplitter from langchain_core.documents.base import Document from typing import List @@ -23,10 +23,9 @@ class CharacterTextSplitterComponent(CustomComponent): chunk_overlap: int = 200, 
chunk_size: int = 1000, separator: str = "\n", - ) -> TextSplitter: - return TextSplitter( - documents=documents, + ) -> List[Document]: + return CharacterTextSplitter( chunk_overlap=chunk_overlap, chunk_size=chunk_size, separator=separator, - ) + ).split_documents(documents) diff --git a/src/backend/langflow/components/vectorstores/Chroma.py b/src/backend/langflow/components/vectorstores/Chroma.py index f546eb361..adc3ed554 100644 --- a/src/backend/langflow/components/vectorstores/Chroma.py +++ b/src/backend/langflow/components/vectorstores/Chroma.py @@ -3,7 +3,7 @@ from typing import List, Optional, Union import chromadb # type: ignore from langchain.embeddings.base import Embeddings from langchain.schema import BaseRetriever, Document -from langchain.vectorstores import Chroma +from langchain.vectorstores.chroma import Chroma from langchain.vectorstores.base import VectorStore from langflow import CustomComponent diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py index 4d4864e73..2e516e7bc 100644 --- a/src/backend/langflow/components/vectorstores/FAISS.py +++ b/src/backend/langflow/components/vectorstores/FAISS.py @@ -1,11 +1,12 @@ from langflow import CustomComponent -from langchain.vectorstores import FAISS -from typing import Optional, List +from langchain_community.vectorstores.faiss import FAISS +from typing import Optional, List, Union +from langchain.schema import BaseRetriever +from langchain.vectorstores.base import VectorStore from langflow.field_typing import ( Document, Embeddings, - NestedDict, ) class FAISSComponent(CustomComponent): @@ -17,23 +18,11 @@ class FAISSComponent(CustomComponent): return { "documents": {"display_name": "Documents"}, "embedding": {"display_name": "Embedding"}, - "folder_path": {"display_name": "Local Path"}, - "index_name": {"display_name": "Index Name"}, - "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, } def build( self, 
embedding: Embeddings, documents: Optional[List[Document]] = None, - folder_path: str = "", - index_name: str = "", - search_kwargs: Optional[NestedDict] = None, - ) -> FAISS: - return FAISS( - embedding=embedding, - documents=documents, - folder_path=folder_path, - index_name=index_name, - search_kwargs=search_kwargs or {}, - ) + ) -> Union[VectorStore,FAISS,BaseRetriever]: + return FAISS.from_documents(documents=documents,embedding=embedding) From 2cb5855c7bfcbc808dee3d1c0c8df549a5df4bb3 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 15:52:30 -0300 Subject: [PATCH 104/153] Update display name for LLM in ZeroShotAgent.py --- src/backend/langflow/components/agents/ZeroShotAgent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/agents/ZeroShotAgent.py b/src/backend/langflow/components/agents/ZeroShotAgent.py index 2ec37fa7d..8fba2db72 100644 --- a/src/backend/langflow/components/agents/ZeroShotAgent.py +++ b/src/backend/langflow/components/agents/ZeroShotAgent.py @@ -11,7 +11,7 @@ class ZeroShotAgentComponent(CustomComponent): def build_config(self): return { - "llm": {"display_name": "LLM"}, + "llm": {"display_name": "LLM Chain"}, "tools": {"display_name": "Tools"}, "prefix": {"display_name": "Prefix", "multiline": True}, "suffix": {"display_name": "Suffix", "multiline": True}, From cba57bb8f25c99ed8f54c08ed85358978aaaa635 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 16:11:12 -0300 Subject: [PATCH 105/153] Update Pinecone.py to use the new Pinecone package and add support for BaseRetriever --- .../components/vectorstores/Pinecone.py | 25 ++++++------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index bd1890fe7..978ecc821 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ 
b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -1,13 +1,13 @@ from langflow import CustomComponent -from typing import Optional, List -from langchain.vectorstores import Pinecone +from typing import Optional, List, Union +from langchain_community.vectorstores.pinecone import Pinecone from langflow.field_typing import ( Document, Embeddings, - NestedDict, ) - - +from langchain.schema import BaseRetriever +from langchain.vectorstores.base import VectorStore +import pinecone class PineconeComponent(CustomComponent): display_name = "Pinecone" description = "Construct Pinecone wrapper from raw documents." @@ -28,17 +28,8 @@ class PineconeComponent(CustomComponent): embedding: Embeddings, documents: Optional[List[Document]] = None, index_name: Optional[str] = None, - namespace: Optional[str] = None, pinecone_api_key: Optional[str] = None, pinecone_env: Optional[str] = None, - search_kwargs: Optional[NestedDict] = None, - ) -> Pinecone: - return Pinecone( - documents=documents, - embedding=embedding, - index_name=index_name, - namespace=namespace, - pinecone_api_key=pinecone_api_key, - pinecone_env=pinecone_env, - search_kwargs=search_kwargs, - ) + ) -> Union[VectorStore,Pinecone,BaseRetriever]: + pinecone.init(api_key=pinecone_api_key,environment=pinecone_env) + return Pinecone.from_documents(documents=documents,embedding=embedding,index_name=index_name) From 3c7e553d8b555e79853513634eff70a5fbb60105 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 16:13:49 -0300 Subject: [PATCH 106/153] Update Pinecone component configuration --- src/backend/langflow/components/vectorstores/Pinecone.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index 978ecc821..6e62dd766 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -18,8 +18,8 
@@ class PineconeComponent(CustomComponent): "embedding": {"display_name": "Embedding", "default": 1000}, "index_name": {"display_name": "Index Name"}, "namespace": {"display_name": "Namespace"}, - "pinecone_api_key": {"display_name": "Pinecone API Key", "default": ""}, - "pinecone_env": {"display_name": "Pinecone Environment", "default": ""}, + "pinecone_api_key": {"display_name": "Pinecone API Key", "default": "","password": True,"required": True}, + "pinecone_env": {"display_name": "Pinecone Environment", "default": "","required": True}, "search_kwargs": {"display_name": "Search Kwargs", "default": "{}"}, } From b0654401a6b7cdbafdbe51dd95239de5595e45e8 Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Tue, 16 Jan 2024 16:16:06 -0300 Subject: [PATCH 107/153] Fix: Input list doesnt show up when template value are empty --- src/frontend/src/components/inputListComponent/index.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/frontend/src/components/inputListComponent/index.tsx b/src/frontend/src/components/inputListComponent/index.tsx index e98bba304..b82d37f63 100644 --- a/src/frontend/src/components/inputListComponent/index.tsx +++ b/src/frontend/src/components/inputListComponent/index.tsx @@ -23,6 +23,8 @@ export default function InputListComponent({ value = [value]; } + if (!value.length) value = [""]; + return (
Date: Tue, 16 Jan 2024 16:23:30 -0300 Subject: [PATCH 108/153] Update QdrantComponent parameters --- .../components/vectorstores/Qdrant.py | 40 ++++++++++--------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/langflow/components/vectorstores/Qdrant.py index e902cb353..548da03bf 100644 --- a/src/backend/langflow/components/vectorstores/Qdrant.py +++ b/src/backend/langflow/components/vectorstores/Qdrant.py @@ -1,7 +1,9 @@ from langflow import CustomComponent from langchain_community.vectorstores.qdrant import Qdrant -from typing import Optional, List +from typing import Optional, List, Union from langflow.field_typing import Document, Embeddings, NestedDict +from langchain.schema import BaseRetriever +from langchain.vectorstores.base import VectorStore class QdrantComponent(CustomComponent): @@ -14,20 +16,20 @@ class QdrantComponent(CustomComponent): "embedding": {"display_name": "Embedding"}, "api_key": {"display_name": "API Key", "password": True}, "collection_name": {"display_name": "Collection Name"}, - "content_payload_key": {"display_name": "Content Payload Key"}, - "distance_func": {"display_name": "Distance Function"}, - "grpc_port": {"display_name": "gRPC Port"}, - "host": {"display_name": "Host"}, - "https": {"display_name": "HTTPS"}, - "location": {"display_name": "Location"}, - "metadata_payload_key": {"display_name": "Metadata Payload Key"}, - "path": {"display_name": "Path"}, - "port": {"display_name": "Port"}, - "prefer_grpc": {"display_name": "Prefer gRPC"}, - "prefix": {"display_name": "Prefix"}, - "search_kwargs": {"display_name": "Search Kwargs"}, - "timeout": {"display_name": "Timeout"}, - "url": {"display_name": "URL"}, + "content_payload_key": {"display_name": "Content Payload Key", "advanced": True}, + "distance_func": {"display_name": "Distance Function", "advanced": True}, + "grpc_port": {"display_name": "gRPC Port", "advanced": True}, + "host": 
{"display_name": "Host", "advanced": True}, + "https": {"display_name": "HTTPS", "advanced": True}, + "location": {"display_name": "Location", "advanced": True}, + "metadata_payload_key": {"display_name": "Metadata Payload Key", "advanced": True}, + "path": {"display_name": "Path", "advanced": True}, + "port": {"display_name": "Port", "advanced": True}, + "prefer_grpc": {"display_name": "Prefer gRPC", "advanced": True}, + "prefix": {"display_name": "Prefix", "advanced": True}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, + "timeout": {"display_name": "Timeout", "advanced": True}, + "url": {"display_name": "URL", "advanced": True}, } def build( @@ -38,20 +40,20 @@ class QdrantComponent(CustomComponent): collection_name: Optional[str] = None, content_payload_key: str = "page_content", distance_func: str = "Cosine", - grpc_port: int = 6334, + grpc_port: Optional[int] = 6334, host: Optional[str] = None, https: bool = False, location: str = ":memory:", metadata_payload_key: str = "metadata", path: Optional[str] = None, - port: int = 6333, + port: Optional[int] = 6333, prefer_grpc: bool = False, prefix: Optional[str] = None, search_kwargs: Optional[NestedDict] = None, timeout: Optional[float] = None, url: Optional[str] = None, - ) -> Qdrant: - return Qdrant( + ) -> Union[VectorStore, Qdrant, BaseRetriever]: + return Qdrant.from_documents( documents=documents, embedding=embedding, api_key=api_key, From e79022c71f89909e9b29ca069db01f0920f706b3 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 16:34:01 -0300 Subject: [PATCH 109/153] Update SupabaseVectorStore implementation --- .../vectorstores/SupabaseVectorStore.py | 20 ++++++++----------- 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py index e875fc06e..d8108250f 100644 --- 
a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -1,11 +1,14 @@ from langflow import CustomComponent -from typing import Optional, List -from langchain.vectorstores import SupabaseVectorStore +from typing import Optional, List,Union +from langchain_community.vectorstores.supabase import SupabaseVectorStore from langflow.field_typing import ( Document, Embeddings, NestedDict, ) +from langchain.schema import BaseRetriever +from langchain.vectorstores.base import VectorStore +from supabase.client import Client, create_client class SupabaseComponent(CustomComponent): @@ -32,13 +35,6 @@ class SupabaseComponent(CustomComponent): supabase_service_key: str = "", supabase_url: str = "", table_name: str = "", - ) -> SupabaseVectorStore: - return SupabaseVectorStore( - documents=documents, - embedding=embedding, - query_name=query_name, - search_kwargs=search_kwargs, - supabase_service_key=supabase_service_key, - supabase_url=supabase_url, - table_name=table_name, - ) + ) -> Union[VectorStore,SupabaseVectorStore,BaseRetriever]: + supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key) + return SupabaseVectorStore.from_documents(documents=documents,embedding=embedding,query_name=query_name,search_kwargs=search_kwargs,client=supabase,table_name=table_name) \ No newline at end of file From 630d28ea015d56147e7dbda11a45a9637be83328 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 16:35:02 -0300 Subject: [PATCH 110/153] Update display names for advanced settings in SupabaseVectorStore.py --- .../langflow/components/vectorstores/SupabaseVectorStore.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py index d8108250f..6c2b801ec 100644 --- 
a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -20,10 +20,10 @@ class SupabaseComponent(CustomComponent): "documents": {"display_name": "Documents"}, "embedding": {"display_name": "Embedding"}, "query_name": {"display_name": "Query Name"}, - "search_kwargs": {"display_name": "Search Kwargs"}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, "supabase_service_key": {"display_name": "Supabase Service Key"}, "supabase_url": {"display_name": "Supabase URL"}, - "table_name": {"display_name": "Table Name"}, + "table_name": {"display_name": "Table Name", "advanced": True}, } def build( From 0a21f2b453a3e69ef753af0de4e1f7bd25fe222d Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 16:49:50 -0300 Subject: [PATCH 111/153] Refactor build method in JsonToolkitComponent --- src/backend/langflow/components/toolkits/JsonToolkit.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/components/toolkits/JsonToolkit.py b/src/backend/langflow/components/toolkits/JsonToolkit.py index d6d1710bb..70e3355e4 100644 --- a/src/backend/langflow/components/toolkits/JsonToolkit.py +++ b/src/backend/langflow/components/toolkits/JsonToolkit.py @@ -1,5 +1,4 @@ from langflow import CustomComponent -from langflow.field_typing import Tool from langchain_community.tools.json.tool import JsonSpec from langchain_community.agent_toolkits.json.toolkit import JsonToolkit @@ -13,7 +12,5 @@ class JsonToolkitComponent(CustomComponent): "spec": {"display_name": "Spec", "type": JsonSpec}, } - def build(self, spec: JsonSpec) -> Tool: - # Assuming JsonToolkit is the class that should be instantiated with the spec - # The actual class name should be used in place of JsonToolkit if it is different - return JsonToolkit(spec=spec) # Replace JsonToolkit with the actual class name if necessary + def build(self, spec: JsonSpec) -> JsonToolkit: 
+ return JsonToolkit(spec=spec) \ No newline at end of file From d9d89470c599c761bf2935d4b34664d673d40fc5 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 16:58:40 -0300 Subject: [PATCH 112/153] Remove commented code in OpenAPIToolkitComponent --- src/backend/langflow/components/toolkits/OpenAPIToolkit.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py index 4f3309117..2d9a860a0 100644 --- a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py +++ b/src/backend/langflow/components/toolkits/OpenAPIToolkit.py @@ -20,5 +20,4 @@ class OpenAPIToolkitComponent(CustomComponent): json_agent: AgentExecutor, requests_wrapper: TextRequestsWrapper, ) -> Callable: - # Assuming the actual toolkit class name is OpenAPIToolkit return OpenAPIToolkit(json_agent=json_agent, requests_wrapper=requests_wrapper) From c05ab078fae2330d13aa668386e3aa7dea96be4b Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 17:28:08 -0300 Subject: [PATCH 113/153] Refactor VectorStoreInfoComponent and VectorStoreToolkitComponent This commit refactors the VectorStoreInfoComponent and VectorStoreToolkitComponent classes. The changes include importing the VectorStoreInfo class from the correct module and adding a new parameter, llm, to the build method of the VectorStoreToolkitComponent class. 
--- .../langflow/components/toolkits/VectorStoreInfo.py | 13 ++----------- .../components/toolkits/VectorStoreToolkit.py | 7 ++++++- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/src/backend/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/langflow/components/toolkits/VectorStoreInfo.py index 3018d975e..27a5b3792 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreInfo.py +++ b/src/backend/langflow/components/toolkits/VectorStoreInfo.py @@ -2,7 +2,7 @@ from langflow import CustomComponent from langchain.vectorstores import VectorStore from typing import Union, Callable -from langflow.field_typing import Chain +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo class VectorStoreInfoComponent(CustomComponent): display_name = "VectorStoreInfo" @@ -20,14 +20,5 @@ class VectorStoreInfoComponent(CustomComponent): vectorstore: VectorStore, description: str, name: str, - ) -> Union[Chain, Callable]: - # Since the actual implementation of VectorStoreInfo is not provided, this is a placeholder - # Replace VectorStoreInfo with the actual class that should be instantiated - # This is a hypothetical class, actual implementation may vary - class VectorStoreInfo: - def __init__(self, vectorstore, description, name): - self.vectorstore = vectorstore - self.description = description - self.name = name - + ) -> Union[VectorStoreInfo, Callable]: return VectorStoreInfo(vectorstore=vectorstore, description=description, name=name) diff --git a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py index 236e2146f..118a06fd9 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py @@ -2,6 +2,9 @@ from langflow import CustomComponent from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit from 
langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo +from langflow.field_typing import ( + BaseLanguageModel, +) from langflow.field_typing import ( Tool, ) @@ -13,10 +16,12 @@ class VectorStoreToolkitComponent(CustomComponent): def build_config(self): return { "vectorstore_info": {"display_name": "Vector Store Info"}, + "llm": {"display_name": "LLM"}, } def build( self, vectorstore_info: VectorStoreInfo, + llm: BaseLanguageModel, ) -> Tool: - return VectorStoreToolkit(vectorstore_info=vectorstore_info) + return VectorStoreToolkit(vectorstore_info=vectorstore_info,llm=llm) From 00d80a4b9b3429c84c68a22a278db85a45797d28 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 17:33:39 -0300 Subject: [PATCH 114/153] Add llm parameter to build method in VectorStoreRouterToolkitComponent --- .../components/toolkits/VectorStoreRouterToolkit.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py index 11ff2a6cc..7264f1328 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py @@ -3,6 +3,7 @@ from langflow import CustomComponent from typing import List from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo +from langflow.field_typing import BaseLanguageModel,Tool class VectorStoreRouterToolkitComponent(CustomComponent): display_name = "VectorStoreRouterToolkit" @@ -11,14 +12,12 @@ class VectorStoreRouterToolkitComponent(CustomComponent): def build_config(self): return { "vectorstores": {"display_name": "Vector Stores"}, + "llm": {"display_name": "LLM"}, } def build( self, vectorstores: List[VectorStoreInfo], - ): - # Assuming the class `VectorStoreRouterToolkit` exists 
within a module, but since there - # is no further information provided about the module structure, I will assume it is - # accessible from the current context. If it's in `langchain.vectorstores`, it should be - # imported from there. - return VectorStoreRouterToolkit(vectorstores=vectorstores) + llm: BaseLanguageModel + )->Tool: + return VectorStoreRouterToolkit(vectorstores=vectorstores,llm=llm) From f83120bcece9f5c2ee55c18e3bd309f6428c66fb Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 17:56:42 -0300 Subject: [PATCH 115/153] Update VectorStoreAgent and VectorStoreToolkit --- .../langflow/components/agents/VectorStoreAgent.py | 10 +++++----- .../langflow/components/toolkits/VectorStoreToolkit.py | 3 ++- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/components/agents/VectorStoreAgent.py b/src/backend/langflow/components/agents/VectorStoreAgent.py index 339e3f713..99424a797 100644 --- a/src/backend/langflow/components/agents/VectorStoreAgent.py +++ b/src/backend/langflow/components/agents/VectorStoreAgent.py @@ -1,7 +1,7 @@ from langflow import CustomComponent -from langchain.agents import AgentExecutor -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo +from langchain.agents import AgentExecutor, create_vectorstore_agent +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit from typing import Union, Callable from langflow.field_typing import BaseLanguageModel @@ -12,12 +12,12 @@ class VectorStoreAgentComponent(CustomComponent): def build_config(self): return { "llm": {"display_name": "LLM"}, - "vectorstoreinfo": {"display_name": "Vector Store Info"}, + "vector_store_toolkit": {"display_name": "Vector Store Info"}, } def build( self, llm: BaseLanguageModel, - vectorstoreinfo: VectorStoreInfo, + vector_store_toolkit: VectorStoreToolkit, ) -> Union[AgentExecutor, Callable]: - return AgentExecutor(llm=llm, vectorstore=vectorstoreinfo) + return 
create_vectorstore_agent(llm=llm,toolkit=vector_store_toolkit) diff --git a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py index 118a06fd9..58529b373 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py @@ -8,6 +8,7 @@ from langflow.field_typing import ( from langflow.field_typing import ( Tool, ) +from typing import Union class VectorStoreToolkitComponent(CustomComponent): display_name = "VectorStoreToolkit" @@ -23,5 +24,5 @@ class VectorStoreToolkitComponent(CustomComponent): self, vectorstore_info: VectorStoreInfo, llm: BaseLanguageModel, - ) -> Tool: + ) -> Union[Tool,VectorStoreToolkit]: return VectorStoreToolkit(vectorstore_info=vectorstore_info,llm=llm) From 758b4e443dd38c1d02bf2fa7a910d8d0bf041341 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 18:04:14 -0300 Subject: [PATCH 116/153] Add create_vectorstore_router_agent function and update VectorStoreRouterToolkitComponent --- .../langflow/components/agents/VectorStoreRouterAgent.py | 4 ++-- .../components/toolkits/VectorStoreRouterToolkit.py | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py index d613f3080..8a3c3dc6a 100644 --- a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py +++ b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py @@ -2,7 +2,7 @@ from langflow import CustomComponent from langchain_core.language_models.base import BaseLanguageModel from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit -from langchain.agents import AgentExecutor +from langchain.agents import create_vectorstore_router_agent from typing import Callable class VectorStoreRouterAgentComponent(CustomComponent): @@ -20,4 +20,4 
@@ class VectorStoreRouterAgentComponent(CustomComponent): llm: BaseLanguageModel, vectorstoreroutertoolkit: VectorStoreRouterToolkit ) -> Callable: - return AgentExecutor(llm=llm, toolkit=vectorstoreroutertoolkit) + return create_vectorstore_router_agent(llm=llm,toolkit=vectorstoreroutertoolkit) diff --git a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py index 7264f1328..fb1c66ef4 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py @@ -1,6 +1,6 @@ from langflow import CustomComponent -from typing import List +from typing import List, Union from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo from langflow.field_typing import BaseLanguageModel,Tool @@ -19,5 +19,7 @@ class VectorStoreRouterToolkitComponent(CustomComponent): self, vectorstores: List[VectorStoreInfo], llm: BaseLanguageModel - )->Tool: + )->Union[Tool,VectorStoreRouterToolkit]: + print("vectorstores",vectorstores) + print("llm",llm) return VectorStoreRouterToolkit(vectorstores=vectorstores,llm=llm) From 4f5f8258f3c5cbfa6032efdac1eef11d87976f26 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 18:13:16 -0300 Subject: [PATCH 117/153] Add typing imports and update return type in LLMMathChainComponent --- src/backend/langflow/components/chains/LLMMathChain.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py index 406143418..1a6138a6e 100644 --- a/src/backend/langflow/components/chains/LLMMathChain.py +++ b/src/backend/langflow/components/chains/LLMMathChain.py @@ -1,10 +1,11 @@ - +from typing import Callable, Optional, Union from langflow import 
CustomComponent from langchain.chains import LLMChain from typing import Optional from langflow.field_typing import ( BaseLanguageModel, BaseMemory, + Chain ) class LLMMathChainComponent(CustomComponent): @@ -28,5 +29,5 @@ class LLMMathChainComponent(CustomComponent): input_key: str, output_key: str, memory: Optional[BaseMemory] = None, - ) -> LLMChain: + ) -> Union[Chain, Callable]: return LLMChain(llm=llm, prompt=llm_chain, input_key=input_key, output_key=output_key, memory=memory) From cb5076cba0c7f2d030dbbec099ab19e7e3a182d8 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 18:13:31 -0300 Subject: [PATCH 118/153] Update LLMChainComponent signature --- src/backend/langflow/components/chains/LLMChain.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/components/chains/LLMChain.py b/src/backend/langflow/components/chains/LLMChain.py index ec88e128a..85f7ae96d 100644 --- a/src/backend/langflow/components/chains/LLMChain.py +++ b/src/backend/langflow/components/chains/LLMChain.py @@ -28,5 +28,5 @@ class LLMChainComponent(CustomComponent): prompt: BasePromptTemplate, llm: BaseLanguageModel, memory: Optional[BaseMemory] = None, - ) -> Union[Chain, Callable]: + ) -> Union[Chain, Callable,LLMChain]: return LLMChain(prompt=prompt, llm=llm, memory=memory) From b56cca506d8f10d54f227b1246ad032a8e0503d4 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 18:22:26 -0300 Subject: [PATCH 119/153] Update LLMMathChainComponent build method signature --- .../langflow/components/chains/LLMMathChain.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py index 1a6138a6e..73f0eddc0 100644 --- a/src/backend/langflow/components/chains/LLMMathChain.py +++ b/src/backend/langflow/components/chains/LLMMathChain.py @@ -5,7 +5,8 @@ from typing import Optional from langflow.field_typing 
import ( BaseLanguageModel, BaseMemory, - Chain + Chain, + BasePromptTemplate ) class LLMMathChainComponent(CustomComponent): @@ -16,18 +17,16 @@ class LLMMathChainComponent(CustomComponent): def build_config(self): return { "llm": {"display_name": "LLM"}, - "llm_chain": {"display_name": "LLM Chain"}, + "prompt": {"display_name": "Prompt"}, "memory": {"display_name": "Memory"}, - "input_key": {"display_name": "Input Key"}, "output_key": {"display_name": "Output Key"}, } def build( self, llm: BaseLanguageModel, - llm_chain: LLMChain, - input_key: str, - output_key: str, + prompt: BasePromptTemplate, + output_key: str="text", memory: Optional[BaseMemory] = None, ) -> Union[Chain, Callable]: - return LLMChain(llm=llm, prompt=llm_chain, input_key=input_key, output_key=output_key, memory=memory) + return LLMChain(llm=llm, prompt=prompt, output_key=output_key, memory=memory) From 9bd1c755c47635dfcf7d8c9419e992db97daf06b Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 18:27:42 -0300 Subject: [PATCH 120/153] Update LLMMathChainComponent to use LLMMathChain --- .../components/chains/LLMMathChain.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py index 73f0eddc0..cd508e478 100644 --- a/src/backend/langflow/components/chains/LLMMathChain.py +++ b/src/backend/langflow/components/chains/LLMMathChain.py @@ -1,12 +1,11 @@ -from typing import Callable, Optional, Union + from langflow import CustomComponent -from langchain.chains import LLMChain -from typing import Optional +from langchain.chains import LLMChain,LLMMathChain +from typing import Callable, Optional, Union from langflow.field_typing import ( BaseLanguageModel, BaseMemory, - Chain, - BasePromptTemplate + Chain ) class LLMMathChainComponent(CustomComponent): @@ -17,16 +16,18 @@ class LLMMathChainComponent(CustomComponent): def build_config(self): return { 
"llm": {"display_name": "LLM"}, - "prompt": {"display_name": "Prompt"}, + "llm_chain": {"display_name": "LLM Chain"}, "memory": {"display_name": "Memory"}, + "input_key": {"display_name": "Input Key"}, "output_key": {"display_name": "Output Key"}, } def build( self, llm: BaseLanguageModel, - prompt: BasePromptTemplate, - output_key: str="text", + llm_chain: LLMChain, + input_key: Optional[str]="question", + output_key: Optional[str]="answer", memory: Optional[BaseMemory] = None, - ) -> Union[Chain, Callable]: - return LLMChain(llm=llm, prompt=prompt, output_key=output_key, memory=memory) + ) -> Union[LLMMathChain, Callable,Chain]: + return LLMMathChain(llm=llm, llm_chain=llm_chain, input_key=input_key, output_key=output_key, memory=memory) From 921e34e37895b6178218d3fc45f33340a4ca499a Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 18:33:41 -0300 Subject: [PATCH 121/153] Add BaseCombineDocumentsChain to CombineDocsChainComponent --- .../langflow/components/chains/CombineDocsChain.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/src/backend/langflow/components/chains/CombineDocsChain.py b/src/backend/langflow/components/chains/CombineDocsChain.py index d18cec63a..0697d00bb 100644 --- a/src/backend/langflow/components/chains/CombineDocsChain.py +++ b/src/backend/langflow/components/chains/CombineDocsChain.py @@ -2,6 +2,7 @@ from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, Chain from typing import Union, Callable +from langchain.chains.combine_documents.base import BaseCombineDocumentsChain class CombineDocsChainComponent(CustomComponent): display_name = "CombineDocsChain" @@ -24,14 +25,4 @@ class CombineDocsChainComponent(CustomComponent): if chain_type not in ['stuff', 'map_reduce', 'map_rerank', 'refine']: raise ValueError(f"Invalid chain_type: {chain_type}") - # Implement the logic to create and return the appropriate chain based on the chain_type - # This could be a 
placeholder for now, as the specific chain loading function is not defined. - # Replace with actual implementation when available. - return load_qa_chain(llm=llm, chain_type=chain_type) - -# Assuming there is a function or class `load_qa_chain` that creates the chain -# based on the `chain_type` and `llm`. This is a placeholder for the actual -# implementation which should be replaced with the correct function/class call. -def load_qa_chain(llm: BaseLanguageModel, chain_type: str) -> Union[Chain, Callable]: - # Implement the logic to create and return the appropriate chain based on the chain_type - pass + return BaseCombineDocumentsChain() \ No newline at end of file From f4ae9b4308cce23b5a971d9bf112742c88533068 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 18:54:08 -0300 Subject: [PATCH 122/153] Refactor RetrievalQAWithSourcesChainComponent class --- .../chains/RetrievalQAWithSourcesChain.py | 27 ++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py index 44362683d..452572226 100644 --- a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py +++ b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -1,11 +1,12 @@ from langflow import CustomComponent from langchain.chains import RetrievalQAWithSourcesChain +from langchain.chains.combine_documents.base import BaseCombineDocumentsChain from typing import Optional from langflow.field_typing import ( BaseMemory, BaseRetriever, - Chain, + BaseLanguageModel ) class RetrievalQAWithSourcesChainComponent(CustomComponent): @@ -14,22 +15,24 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent): def build_config(self): return { - "combine_documents_chain": {"display_name": "Combine Documents Chain"}, - "retriever": {"display_name": "Retriever"}, - "memory": {"display_name": "Memory", "optional": 
True}, - "return_source_documents": {"display_name": "Return Source Documents", "default": True, "advanced": True}, + "llm": {"display_name": "LLM"}, + "chain_type": { + "display_name": "Chain Type", + "options": ['stuff', 'map_reduce', 'map_rerank', 'refine'], + }, + "memory": {"display_name": "Memory"}, + "return_source_documents": {"display_name": "Return Source Documents"}, + } + def build( self, - combine_documents_chain: Chain, retriever: BaseRetriever, + llm: BaseLanguageModel, + combine_documents_chain: BaseCombineDocumentsChain, + chain_type: str, memory: Optional[BaseMemory] = None, return_source_documents: Optional[bool] = True, ) -> RetrievalQAWithSourcesChain: - return RetrievalQAWithSourcesChain( - combine_documents_chain=combine_documents_chain, - retriever=retriever, - memory=memory, - return_source_documents=return_source_documents - ) + return RetrievalQAWithSourcesChain(combine_documents_chain=combine_documents_chain,memory=memory,return_source_documents=return_source_documents,retriever=retriever).from_chain_type(llm=llm, chain_type=chain_type) \ No newline at end of file From 2ca2fc0f6a9fcf27753c9aa830f9c521d141fb8f Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Tue, 16 Jan 2024 18:55:59 -0300 Subject: [PATCH 123/153] Fix: Transform empty object to array without opening 'editNode' modal to prevent flow build breakage. --- src/frontend/src/utils/reactflowUtils.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts index 7c8a0b7c0..8d222349a 100644 --- a/src/frontend/src/utils/reactflowUtils.ts +++ b/src/frontend/src/utils/reactflowUtils.ts @@ -474,6 +474,8 @@ export function convertArrayToObj(arrayOfObjects) { export function hasDuplicateKeys(array) { const keys = {}; + // Transforms an empty object into an object array without opening the 'editNode' modal to prevent the flow build from breaking. 
+ if (!Array.isArray(array)) array = [{"": ""}]; for (const obj of array) { for (const key in obj) { if (keys[key]) { @@ -486,6 +488,8 @@ export function hasDuplicateKeys(array) { } export function hasEmptyKey(objArray) { + // Transforms an empty object into an array without opening the 'editNode' modal to prevent the flow build from breaking. + if (!Array.isArray(objArray)) objArray = []; for (const obj of objArray) { for (const key in obj) { if (obj.hasOwnProperty(key) && key === "") { From 8c61654bdafbee95974227d8bd2bdd34053f9cf9 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 16 Jan 2024 19:17:52 -0300 Subject: [PATCH 124/153] Update SQLDatabaseChainComponent return type --- src/backend/langflow/components/chains/SQLDatabaseChain.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/chains/SQLDatabaseChain.py b/src/backend/langflow/components/chains/SQLDatabaseChain.py index a86555b35..80be78e72 100644 --- a/src/backend/langflow/components/chains/SQLDatabaseChain.py +++ b/src/backend/langflow/components/chains/SQLDatabaseChain.py @@ -7,6 +7,7 @@ from langflow.field_typing import ( Chain ) from langchain_community.utilities.sql_database import SQLDatabase +from langchain_experimental.sql.base import SQLDatabaseChain class SQLDatabaseChainComponent(CustomComponent): display_name = "SQLDatabaseChain" @@ -24,5 +25,5 @@ class SQLDatabaseChainComponent(CustomComponent): db: SQLDatabase, llm: BaseLanguageModel, prompt: BasePromptTemplate, - ) -> Union[Chain, Callable]: - return Chain(db=db, llm=llm, prompt=prompt) + ) -> Union[Chain, Callable,SQLDatabaseChain]: + return SQLDatabaseChain.from_llm(llm=llm, db=db, prompt=prompt) From 70717110d12753d757601cd9c3e75192a34b0129 Mon Sep 17 00:00:00 2001 From: igorrCarvalho Date: Tue, 16 Jan 2024 20:09:18 -0300 Subject: [PATCH 125/153] Fix: Add select trigger to Saved components to make toolbar open again --- .../extraSidebarComponent/sideBarDraggableComponent/index.tsx | 
2 ++ 1 file changed, 2 insertions(+) diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx index d7d3ee925..c2d550e60 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx @@ -4,6 +4,7 @@ import { Select, SelectContent, SelectItem, + SelectTrigger } from "../../../../../components/ui/select-custom"; import { useDarkStore } from "../../../../../stores/darkStore"; import useFlowsManagerStore from "../../../../../stores/flowsManagerStore"; @@ -117,6 +118,7 @@ export const SidebarDraggableComponent = forwardRef( name="Menu" className="side-bar-components-icon " /> + Date: Wed, 17 Jan 2024 13:42:36 -0300 Subject: [PATCH 126/153] Fix: Add value prop initial value to prevent it from breaking when open edit node modal --- src/frontend/src/components/dictComponent/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/frontend/src/components/dictComponent/index.tsx b/src/frontend/src/components/dictComponent/index.tsx index 066819ba1..2cf622e93 100644 --- a/src/frontend/src/components/dictComponent/index.tsx +++ b/src/frontend/src/components/dictComponent/index.tsx @@ -6,7 +6,7 @@ import { classNames } from "../../utils/utils"; import { Input } from "../ui/input"; export default function DictComponent({ - value, + value = [], onChange, disabled, editNode = false, From a3cc0c7fa67bfdee9c52ae02f4d9adf18587a6db Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 24 Jan 2024 19:57:58 -0300 Subject: [PATCH 127/153] Fix imports and formatting issues --- .../langflow/components/agents/CSVAgent.py | 1 + .../langflow/components/agents/JsonAgent.py | 7 ++-- .../langflow/components/agents/SQLAgent.py | 6 +-- 
.../components/agents/VectorStoreAgent.py | 6 +-- .../agents/VectorStoreRouterAgent.py | 10 ++--- .../components/agents/ZeroShotAgent.py | 9 +++-- .../components/chains/CombineDocsChain.py | 8 ++-- .../langflow/components/chains/LLMChain.py | 2 +- .../components/chains/LLMCheckerChain.py | 2 +- .../components/chains/LLMMathChain.py | 18 ++++----- .../langflow/components/chains/RetrievalQA.py | 24 ++++++------ .../chains/RetrievalQAWithSourcesChain.py | 29 ++++++++------- .../components/chains/SQLDatabaseChain.py | 10 ++--- .../documentloaders/AZLyricsLoader.py | 4 +- .../documentloaders/AirbyteJSONLoader.py | 2 +- .../components/documentloaders/CSVLoader.py | 10 ++--- .../documentloaders/CoNLLULoader.py | 2 +- .../CollegeConfidentialLoader.py | 14 +++---- .../documentloaders/DirectoryLoader.py | 2 +- .../documentloaders/EverNoteLoader.py | 3 +- .../documentloaders/FacebookChatLoader.py | 11 ++++-- .../documentloaders/GitbookLoader.py | 4 +- .../components/documentloaders/HNLoader.py | 19 +++------- .../documentloaders/IFixitLoader.py | 18 +++++++-- .../components/documentloaders/IMSDbLoader.py | 2 +- .../documentloaders/PyPDFDirectoryLoader.py | 23 ------------ .../components/documentloaders/PyPDFLoader.py | 20 +++++++--- .../documentloaders/ReadTheDocsLoader.py | 8 ++-- .../components/documentloaders/SRTLoader.py | 2 +- .../documentloaders/SlackDirectoryLoader.py | 9 +++-- .../components/documentloaders/TextLoader.py | 2 +- .../documentloaders/UnstructuredHTMLLoader.py | 17 +++++++-- .../UnstructuredPowerPointLoader.py | 1 - .../components/embeddings/CohereEmbeddings.py | 9 +++-- .../embeddings/HuggingFaceEmbeddings.py | 5 ++- .../components/embeddings/OpenAIEmbeddings.py | 6 +-- .../embeddings/VertexAIEmbeddings.py | 12 +++--- .../langflow/components/llms/Anthropic.py | 6 +-- .../components/llms/AzureChatOpenAI.py | 2 +- .../langflow/components/llms/CTransformers.py | 25 ++++++++----- .../langflow/components/llms/ChatAnthropic.py | 6 +-- 
.../langflow/components/llms/ChatOpenAI.py | 10 +++-- .../langflow/components/llms/ChatVertexAI.py | 15 ++++---- .../langflow/components/llms/Cohere.py | 22 ++--------- .../langflow/components/llms/LlamaCpp.py | 9 ++++- .../langflow/components/llms/OpenAI.py | 10 +++-- .../langflow/components/llms/VertexAI.py | 35 +++++++----------- .../retrievers/MultiQueryRetriever.py | 37 ++++++++++--------- .../textsplitters/CharacterTextSplitter.py | 1 - .../LanguageRecursiveTextSplitter.py | 8 ++-- .../components/toolkits/JsonToolkit.py | 2 +- .../components/toolkits/VectorStoreInfo.py | 2 +- .../toolkits/VectorStoreRouterToolkit.py | 16 ++++---- .../components/toolkits/VectorStoreToolkit.py | 6 +-- .../utilities/BingSearchAPIWrapper.py | 7 +--- .../utilities/GoogleSearchAPIWrapper.py | 3 +- .../utilities/GoogleSerperAPIWrapper.py | 19 +++------- .../utilities/SearxSearchWrapper.py | 17 ++++----- .../components/utilities/SerpAPIWrapper.py | 14 ++++--- .../utilities/WikipediaAPIWrapper.py | 4 +- .../utilities/WolframAlphaAPIWrapper.py | 10 ++--- .../langflow/components/vectorstores/FAISS.py | 6 +-- .../vectorstores/MongoDBAtlasVectorSearch.py | 2 +- .../components/vectorstores/Pinecone.py | 12 +++--- .../vectorstores/SupabaseVectorStore.py | 13 +++++-- .../components/vectorstores/Weaviate.py | 14 +++---- 66 files changed, 326 insertions(+), 344 deletions(-) delete mode 100644 src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py diff --git a/src/backend/langflow/components/agents/CSVAgent.py b/src/backend/langflow/components/agents/CSVAgent.py index 30037dff2..b54e5d90d 100644 --- a/src/backend/langflow/components/agents/CSVAgent.py +++ b/src/backend/langflow/components/agents/CSVAgent.py @@ -2,6 +2,7 @@ from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, AgentExecutor from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent + class CSVAgentComponent(CustomComponent): display_name = "CSVAgent" 
description = "Construct a CSV agent from a CSV and tools." diff --git a/src/backend/langflow/components/agents/JsonAgent.py b/src/backend/langflow/components/agents/JsonAgent.py index 2f45b037d..d15366b77 100644 --- a/src/backend/langflow/components/agents/JsonAgent.py +++ b/src/backend/langflow/components/agents/JsonAgent.py @@ -1,10 +1,11 @@ - from langflow import CustomComponent -from langchain.agents import AgentExecutor,create_json_agent +from langchain.agents import AgentExecutor, create_json_agent from langflow.field_typing import ( BaseLanguageModel, ) from langchain_community.agent_toolkits.base import BaseToolkit + + class JsonAgentComponent(CustomComponent): display_name = "JsonAgent" description = "Construct a json agent from an LLM and tools." @@ -20,4 +21,4 @@ class JsonAgentComponent(CustomComponent): llm: BaseLanguageModel, toolkit: BaseToolkit, ) -> AgentExecutor: - return create_json_agent(llm=llm, toolkit=toolkit) \ No newline at end of file + return create_json_agent(llm=llm, toolkit=toolkit) diff --git a/src/backend/langflow/components/agents/SQLAgent.py b/src/backend/langflow/components/agents/SQLAgent.py index 7b1865637..42b1b48f3 100644 --- a/src/backend/langflow/components/agents/SQLAgent.py +++ b/src/backend/langflow/components/agents/SQLAgent.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from typing import Union, Callable from langchain.agents import AgentExecutor @@ -6,7 +5,8 @@ from langflow.field_typing import BaseLanguageModel from langchain_community.agent_toolkits.sql.base import create_sql_agent from langchain.sql_database import SQLDatabase from langchain_community.agent_toolkits import SQLDatabaseToolkit - + + class SQLAgentComponent(CustomComponent): display_name = "SQLAgent" description = "Construct an SQL agent from an LLM and tools." 
@@ -15,7 +15,7 @@ class SQLAgentComponent(CustomComponent): return { "llm": {"display_name": "LLM"}, "database_uri": {"display_name": "Database URI"}, - "verbose": {"display_name": "Verbose", "value": False,"advanced": True}, + "verbose": {"display_name": "Verbose", "value": False, "advanced": True}, } def build( diff --git a/src/backend/langflow/components/agents/VectorStoreAgent.py b/src/backend/langflow/components/agents/VectorStoreAgent.py index 99424a797..b70ea4d59 100644 --- a/src/backend/langflow/components/agents/VectorStoreAgent.py +++ b/src/backend/langflow/components/agents/VectorStoreAgent.py @@ -1,10 +1,10 @@ - from langflow import CustomComponent from langchain.agents import AgentExecutor, create_vectorstore_agent -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit from typing import Union, Callable from langflow.field_typing import BaseLanguageModel + class VectorStoreAgentComponent(CustomComponent): display_name = "VectorStoreAgent" description = "Construct an agent from a Vector Store." 
@@ -20,4 +20,4 @@ class VectorStoreAgentComponent(CustomComponent): llm: BaseLanguageModel, vector_store_toolkit: VectorStoreToolkit, ) -> Union[AgentExecutor, Callable]: - return create_vectorstore_agent(llm=llm,toolkit=vector_store_toolkit) + return create_vectorstore_agent(llm=llm, toolkit=vector_store_toolkit) diff --git a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py index 8a3c3dc6a..3174d9513 100644 --- a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py +++ b/src/backend/langflow/components/agents/VectorStoreRouterAgent.py @@ -1,10 +1,10 @@ - from langflow import CustomComponent from langchain_core.language_models.base import BaseLanguageModel from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit from langchain.agents import create_vectorstore_router_agent from typing import Callable + class VectorStoreRouterAgentComponent(CustomComponent): display_name = "VectorStoreRouterAgent" description = "Construct an agent from a Vector Store Router." 
@@ -15,9 +15,5 @@ class VectorStoreRouterAgentComponent(CustomComponent): "vectorstoreroutertoolkit": {"display_name": "Vector Store Router Toolkit"}, } - def build( - self, - llm: BaseLanguageModel, - vectorstoreroutertoolkit: VectorStoreRouterToolkit - ) -> Callable: - return create_vectorstore_router_agent(llm=llm,toolkit=vectorstoreroutertoolkit) + def build(self, llm: BaseLanguageModel, vectorstoreroutertoolkit: VectorStoreRouterToolkit) -> Callable: + return create_vectorstore_router_agent(llm=llm, toolkit=vectorstoreroutertoolkit) diff --git a/src/backend/langflow/components/agents/ZeroShotAgent.py b/src/backend/langflow/components/agents/ZeroShotAgent.py index 8fba2db72..b65e14e41 100644 --- a/src/backend/langflow/components/agents/ZeroShotAgent.py +++ b/src/backend/langflow/components/agents/ZeroShotAgent.py @@ -1,10 +1,11 @@ +from typing import List -from langflow import CustomComponent from langchain.agents import ZeroShotAgent from langchain_core.tools import BaseTool -from typing import List, Optional +from langflow import CustomComponent from langflow.components.chains.LLMChain import LLMChain + class ZeroShotAgentComponent(CustomComponent): display_name = "ZeroShotAgent" description = "Construct an agent from an LLM and tools." @@ -21,7 +22,7 @@ class ZeroShotAgentComponent(CustomComponent): self, llm: LLMChain, tools: List[BaseTool], - prefix: Optional[str] = "Answer the following questions as best you can. You have access to the following tools:", - suffix: Optional[str] = "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}", + prefix: str = "Answer the following questions as best you can. 
You have access to the following tools:", + suffix: str = "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}", ) -> ZeroShotAgent: return ZeroShotAgent(llm_chain=llm, tools=tools, prefix=prefix, suffix=suffix) diff --git a/src/backend/langflow/components/chains/CombineDocsChain.py b/src/backend/langflow/components/chains/CombineDocsChain.py index 0697d00bb..bb5a32c29 100644 --- a/src/backend/langflow/components/chains/CombineDocsChain.py +++ b/src/backend/langflow/components/chains/CombineDocsChain.py @@ -1,9 +1,9 @@ - from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, Chain from typing import Union, Callable from langchain.chains.combine_documents.base import BaseCombineDocumentsChain + class CombineDocsChainComponent(CustomComponent): display_name = "CombineDocsChain" description = "Load question answering chain." @@ -13,7 +13,7 @@ class CombineDocsChainComponent(CustomComponent): "llm": {"display_name": "LLM"}, "chain_type": { "display_name": "Chain Type", - "options": ['stuff', 'map_reduce', 'map_rerank', 'refine'], + "options": ["stuff", "map_reduce", "map_rerank", "refine"], }, } @@ -22,7 +22,7 @@ class CombineDocsChainComponent(CustomComponent): llm: BaseLanguageModel, chain_type: str, ) -> Union[Chain, Callable]: - if chain_type not in ['stuff', 'map_reduce', 'map_rerank', 'refine']: + if chain_type not in ["stuff", "map_reduce", "map_rerank", "refine"]: raise ValueError(f"Invalid chain_type: {chain_type}") - return BaseCombineDocumentsChain() \ No newline at end of file + return BaseCombineDocumentsChain() diff --git a/src/backend/langflow/components/chains/LLMChain.py b/src/backend/langflow/components/chains/LLMChain.py index 85f7ae96d..c6efd04c7 100644 --- a/src/backend/langflow/components/chains/LLMChain.py +++ b/src/backend/langflow/components/chains/LLMChain.py @@ -28,5 +28,5 @@ class LLMChainComponent(CustomComponent): prompt: BasePromptTemplate, llm: BaseLanguageModel, memory: Optional[BaseMemory] = None, 
- ) -> Union[Chain, Callable,LLMChain]: + ) -> Union[Chain, Callable, LLMChain]: return LLMChain(prompt=prompt, llm=llm, memory=memory) diff --git a/src/backend/langflow/components/chains/LLMCheckerChain.py b/src/backend/langflow/components/chains/LLMCheckerChain.py index 0d2266470..cf5effac8 100644 --- a/src/backend/langflow/components/chains/LLMCheckerChain.py +++ b/src/backend/langflow/components/chains/LLMCheckerChain.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from langchain.chains import LLMCheckerChain from typing import Union, Callable @@ -7,6 +6,7 @@ from langflow.field_typing import ( Chain, ) + class LLMCheckerChainComponent(CustomComponent): display_name = "LLMCheckerChain" description = "" diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/langflow/components/chains/LLMMathChain.py index cd508e478..28f430e6d 100644 --- a/src/backend/langflow/components/chains/LLMMathChain.py +++ b/src/backend/langflow/components/chains/LLMMathChain.py @@ -1,12 +1,10 @@ +from typing import Callable, Optional, Union + +from langchain.chains import LLMChain, LLMMathChain from langflow import CustomComponent -from langchain.chains import LLMChain,LLMMathChain -from typing import Callable, Optional, Union -from langflow.field_typing import ( - BaseLanguageModel, - BaseMemory, - Chain -) +from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain + class LLMMathChainComponent(CustomComponent): display_name = "LLMMathChain" @@ -26,8 +24,8 @@ class LLMMathChainComponent(CustomComponent): self, llm: BaseLanguageModel, llm_chain: LLMChain, - input_key: Optional[str]="question", - output_key: Optional[str]="answer", + input_key: str = "question", + output_key: str = "answer", memory: Optional[BaseMemory] = None, - ) -> Union[LLMMathChain, Callable,Chain]: + ) -> Union[LLMMathChain, Callable, Chain]: return LLMMathChain(llm=llm, llm_chain=llm_chain, input_key=input_key, output_key=output_key, memory=memory) diff --git 
a/src/backend/langflow/components/chains/RetrievalQA.py b/src/backend/langflow/components/chains/RetrievalQA.py index 4f0abb264..5f1232443 100644 --- a/src/backend/langflow/components/chains/RetrievalQA.py +++ b/src/backend/langflow/components/chains/RetrievalQA.py @@ -1,11 +1,11 @@ +from typing import Callable, Optional, Union -from langflow import CustomComponent -from typing import Optional, Union, Callable -from langflow.field_typing import ( - BaseMemory, - BaseRetriever) -from langchain.chains.retrieval_qa.base import BaseRetrievalQA from langchain.chains.combine_documents.base import BaseCombineDocumentsChain +from langchain.chains.retrieval_qa.base import BaseRetrievalQA, RetrievalQA +from langflow import CustomComponent +from langflow.field_typing import BaseMemory, BaseRetriever + + class RetrievalQAComponent(CustomComponent): display_name = "RetrievalQA" description = "Chain for question-answering against an index." @@ -15,8 +15,8 @@ class RetrievalQAComponent(CustomComponent): "combine_documents_chain": {"display_name": "Combine Documents Chain"}, "retriever": {"display_name": "Retriever"}, "memory": {"display_name": "Memory", "required": False}, - "input_key": {"display_name": "Input Key","advanced":True}, - "output_key": {"display_name": "Output Key","advanced":True}, + "input_key": {"display_name": "Input Key", "advanced": True}, + "output_key": {"display_name": "Output Key", "advanced": True}, "return_source_documents": {"display_name": "Return Source Documents"}, } @@ -25,11 +25,11 @@ class RetrievalQAComponent(CustomComponent): combine_documents_chain: BaseCombineDocumentsChain, retriever: BaseRetriever, memory: Optional[BaseMemory] = None, - input_key: Optional[str] = "query", - output_key: Optional[str] = "result", - return_source_documents: Optional[bool] = True, + input_key: str = "query", + output_key: str = "result", + return_source_documents: bool = True, ) -> Union[BaseRetrievalQA, Callable]: - return BaseRetrievalQA( + return RetrievalQA( 
combine_documents_chain=combine_documents_chain, retriever=retriever, memory=memory, diff --git a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py index 452572226..e10aac6bb 100644 --- a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py +++ b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -1,13 +1,11 @@ +from typing import Optional + +from langchain.chains import BaseQAWithSourcesChain, RetrievalQAWithSourcesChain +from langchain.chains.combine_documents.base import BaseCombineDocumentsChain from langflow import CustomComponent -from langchain.chains import RetrievalQAWithSourcesChain -from langchain.chains.combine_documents.base import BaseCombineDocumentsChain -from typing import Optional -from langflow.field_typing import ( - BaseMemory, - BaseRetriever, - BaseLanguageModel -) +from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever + class RetrievalQAWithSourcesChainComponent(CustomComponent): display_name = "RetrievalQAWithSourcesChain" @@ -18,14 +16,12 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent): "llm": {"display_name": "LLM"}, "chain_type": { "display_name": "Chain Type", - "options": ['stuff', 'map_reduce', 'map_rerank', 'refine'], + "options": ["stuff", "map_reduce", "map_rerank", "refine"], }, "memory": {"display_name": "Memory"}, "return_source_documents": {"display_name": "Return Source Documents"}, - } - def build( self, retriever: BaseRetriever, @@ -34,5 +30,12 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent): chain_type: str, memory: Optional[BaseMemory] = None, return_source_documents: Optional[bool] = True, - ) -> RetrievalQAWithSourcesChain: - return RetrievalQAWithSourcesChain(combine_documents_chain=combine_documents_chain,memory=memory,return_source_documents=return_source_documents,retriever=retriever).from_chain_type(llm=llm, chain_type=chain_type) \ 
No newline at end of file + ) -> BaseQAWithSourcesChain: + return RetrievalQAWithSourcesChain.from_chain_type( + llm=llm, + chain_type=chain_type, + combine_documents_chain=combine_documents_chain, + memory=memory, + return_source_documents=return_source_documents, + retriever=retriever, + ) diff --git a/src/backend/langflow/components/chains/SQLDatabaseChain.py b/src/backend/langflow/components/chains/SQLDatabaseChain.py index 80be78e72..56bd433ba 100644 --- a/src/backend/langflow/components/chains/SQLDatabaseChain.py +++ b/src/backend/langflow/components/chains/SQLDatabaseChain.py @@ -1,14 +1,10 @@ - from langflow import CustomComponent from typing import Callable, Union -from langflow.field_typing import ( - BasePromptTemplate, - BaseLanguageModel, - Chain -) +from langflow.field_typing import BasePromptTemplate, BaseLanguageModel, Chain from langchain_community.utilities.sql_database import SQLDatabase from langchain_experimental.sql.base import SQLDatabaseChain + class SQLDatabaseChainComponent(CustomComponent): display_name = "SQLDatabaseChain" description = "" @@ -25,5 +21,5 @@ class SQLDatabaseChainComponent(CustomComponent): db: SQLDatabase, llm: BaseLanguageModel, prompt: BasePromptTemplate, - ) -> Union[Chain, Callable,SQLDatabaseChain]: + ) -> Union[Chain, Callable, SQLDatabaseChain]: return SQLDatabaseChain.from_llm(llm=llm, db=db, prompt=prompt) diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py index f0a9236c1..eea64c2e1 100644 --- a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py +++ b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py @@ -17,10 +17,10 @@ class AZLyricsLoaderComponent(CustomComponent): def build(self, metadata: Optional[Dict] = None, web_path: str = "") -> Document: documents = AZLyricsLoader(web_path=web_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: 
document.metadata = metadata else: document.metadata.update(metadata) - return documents \ No newline at end of file + return documents diff --git a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py index 89a271945..eaff8fb4a 100644 --- a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py +++ b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py @@ -28,7 +28,7 @@ class AirbyteJSONLoaderComponent(CustomComponent): def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: documents = AirbyteJSONLoader(file_path=file_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/CSVLoader.py b/src/backend/langflow/components/documentloaders/CSVLoader.py index f7d49da77..ac2b271e9 100644 --- a/src/backend/langflow/components/documentloaders/CSVLoader.py +++ b/src/backend/langflow/components/documentloaders/CSVLoader.py @@ -1,9 +1,9 @@ - from langflow import CustomComponent from typing import List from langchain_community.document_loaders.csv_loader import CSVLoader from langchain.docstore.document import Document + class CSVLoaderComponent(CustomComponent): display_name = "CSVLoader" description = "Load a `CSV` file into a list of Documents." 
@@ -23,13 +23,9 @@ class CSVLoaderComponent(CustomComponent): }, } - def build( - self, - file_path: str, - metadata: dict - ) -> List[Document]: + def build(self, file_path: str, metadata: dict) -> List[Document]: documents = CSVLoader(file_path=file_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py index ccc912c0c..2c243d25d 100644 --- a/src/backend/langflow/components/documentloaders/CoNLLULoader.py +++ b/src/backend/langflow/components/documentloaders/CoNLLULoader.py @@ -26,7 +26,7 @@ class CoNLLULoaderComponent(CustomComponent): def build(self, file_path: str, metadata: dict) -> Document: documents = CoNLLULoader(file_path=file_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py index 44fc8c9ab..dee77c1da 100644 --- a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py +++ b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py @@ -1,13 +1,15 @@ - from langflow import CustomComponent from langchain.docstore.document import Document from typing import Optional from langchain_community.document_loaders.college_confidential import CollegeConfidentialLoader + class CollegeConfidentialLoaderComponent(CustomComponent): display_name = "CollegeConfidentialLoader" description = "Load `College Confidential` webpages." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential" + documentation = ( + "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential" + ) def build_config(self): return { @@ -15,13 +17,9 @@ class CollegeConfidentialLoaderComponent(CustomComponent): "web_path": {"display_name": "Web Page", "required": True}, } - def build( - self, - web_path: str, - metadata: Optional[dict] = {} - ) -> Document: + def build(self, web_path: str, metadata: Optional[dict] = {}) -> Document: documents = CollegeConfidentialLoader(web_path=web_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/DirectoryLoader.py b/src/backend/langflow/components/documentloaders/DirectoryLoader.py index 41ff19b7b..0b214dd34 100644 --- a/src/backend/langflow/components/documentloaders/DirectoryLoader.py +++ b/src/backend/langflow/components/documentloaders/DirectoryLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent from langchain.docstore.document import Document from typing import Optional, Dict, Any + class DirectoryLoaderComponent(CustomComponent): display_name = "DirectoryLoader" description = "Load from a directory." diff --git a/src/backend/langflow/components/documentloaders/EverNoteLoader.py b/src/backend/langflow/components/documentloaders/EverNoteLoader.py index 02d8882d3..ccf925e27 100644 --- a/src/backend/langflow/components/documentloaders/EverNoteLoader.py +++ b/src/backend/langflow/components/documentloaders/EverNoteLoader.py @@ -3,6 +3,7 @@ from langflow.field_typing import Document from typing import Optional, Dict from langchain_community.document_loaders.evernote import EverNoteLoader + class EverNoteLoaderComponent(CustomComponent): display_name = "EverNoteLoader" description = "Load from `EverNote`." 
@@ -28,7 +29,7 @@ class EverNoteLoaderComponent(CustomComponent): def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: documents = EverNoteLoader(file_path=file_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py index deb636eac..244066a7e 100644 --- a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py +++ b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py @@ -1,12 +1,15 @@ - from langflow import CustomComponent from langchain.docstore.document import Document from typing import Optional, Dict from langchain_community.document_loaders.facebook_chat import FacebookChatLoader + + class FacebookChatLoaderComponent(CustomComponent): display_name = "FacebookChatLoader" description = "Load `Facebook Chat` messages directory dump." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat" + documentation = ( + "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat" + ) def build_config(self): return { @@ -25,10 +28,10 @@ class FacebookChatLoaderComponent(CustomComponent): def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: documents = FacebookChatLoader(file_path=file_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata else: document.metadata.update(metadata) - return documents \ No newline at end of file + return documents diff --git a/src/backend/langflow/components/documentloaders/GitbookLoader.py b/src/backend/langflow/components/documentloaders/GitbookLoader.py index e859eccf0..f53f5959f 100644 --- a/src/backend/langflow/components/documentloaders/GitbookLoader.py +++ b/src/backend/langflow/components/documentloaders/GitbookLoader.py @@ -12,7 +12,7 @@ class GitbookLoaderComponent(CustomComponent): return { "metadata": { "display_name": "Metadata", - "field_type":"dict", + "field_type": "dict", "value": {}, }, "web_page": { @@ -23,7 +23,7 @@ class GitbookLoaderComponent(CustomComponent): def build(self, metadata: Optional[Dict] = None, web_page: str = "") -> Document: documents = GitbookLoader(web_page=web_page).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/HNLoader.py b/src/backend/langflow/components/documentloaders/HNLoader.py index d54591d46..6e08f26e2 100644 --- a/src/backend/langflow/components/documentloaders/HNLoader.py +++ b/src/backend/langflow/components/documentloaders/HNLoader.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from typing import Optional, Dict from langchain_community.document_loaders.hn import HNLoader @@ 
-10,25 +9,17 @@ class HNLoaderComponent(CustomComponent): def build_config(self): return { - "metadata": { - "display_name": "Metadata", - "value": {}, - "required": False, - "field_type": "dict" - }, - "web_path": { - "display_name": "Web Page", - "required": True - }, + "metadata": {"display_name": "Metadata", "value": {}, "required": False, "field_type": "dict"}, + "web_path": {"display_name": "Web Page", "required": True}, } def build( - self, + self, web_path: str, - metadata: Optional[Dict] = None, + metadata: Optional[Dict] = None, ) -> HNLoader: documents = HNLoader(web_path=web_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/IFixitLoader.py b/src/backend/langflow/components/documentloaders/IFixitLoader.py index b6a439b3e..48c5f7b47 100644 --- a/src/backend/langflow/components/documentloaders/IFixitLoader.py +++ b/src/backend/langflow/components/documentloaders/IFixitLoader.py @@ -1,6 +1,8 @@ +from typing import Dict, List, Optional + +from langchain_community.document_loaders.ifixit import IFixitLoader from langflow import CustomComponent from langflow.field_typing import Document -from typing import Optional, Dict class IFixitLoaderComponent(CustomComponent): @@ -14,7 +16,17 @@ class IFixitLoaderComponent(CustomComponent): "web_path": {"display_name": "Web Page", "type": "str"}, } - def build(self, web_path: str, metadata: Optional[Dict] = None) -> Document: + def build(self, web_path: str, metadata: Optional[Dict] = None) -> List[Document]: # Assuming IFixitLoader is the correct class name from the langchain library, # and it has a load method that returns a Document object. 
- return IFixitLoader(web_path=web_path, metadata=metadata).load() + if metadata is None: + metadata = {} + + docs = IFixitLoader(web_path=web_path).load() + + if metadata: + for doc in docs: + if doc.metadata is None: + doc.metadata = {} + doc.metadata.update(metadata) + return docs diff --git a/src/backend/langflow/components/documentloaders/IMSDbLoader.py b/src/backend/langflow/components/documentloaders/IMSDbLoader.py index 37c2c542c..1eebcc444 100644 --- a/src/backend/langflow/components/documentloaders/IMSDbLoader.py +++ b/src/backend/langflow/components/documentloaders/IMSDbLoader.py @@ -21,7 +21,7 @@ class IMSDbLoaderComponent(CustomComponent): web_path: str = "", ) -> Document: documents = IMSDbLoader(web_path=web_path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py b/src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py deleted file mode 100644 index fd3bc6ae8..000000000 --- a/src/backend/langflow/components/documentloaders/PyPDFDirectoryLoader.py +++ /dev/null @@ -1,23 +0,0 @@ - -from langflow import CustomComponent -from langchain.documents import Document -from typing import Optional, Dict - -class PyPDFDirectoryLoaderComponent(CustomComponent): - display_name = "PyPDFDirectoryLoader" - description = "Load a directory with `PDF` files using `pypdf` and chunks at character level." 
- - def build_config(self): - return { - "metadata": {"display_name": "Metadata", "required": False}, - "path": {"display_name": "Local directory", "required": True}, - } - - def build( - self, - path: str, - metadata: Optional[Dict] = None, - ) -> Document: - # Assuming there is a PyPDFDirectoryLoader class that takes these parameters - # Since the actual implementation is not provided, this is a placeholder - return PyPDFDirectoryLoader(path=path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/PyPDFLoader.py b/src/backend/langflow/components/documentloaders/PyPDFLoader.py index bf3667694..b44a90577 100644 --- a/src/backend/langflow/components/documentloaders/PyPDFLoader.py +++ b/src/backend/langflow/components/documentloaders/PyPDFLoader.py @@ -1,7 +1,10 @@ +from typing import Dict, List, Optional + +from langchain_community.document_loaders.pdf import PyPDFLoader +from langchain_core.documents import Document from langflow import CustomComponent -from langchain.document_loaders import BaseLoader -from typing import Optional, Dict + class PyPDFLoaderComponent(CustomComponent): display_name = "PyPDFLoader" @@ -22,10 +25,17 @@ class PyPDFLoaderComponent(CustomComponent): "required": False, "type": "dict", "show": True, - } + }, } - def build(self, file_path: str, metadata: Optional[Dict] = None) -> BaseLoader: + def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: # Assuming there is a PyPDFLoader class that takes file_path and metadata as parameters # and inherits from BaseLoader - return PyPDFLoader(file_path=file_path, metadata=metadata) + docs = PyPDFLoader(file_path=file_path).load() + + if metadata: + for doc in docs: + if doc.metadata is None: + doc.metadata = {} + doc.metadata.update(metadata) + return docs diff --git a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py index 1c78b7e34..55284b9bd 100644 --- 
a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py +++ b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py @@ -1,5 +1,5 @@ from langflow import CustomComponent -from typing import Dict, Optional,List +from typing import Dict, Optional, List from langchain_core.documents import Document from langchain_community.document_loaders.readthedocs import ReadTheDocsLoader @@ -10,7 +10,7 @@ class ReadTheDocsLoaderComponent(CustomComponent): def build_config(self): return { - "metadata": {"display_name": "Metadata", "default": {},"field_type": "dict"}, + "metadata": {"display_name": "Metadata", "default": {}, "field_type": "dict"}, "path": {"display_name": "Local directory", "required": True}, } @@ -20,10 +20,10 @@ class ReadTheDocsLoaderComponent(CustomComponent): metadata: Optional[Dict] = None, ) -> List[Document]: documents = ReadTheDocsLoader(path=path).load() - if(metadata): + if metadata: for document in documents: if not document.metadata: document.metadata = metadata else: document.metadata.update(metadata) - return documents \ No newline at end of file + return documents diff --git a/src/backend/langflow/components/documentloaders/SRTLoader.py b/src/backend/langflow/components/documentloaders/SRTLoader.py index 83a0cf1e8..5c640dee5 100644 --- a/src/backend/langflow/components/documentloaders/SRTLoader.py +++ b/src/backend/langflow/components/documentloaders/SRTLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent from langchain.documents import Document from typing import Optional, Dict + class SRTLoaderComponent(CustomComponent): display_name = "SRTLoader" description = "Load `.srt` (subtitle) files." 
diff --git a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py index 6a38d1488..782f1cea9 100644 --- a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py +++ b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py @@ -1,8 +1,9 @@ - from langflow import CustomComponent from typing import Optional, Dict, List from langchain_core.documents import Document from langchain_community.document_loaders.slack_directory import SlackDirectoryLoader + + class SlackDirectoryLoaderComponent(CustomComponent): display_name = "SlackDirectoryLoader" description = "Load from a `Slack` directory dump." @@ -10,7 +11,7 @@ class SlackDirectoryLoaderComponent(CustomComponent): def build_config(self): return { - "zip_path": {"display_name": "Path to zip file","field_type": "file","file_types":[".zip"]}, + "zip_path": {"display_name": "Path to zip file", "field_type": "file", "file_types": [".zip"]}, "metadata": {"display_name": "Metadata", "field_type": "dict"}, "workspace_url": {"display_name": "Workspace URL"}, } @@ -21,8 +22,8 @@ class SlackDirectoryLoaderComponent(CustomComponent): metadata: Optional[Dict] = None, workspace_url: Optional[str] = None, ) -> List[Document]: - documents = SlackDirectoryLoader(zip_path=zip_path,workspace_url=workspace_url).load() - if(metadata): + documents = SlackDirectoryLoader(zip_path=zip_path, workspace_url=workspace_url).load() + if metadata: for document in documents: if not document.metadata: document.metadata = metadata diff --git a/src/backend/langflow/components/documentloaders/TextLoader.py b/src/backend/langflow/components/documentloaders/TextLoader.py index a8e6e18f7..0c4033898 100644 --- a/src/backend/langflow/components/documentloaders/TextLoader.py +++ b/src/backend/langflow/components/documentloaders/TextLoader.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent from langchain.data_connections import 
Document from typing import Optional, Dict + class TextLoaderComponent(CustomComponent): display_name = "TextLoader" description = "Load text file." diff --git a/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py index 46dc5aa2e..41e5a468e 100644 --- a/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py +++ b/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py @@ -1,6 +1,8 @@ +from typing import Dict, List, Optional + from langchain import CustomComponent -from langflow.field_typing import Document -from typing import Optional, Dict +from langchain_community.document_loaders import UnstructuredHTMLLoader +from langchain_core.documents import Document class UnstructuredHTMLLoaderComponent(CustomComponent): @@ -14,7 +16,14 @@ class UnstructuredHTMLLoaderComponent(CustomComponent): "metadata": {"display_name": "Metadata"}, } - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: # Assuming the existence of a function or class named UnstructuredHTMLLoader that # loads HTML and creates a Document object; Replace with actual implementation. 
- return UnstructuredHTMLLoader(file_path=file_path, metadata=metadata) + docs = UnstructuredHTMLLoader(file_path=file_path).load() + + if metadata: + for doc in docs: + if doc.metadata is None: + doc.metadata = {} + doc.metadata.update(metadata) + return docs diff --git a/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py index 11cec9fc1..797ffbc12 100644 --- a/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py +++ b/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from langchain.document_loaders import Document from typing import Optional, Dict diff --git a/src/backend/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/langflow/components/embeddings/CohereEmbeddings.py index cac7c1d75..3fccc8c2c 100644 --- a/src/backend/langflow/components/embeddings/CohereEmbeddings.py +++ b/src/backend/langflow/components/embeddings/CohereEmbeddings.py @@ -1,7 +1,8 @@ +from typing import Optional + +from langchain_community.embeddings.cohere import CohereEmbeddings from langflow import CustomComponent -from langchain_community.embeddings.cohere import CohereEmbeddings -from typing import Optional class CohereEmbeddingsComponent(CustomComponent): @@ -10,7 +11,7 @@ class CohereEmbeddingsComponent(CustomComponent): def build_config(self): return { - "cohere_api_key": {"display_name": "Cohere API Key","password":True}, + "cohere_api_key": {"display_name": "Cohere API Key", "password": True}, "model": {"display_name": "Model", "default": "embed-english-v2.0", "advanced": True}, "truncate": {"display_name": "Truncate", "advanced": True}, "max_retries": {"display_name": "Max Retries", "advanced": True}, @@ -24,7 +25,7 @@ class CohereEmbeddingsComponent(CustomComponent): max_retries: Optional[int] = None, model: str = "embed-english-v2.0", truncate: 
Optional[str] = None, - user_agent: Optional[str] = "langchain", + user_agent: str = "langchain", ) -> CohereEmbeddings: return CohereEmbeddings( max_retries=max_retries, diff --git a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py index 44ad25212..6f3540358 100644 --- a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py +++ b/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py @@ -2,6 +2,7 @@ from langflow import CustomComponent from typing import Optional, Dict from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings + class HuggingFaceEmbeddingsComponent(CustomComponent): display_name = "HuggingFaceEmbeddings" description = "HuggingFace sentence_transformers embedding models." @@ -12,8 +13,8 @@ class HuggingFaceEmbeddingsComponent(CustomComponent): def build_config(self): return { "cache_folder": {"display_name": "Cache Folder", "advanced": True}, - "encode_kwargs": {"display_name": "Encode Kwargs", "advanced": True,"field_type":"dict"}, - "model_kwargs": {"display_name": "Model Kwargs","field_type":"dict", "advanced": True}, + "encode_kwargs": {"display_name": "Encode Kwargs", "advanced": True, "field_type": "dict"}, + "model_kwargs": {"display_name": "Model Kwargs", "field_type": "dict", "advanced": True}, "model_name": {"display_name": "Model Name"}, "multi_process": {"display_name": "Multi Process", "advanced": True}, } diff --git a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py index 6303389c8..6838a7759 100644 --- a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py @@ -42,9 +42,9 @@ class OpenAIEmbeddingsComponent(CustomComponent): "max_retries": {"display_name": "Max Retries", "advanced": True}, "model": {"display_name": "Model", "advanced": True}, 
"model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, - "openai_api_base": {"display_name": "OpenAI API Base","password":True, "advanced": True}, - "openai_api_key": {"display_name": "OpenAI API Key","password":True}, - "openai_api_type": {"display_name": "OpenAI API Type", "advanced": True,"password":True}, + "openai_api_base": {"display_name": "OpenAI API Base", "password": True, "advanced": True}, + "openai_api_key": {"display_name": "OpenAI API Key", "password": True}, + "openai_api_type": {"display_name": "OpenAI API Type", "advanced": True, "password": True}, "openai_api_version": { "display_name": "OpenAI API Version", "advanced": True, diff --git a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py index 542582a2e..053fd6c7f 100644 --- a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py @@ -1,20 +1,20 @@ - from langflow import CustomComponent from langchain.embeddings import VertexAIEmbeddings from typing import Optional, List + class VertexAIEmbeddingsComponent(CustomComponent): display_name = "VertexAIEmbeddings" description = "Google Cloud VertexAI embedding models." 
def build_config(self): return { - "credentials": {"display_name": "Credentials", "value": '', "file_types": ['.json'],"field_type": "file"}, + "credentials": {"display_name": "Credentials", "value": "", "file_types": [".json"], "field_type": "file"}, "instance": {"display_name": "instance", "advanced": True, "field_type": "dict"}, - "location": {"display_name": "Location", "value": 'us-central1', "advanced": True}, + "location": {"display_name": "Location", "value": "us-central1", "advanced": True}, "max_output_tokens": {"display_name": "Max Output Tokens", "value": 128}, "max_retries": {"display_name": "Max Retries", "value": 6, "advanced": True}, - "model_name": {"display_name": "Model Name", "value": 'textembedding-gecko'}, + "model_name": {"display_name": "Model Name", "value": "textembedding-gecko"}, "n": {"display_name": "N", "value": 1, "advanced": True}, "project": {"display_name": "Project", "advanced": True}, "request_parallelism": {"display_name": "Request Parallelism", "value": 5, "advanced": True}, @@ -29,10 +29,10 @@ class VertexAIEmbeddingsComponent(CustomComponent): self, instance: Optional[str] = None, credentials: Optional[str] = None, - location: str = 'us-central1', + location: str = "us-central1", max_output_tokens: int = 128, max_retries: int = 6, - model_name: str = 'textembedding-gecko', + model_name: str = "textembedding-gecko", n: int = 1, project: Optional[str] = None, request_parallelism: int = 5, diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/langflow/components/llms/Anthropic.py index a588fa3a8..45292a6ea 100644 --- a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from typing import Optional -from langflow.field_typing import BaseLanguageModel,NestedDict +from langflow.field_typing import BaseLanguageModel, NestedDict from langchain_community.llms.anthropic import Anthropic @@ -21,7 +21,7 @@ 
class AnthropicComponent(CustomComponent): }, "model_kwargs": { "display_name": "Model Kwargs", - "field_type": 'NestedDict', + "field_type": "NestedDict", "advanced": True, }, "temperature": { @@ -42,4 +42,4 @@ class AnthropicComponent(CustomComponent): anthropic_api_url=anthropic_api_url, model_kwargs=model_kwargs, temperature=temperature, - ) \ No newline at end of file + ) diff --git a/src/backend/langflow/components/llms/AzureChatOpenAI.py b/src/backend/langflow/components/llms/AzureChatOpenAI.py index 53c2af9bb..1bce563bf 100644 --- a/src/backend/langflow/components/llms/AzureChatOpenAI.py +++ b/src/backend/langflow/components/llms/AzureChatOpenAI.py @@ -26,7 +26,7 @@ class AzureChatOpenAIComponent(CustomComponent): "2023-07-01-preview", "2023-08-01-preview", "2023-09-01-preview", - "2023-12-01-preview" + "2023-12-01-preview", ] def build_config(self): diff --git a/src/backend/langflow/components/llms/CTransformers.py b/src/backend/langflow/components/llms/CTransformers.py index 46b1c021e..f4826bf86 100644 --- a/src/backend/langflow/components/llms/CTransformers.py +++ b/src/backend/langflow/components/llms/CTransformers.py @@ -1,8 +1,8 @@ - from langflow import CustomComponent from langchain_community.llms.ctransformers import CTransformers from typing import Optional, Dict + class CTransformersComponent(CustomComponent): display_name = "CTransformers" description = "C Transformers LLM models" @@ -11,16 +11,21 @@ class CTransformersComponent(CustomComponent): def build_config(self): return { "model": {"display_name": "Model", "required": True}, - "model_file": {"display_name": "Model File", "required": False,"field_type":"file", "file_types":[".bin"]}, + "model_file": { + "display_name": "Model File", + "required": False, + "field_type": "file", + "file_types": [".bin"], + }, "model_type": {"display_name": "Model Type", "required": True}, - "config": {"display_name": "Config", "advanced": True, "required": 
False,"field_type":"dict","value":'{"top_k":40,"top_p":0.95,"temperature":0.8,"repetition_penalty":1.1,"last_n_tokens":64,"seed":-1,"max_new_tokens":256,"stop":"","stream":"False","reset":"True","batch_size":8,"threads":-1,"context_length":-1,"gpu_layers":0}'} + "config": { + "display_name": "Config", + "advanced": True, + "required": False, + "field_type": "dict", + "value": '{"top_k":40,"top_p":0.95,"temperature":0.8,"repetition_penalty":1.1,"last_n_tokens":64,"seed":-1,"max_new_tokens":256,"stop":"","stream":"False","reset":"True","batch_size":8,"threads":-1,"context_length":-1,"gpu_layers":0}', + }, } - def build( - self, - model: str, - model_file: str, - model_type: str, - config: Optional[Dict] = None - ) -> CTransformers: + def build(self, model: str, model_file: str, model_type: str, config: Optional[Dict] = None) -> CTransformers: return CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) diff --git a/src/backend/langflow/components/llms/ChatAnthropic.py b/src/backend/langflow/components/llms/ChatAnthropic.py index a6e222ded..e6f8b1408 100644 --- a/src/backend/langflow/components/llms/ChatAnthropic.py +++ b/src/backend/langflow/components/llms/ChatAnthropic.py @@ -1,8 +1,9 @@ - from langflow import CustomComponent from typing import Optional, Union, Callable from langflow.field_typing import BaseLanguageModel from langchain_community.chat_models.anthropic import ChatAnthropic + + class ChatAnthropicComponent(CustomComponent): display_name = "ChatAnthropic" description = "`Anthropic` chat large language models." 
@@ -21,7 +22,7 @@ class ChatAnthropicComponent(CustomComponent): }, "model_kwargs": { "display_name": "Model Kwargs", - "field_type": 'dict', + "field_type": "dict", "advanced": True, }, "temperature": { @@ -37,7 +38,6 @@ class ChatAnthropicComponent(CustomComponent): model_kwargs: dict = {}, temperature: Optional[float] = None, ) -> Union[BaseLanguageModel, Callable]: - return ChatAnthropic( anthropic_api_key=anthropic_api_key, anthropic_api_url=anthropic_api_url, diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py index 44fd8dd40..c2c3e9234 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -1,7 +1,9 @@ -from langflow import CustomComponent -from langchain.llms import BaseLLM from typing import Optional, Union + +from langchain.llms import BaseLLM from langchain_community.chat_models.openai import ChatOpenAI + +from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, NestedDict @@ -66,12 +68,12 @@ class ChatOpenAIComponent(CustomComponent): self, max_tokens: Optional[int] = 256, model_kwargs: Optional[NestedDict] = {}, - model_name: Optional[str] = "gpt-4-1106-preview", + model_name: str = "gpt-4-1106-preview", openai_api_base: Optional[str] = None, openai_api_key: Optional[str] = None, temperature: float = 0.7, ) -> Union[BaseLanguageModel, BaseLLM]: - if(not openai_api_base): + if not openai_api_base: openai_api_base = "https://api.openai.com/v1" return ChatOpenAI( max_tokens=max_tokens, diff --git a/src/backend/langflow/components/llms/ChatVertexAI.py b/src/backend/langflow/components/llms/ChatVertexAI.py index 072cc0a7f..df0ca3f65 100644 --- a/src/backend/langflow/components/llms/ChatVertexAI.py +++ b/src/backend/langflow/components/llms/ChatVertexAI.py @@ -1,9 +1,10 @@ -from langflow import CustomComponent from typing import List, Optional, Union -from langchain_core.messages.base import 
BaseMessage -from langchain_community.chat_models.vertexai import ChatVertexAI -from langflow.field_typing import BaseLanguageModel + from langchain.llms import BaseLLM +from langchain_community.chat_models.vertexai import ChatVertexAI +from langchain_core.messages.base import BaseMessage +from langflow import CustomComponent +from langflow.field_typing import BaseLanguageModel class ChatVertexAIComponent(CustomComponent): @@ -63,10 +64,10 @@ class ChatVertexAIComponent(CustomComponent): self, credentials: Optional[str], project: str, - examples: Optional[List[BaseMessage]]=[], - location: Optional[str] = "us-central1", + examples: Optional[List[BaseMessage]] = [], + location: str = "us-central1", max_output_tokens: Optional[int] = 128, - model_name: Optional[str] = "chat-bison", + model_name: str = "chat-bison", temperature: Optional[float] = 0.0, top_k: Optional[int] = 40, top_p: Optional[float] = 0.95, diff --git a/src/backend/langflow/components/llms/Cohere.py b/src/backend/langflow/components/llms/Cohere.py index 1e0fea8fb..41513e523 100644 --- a/src/backend/langflow/components/llms/Cohere.py +++ b/src/backend/langflow/components/llms/Cohere.py @@ -1,9 +1,9 @@ - from langflow import CustomComponent from langchain_core.language_models.base import BaseLanguageModel from typing import Optional from langchain_community.llms.cohere import Cohere + class CohereComponent(CustomComponent): display_name = "Cohere" description = "Cohere large language models." 
@@ -11,23 +11,9 @@ class CohereComponent(CustomComponent): def build_config(self): return { - "cohere_api_key": { - "display_name": "Cohere API Key", - "type": "password", - "password": True - }, - "max_tokens": { - "display_name": "Max Tokens", - "default": 256, - "type": "int", - "show": True - }, - "temperature": { - "display_name": "Temperature", - "default": 0.75, - "type": "float", - "show": True - }, + "cohere_api_key": {"display_name": "Cohere API Key", "type": "password", "password": True}, + "max_tokens": {"display_name": "Max Tokens", "default": 256, "type": "int", "show": True}, + "temperature": {"display_name": "Temperature", "default": 0.75, "type": "float", "show": True}, } def build( diff --git a/src/backend/langflow/components/llms/LlamaCpp.py b/src/backend/langflow/components/llms/LlamaCpp.py index 109bb1a7b..d10c9bec9 100644 --- a/src/backend/langflow/components/llms/LlamaCpp.py +++ b/src/backend/langflow/components/llms/LlamaCpp.py @@ -1,8 +1,8 @@ - from typing import Optional, List, Dict, Any from langflow import CustomComponent from langchain_community.llms.llamacpp import LlamaCpp + class LlamaCppComponent(CustomComponent): display_name = "LlamaCpp" description = "llama.cpp model." 
@@ -24,7 +24,12 @@ class LlamaCppComponent(CustomComponent): "max_tokens": {"display_name": "Max Tokens", "advanced": True}, "metadata": {"display_name": "Metadata", "advanced": True}, "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, - "model_path": {"display_name": "Model Path","field_type":"file", "file_types":[".bin"],"required":True}, + "model_path": { + "display_name": "Model Path", + "field_type": "file", + "file_types": [".bin"], + "required": True, + }, "n_batch": {"display_name": "N Batch", "advanced": True}, "n_ctx": {"display_name": "N Ctx", "advanced": True}, "n_gpu_layers": {"display_name": "N GPU Layers", "advanced": True}, diff --git a/src/backend/langflow/components/llms/OpenAI.py b/src/backend/langflow/components/llms/OpenAI.py index 15091f6e7..f4f26db2f 100644 --- a/src/backend/langflow/components/llms/OpenAI.py +++ b/src/backend/langflow/components/llms/OpenAI.py @@ -1,7 +1,9 @@ +from typing import Dict, Optional + +from langchain_openai.llms.base import OpenAI from langflow import CustomComponent -from typing import Optional, Dict -from langchain_openai.llms.base import OpenAI + class OpenAIComponent(CustomComponent): display_name = "OpenAI" @@ -41,12 +43,12 @@ class OpenAIComponent(CustomComponent): self, max_tokens: Optional[int] = 256, model_kwargs: Optional[Dict] = None, - model_name: Optional[str] = "text-davinci-003", + model_name: str = "text-davinci-003", openai_api_base: Optional[str] = "", openai_api_key: str = "", temperature: Optional[float] = 0.7, ) -> OpenAI: - if(not openai_api_base): + if not openai_api_base: openai_api_base = "https://api.openai.com/v1" return OpenAI( max_tokens=max_tokens, diff --git a/src/backend/langflow/components/llms/VertexAI.py b/src/backend/langflow/components/llms/VertexAI.py index 84f36330b..74f862c25 100644 --- a/src/backend/langflow/components/llms/VertexAI.py +++ b/src/backend/langflow/components/llms/VertexAI.py @@ -1,9 +1,9 @@ - from langflow import CustomComponent from 
langchain.llms import BaseLLM from typing import Optional, Union, Callable, Dict from langchain_community.llms.vertexai import VertexAI + class VertexAIComponent(CustomComponent): display_name = "VertexAI" description = "Google Vertex AI large language models" @@ -20,7 +20,7 @@ class VertexAIComponent(CustomComponent): "location": { "display_name": "Location", "type": "str", - "advanced":True, + "advanced": True, "value": "us-central1", "required": False, }, @@ -29,14 +29,14 @@ class VertexAIComponent(CustomComponent): "field_type": "int", "value": 128, "required": False, - "advanced":True + "advanced": True, }, "max_retries": { "display_name": "Max Retries", "type": "int", "value": 6, "required": False, - "advanced":True + "advanced": True, }, "metadata": { "display_name": "Metadata", @@ -51,7 +51,7 @@ class VertexAIComponent(CustomComponent): "required": False, }, "n": { - "advanced":True, + "advanced": True, "display_name": "N", "field_type": "int", "value": 1, @@ -68,42 +68,36 @@ class VertexAIComponent(CustomComponent): "field_type": "int", "value": 5, "required": False, - "advanced":True + "advanced": True, }, "streaming": { "display_name": "Streaming", "field_type": "bool", "value": False, "required": False, - "advanced":True + "advanced": True, }, "temperature": { "display_name": "Temperature", "field_type": "float", "value": 0.0, "required": False, - "advanced":True - }, - "top_k": { - "display_name": "Top K", - "type": "int", - "default": 40, - "required": False, - "advanced":True + "advanced": True, }, + "top_k": {"display_name": "Top K", "type": "int", "default": 40, "required": False, "advanced": True}, "top_p": { "display_name": "Top P", "field_type": "float", "value": 0.95, "required": False, - "advanced":True + "advanced": True, }, "tuned_model_name": { "display_name": "Tuned Model Name", "type": "str", "required": False, "value": None, - "advanced":True + "advanced": True, }, "verbose": { "display_name": "Verbose", @@ -111,10 +105,7 @@ class 
VertexAIComponent(CustomComponent): "value": False, "required": False, }, - "name":{ - "display_name":"Name", - "field_type":"str" - }, + "name": {"display_name": "Name", "field_type": "str"}, } def build( @@ -126,7 +117,7 @@ class VertexAIComponent(CustomComponent): metadata: Dict = None, model_name: str = "text-bison", n: int = 1, - name:Optional[str] = None, + name: Optional[str] = None, project: Optional[str] = None, request_parallelism: int = 5, streaming: bool = False, diff --git a/src/backend/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/langflow/components/retrievers/MultiQueryRetriever.py index 762756794..8c95d6d2a 100644 --- a/src/backend/langflow/components/retrievers/MultiQueryRetriever.py +++ b/src/backend/langflow/components/retrievers/MultiQueryRetriever.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from langchain.retrievers import MultiQueryRetriever from typing import Optional, Union, Callable @@ -8,6 +7,7 @@ from langflow.field_typing import ( BaseRetriever, ) + class MultiQueryRetrieverComponent(CustomComponent): display_name = "MultiQueryRetriever" description = "Initialize from llm using default template." @@ -16,22 +16,25 @@ class MultiQueryRetrieverComponent(CustomComponent): def build_config(self): return { "llm": {"display_name": "LLM"}, - "prompt": {"display_name": "Prompt", "default": { - "input_variables": ["question"], - "input_types": {}, - "output_parser": None, - "partial_variables": {}, - "template": 'You are an AI language model assistant. Your task is \n' - 'to generate 3 different versions of the given user \n' - 'question to retrieve relevant documents from a vector database. \n' - 'By generating multiple perspectives on the user question, \n' - 'your goal is to help the user overcome some of the limitations \n' - 'of distance-based similarity search. Provide these alternative \n' - 'questions separated by newlines. 
Original question: {question}', - "template_format": "f-string", - "validate_template": False, - "_type": "prompt" - }}, + "prompt": { + "display_name": "Prompt", + "default": { + "input_variables": ["question"], + "input_types": {}, + "output_parser": None, + "partial_variables": {}, + "template": "You are an AI language model assistant. Your task is \n" + "to generate 3 different versions of the given user \n" + "question to retrieve relevant documents from a vector database. \n" + "By generating multiple perspectives on the user question, \n" + "your goal is to help the user overcome some of the limitations \n" + "of distance-based similarity search. Provide these alternative \n" + "questions separated by newlines. Original question: {question}", + "template_format": "f-string", + "validate_template": False, + "_type": "prompt", + }, + }, "retriever": {"display_name": "Retriever"}, "parser_key": {"display_name": "Parser Key", "default": "lines"}, } diff --git a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py index aa8fc77f2..d65c28dab 100644 --- a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py +++ b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from langchain.text_splitter import CharacterTextSplitter from langchain_core.documents.base import Document diff --git a/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py b/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py index 636eb427e..1cea7a29b 100644 --- a/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py +++ b/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py @@ -1,7 +1,9 @@ from typing import Optional -from langflow import CustomComponent -from langchain.text_splitter import Language + from 
langchain.schema import Document +from langchain.text_splitter import Language + +from langflow import CustomComponent class LanguageRecursiveTextSplitterComponent(CustomComponent): @@ -48,7 +50,7 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent): documents: list[Document], chunk_size: Optional[int] = 1000, chunk_overlap: Optional[int] = 200, - separator_type: Optional[str] = "Python", + separator_type: str = "Python", ) -> list[Document]: """ Split text into chunks of a specified length. diff --git a/src/backend/langflow/components/toolkits/JsonToolkit.py b/src/backend/langflow/components/toolkits/JsonToolkit.py index 70e3355e4..ec9e49621 100644 --- a/src/backend/langflow/components/toolkits/JsonToolkit.py +++ b/src/backend/langflow/components/toolkits/JsonToolkit.py @@ -13,4 +13,4 @@ class JsonToolkitComponent(CustomComponent): } def build(self, spec: JsonSpec) -> JsonToolkit: - return JsonToolkit(spec=spec) \ No newline at end of file + return JsonToolkit(spec=spec) diff --git a/src/backend/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/langflow/components/toolkits/VectorStoreInfo.py index 27a5b3792..48e5d9d9f 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreInfo.py +++ b/src/backend/langflow/components/toolkits/VectorStoreInfo.py @@ -1,9 +1,9 @@ - from langflow import CustomComponent from langchain.vectorstores import VectorStore from typing import Union, Callable from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo + class VectorStoreInfoComponent(CustomComponent): display_name = "VectorStoreInfo" description = "Information about a VectorStore" diff --git a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py index fb1c66ef4..ed8797044 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py @@ -1,9 +1,9 
@@ - from langflow import CustomComponent from typing import List, Union from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo -from langflow.field_typing import BaseLanguageModel,Tool +from langflow.field_typing import BaseLanguageModel, Tool + class VectorStoreRouterToolkitComponent(CustomComponent): display_name = "VectorStoreRouterToolkit" @@ -16,10 +16,8 @@ class VectorStoreRouterToolkitComponent(CustomComponent): } def build( - self, - vectorstores: List[VectorStoreInfo], - llm: BaseLanguageModel - )->Union[Tool,VectorStoreRouterToolkit]: - print("vectorstores",vectorstores) - print("llm",llm) - return VectorStoreRouterToolkit(vectorstores=vectorstores,llm=llm) + self, vectorstores: List[VectorStoreInfo], llm: BaseLanguageModel + ) -> Union[Tool, VectorStoreRouterToolkit]: + print("vectorstores", vectorstores) + print("llm", llm) + return VectorStoreRouterToolkit(vectorstores=vectorstores, llm=llm) diff --git a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py index 58529b373..38b9c9171 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py +++ b/src/backend/langflow/components/toolkits/VectorStoreToolkit.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo @@ -10,6 +9,7 @@ from langflow.field_typing import ( ) from typing import Union + class VectorStoreToolkitComponent(CustomComponent): display_name = "VectorStoreToolkit" description = "Toolkit for interacting with a Vector Store." 
@@ -24,5 +24,5 @@ class VectorStoreToolkitComponent(CustomComponent): self, vectorstore_info: VectorStoreInfo, llm: BaseLanguageModel, - ) -> Union[Tool,VectorStoreToolkit]: - return VectorStoreToolkit(vectorstore_info=vectorstore_info,llm=llm) + ) -> Union[Tool, VectorStoreToolkit]: + return VectorStoreToolkit(vectorstore_info=vectorstore_info, llm=llm) diff --git a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py index 22443d021..bdb55473f 100644 --- a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py @@ -1,4 +1,3 @@ - from typing import Optional from langflow import CustomComponent @@ -30,8 +29,4 @@ class BingSearchAPIWrapperComponent(CustomComponent): k: Optional[int] = 10, ) -> BingSearchAPIWrapper: # 'k' has a default value and is not shown (show=False), so it is hardcoded here - return BingSearchAPIWrapper( - bing_search_url=bing_search_url, - bing_subscription_key=bing_subscription_key, - k=k - ) + return BingSearchAPIWrapper(bing_search_url=bing_search_url, bing_subscription_key=bing_subscription_key, k=k) diff --git a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py index 205087076..37e9afff6 100644 --- a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from typing import Union, Callable from langchain_community.utilities.google_search import GoogleSearchAPIWrapper @@ -11,7 +10,7 @@ class GoogleSearchAPIWrapperComponent(CustomComponent): def build_config(self): return { "google_api_key": {"display_name": "Google API Key", "password": True}, - "google_cse_id": {"display_name": "Google CSE ID","password":True}, + "google_cse_id": {"display_name": "Google 
CSE ID", "password": True}, } def build( diff --git a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py index 0434dec53..84fefedad 100644 --- a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py +++ b/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from typing import Dict, Optional @@ -21,15 +20,10 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): "name": "result_key_for_type", "advanced": False, "dynamic": False, - "info": '', + "info": "", "field_type": "dict", "list": False, - "value": { - "news": "news", - "places": "places", - "images": "images", - "search": "organic" - } + "value": {"news": "news", "places": "places", "images": "images", "search": "organic"}, }, "serper_api_key": { "display_name": "Serper API Key", @@ -39,10 +33,10 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): "name": "serper_api_key", "advanced": False, "dynamic": False, - "info": '', + "info": "", "type": "str", "list": False, - } + }, } def build( @@ -50,7 +44,4 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): serper_api_key: str, result_key_for_type: Optional[Dict[str, str]] = None, ) -> GoogleSerperAPIWrapper: - return GoogleSerperAPIWrapper( - result_key_for_type=result_key_for_type, - serper_api_key=serper_api_key - ) + return GoogleSerperAPIWrapper(result_key_for_type=result_key_for_type, serper_api_key=serper_api_key) diff --git a/src/backend/langflow/components/utilities/SearxSearchWrapper.py b/src/backend/langflow/components/utilities/SearxSearchWrapper.py index 51464cd0b..d89ecacf2 100644 --- a/src/backend/langflow/components/utilities/SearxSearchWrapper.py +++ b/src/backend/langflow/components/utilities/SearxSearchWrapper.py @@ -1,6 +1,8 @@ from langflow import CustomComponent from typing import Optional, Dict from langchain_community.utilities.searx_search import 
SearxSearchWrapper + + class SearxSearchWrapperComponent(CustomComponent): display_name = "SearxSearchWrapper" description = "Wrapper for Searx API." @@ -8,17 +10,12 @@ class SearxSearchWrapperComponent(CustomComponent): def build_config(self): return { "headers": { - "field_type":"dict", + "field_type": "dict", "display_name": "Headers", "multiline": True, - "value": '{"Authorization": "Bearer "}' - }, - "k": { - "display_name": "k", - "advanced": True, - "field_type": "int", - "value": 10 + "value": '{"Authorization": "Bearer "}', }, + "k": {"display_name": "k", "advanced": True, "field_type": "int", "value": 10}, "searx_host": { "display_name": "Searx Host", "field_type": "str", @@ -32,5 +29,5 @@ class SearxSearchWrapperComponent(CustomComponent): k: Optional[int] = 10, headers: Optional[Dict[str, str]] = None, searx_host: Optional[str] = None, - )->SearxSearchWrapper: - return SearxSearchWrapper(headers=headers,k=k,searx_host=searx_host) + ) -> SearxSearchWrapper: + return SearxSearchWrapper(headers=headers, k=k, searx_host=searx_host) diff --git a/src/backend/langflow/components/utilities/SerpAPIWrapper.py b/src/backend/langflow/components/utilities/SerpAPIWrapper.py index 79f806957..1513f4c5e 100644 --- a/src/backend/langflow/components/utilities/SerpAPIWrapper.py +++ b/src/backend/langflow/components/utilities/SerpAPIWrapper.py @@ -2,6 +2,7 @@ from langflow import CustomComponent from typing import Callable, Union from langchain_community.utilities.serpapi import SerpAPIWrapper + class SerpAPIWrapperComponent(CustomComponent): display_name = "SerpAPIWrapper" description = "Wrapper around SerpAPI" @@ -9,7 +10,13 @@ class SerpAPIWrapperComponent(CustomComponent): def build_config(self): return { "serpapi_api_key": {"display_name": "SerpAPI API Key", "type": "str", "password": True}, - "params": {"display_name": "Parameters", "type": "dict","advanced":True, "multiline": True,"value": '{"engine": "google","google_domain": "google.com","gl": "us","hl": "en"}'}, + 
"params": { + "display_name": "Parameters", + "type": "dict", + "advanced": True, + "multiline": True, + "value": '{"engine": "google","google_domain": "google.com","gl": "us","hl": "en"}', + }, } def build( @@ -17,7 +24,4 @@ class SerpAPIWrapperComponent(CustomComponent): serpapi_api_key: str, params: dict, ) -> Union[SerpAPIWrapper, Callable]: # Removed quotes around SerpAPIWrapper - return SerpAPIWrapper( - serpapi_api_key=serpapi_api_key, - params=params - ) \ No newline at end of file + return SerpAPIWrapper(serpapi_api_key=serpapi_api_key, params=params) diff --git a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py index f099d78c3..a5dc908d6 100644 --- a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py +++ b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py @@ -1,12 +1,12 @@ - from langflow import CustomComponent from typing import Union, Callable from langchain_community.utilities.wikipedia import WikipediaAPIWrapper # Assuming WikipediaAPIWrapper is a class that needs to be imported. -# The import statement is not included as it is not provided in the JSON +# The import statement is not included as it is not provided in the JSON # and the actual implementation details are unknown. + class WikipediaAPIWrapperComponent(CustomComponent): display_name = "WikipediaAPIWrapper" description = "Wrapper around WikipediaAPI." 
diff --git a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py index 025998b05..72e656a54 100644 --- a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py +++ b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py @@ -1,18 +1,16 @@ - from langflow import CustomComponent from typing import Callable, Union from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper # Since all the fields in the JSON have show=False, we will only create a basic component # without any configurable fields. + class WolframAlphaAPIWrapperComponent(CustomComponent): display_name = "WolframAlphaAPIWrapper" description = "Wrapper for Wolfram Alpha." def build_config(self): - return { - "appid": {"display_name": "App ID", "type": "str", "password": True} - } + return {"appid": {"display_name": "App ID", "type": "str", "password": True}} - def build(self,appid:str) -> Union[Callable, WolframAlphaAPIWrapper]: - return WolframAlphaAPIWrapper(wolfram_alpha_appid=appid) \ No newline at end of file + def build(self, appid: str) -> Union[Callable, WolframAlphaAPIWrapper]: + return WolframAlphaAPIWrapper(wolfram_alpha_appid=appid) diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py index 2e516e7bc..b171802a2 100644 --- a/src/backend/langflow/components/vectorstores/FAISS.py +++ b/src/backend/langflow/components/vectorstores/FAISS.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from langchain_community.vectorstores.faiss import FAISS from typing import Optional, List, Union @@ -9,6 +8,7 @@ from langflow.field_typing import ( Embeddings, ) + class FAISSComponent(CustomComponent): display_name = "FAISS" description = "Construct FAISS wrapper from raw documents." 
@@ -24,5 +24,5 @@ class FAISSComponent(CustomComponent): self, embedding: Embeddings, documents: Optional[List[Document]] = None, - ) -> Union[VectorStore,FAISS,BaseRetriever]: - return FAISS.from_documents(documents=documents,embedding=embedding) + ) -> Union[VectorStore, FAISS, BaseRetriever]: + return FAISS.from_documents(documents=documents, embedding=embedding) diff --git a/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py b/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py index 1d416780d..b477e8f54 100644 --- a/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py +++ b/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py @@ -1,4 +1,3 @@ - from langflow import CustomComponent from langchain.vectorstores import MongoDBAtlasVectorSearch from typing import Optional, List @@ -8,6 +7,7 @@ from langflow.field_typing import ( NestedDict, ) + class MongoDBAtlasComponent(CustomComponent): display_name = "MongoDB Atlas" description = "Construct a `MongoDB Atlas Vector Search` vector store from raw documents." diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index 6e62dd766..ca443f2e2 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -8,6 +8,8 @@ from langflow.field_typing import ( from langchain.schema import BaseRetriever from langchain.vectorstores.base import VectorStore import pinecone + + class PineconeComponent(CustomComponent): display_name = "Pinecone" description = "Construct Pinecone wrapper from raw documents." 
@@ -18,8 +20,8 @@ class PineconeComponent(CustomComponent): "embedding": {"display_name": "Embedding", "default": 1000}, "index_name": {"display_name": "Index Name"}, "namespace": {"display_name": "Namespace"}, - "pinecone_api_key": {"display_name": "Pinecone API Key", "default": "","password": True,"required": True}, - "pinecone_env": {"display_name": "Pinecone Environment", "default": "","required": True}, + "pinecone_api_key": {"display_name": "Pinecone API Key", "default": "", "password": True, "required": True}, + "pinecone_env": {"display_name": "Pinecone Environment", "default": "", "required": True}, "search_kwargs": {"display_name": "Search Kwargs", "default": "{}"}, } @@ -30,6 +32,6 @@ class PineconeComponent(CustomComponent): index_name: Optional[str] = None, pinecone_api_key: Optional[str] = None, pinecone_env: Optional[str] = None, - ) -> Union[VectorStore,Pinecone,BaseRetriever]: - pinecone.init(api_key=pinecone_api_key,environment=pinecone_env) - return Pinecone.from_documents(documents=documents,embedding=embedding,index_name=index_name) + ) -> Union[VectorStore, Pinecone, BaseRetriever]: + pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) + return Pinecone.from_documents(documents=documents, embedding=embedding, index_name=index_name) diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py index 6c2b801ec..2e2812e08 100644 --- a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -1,5 +1,5 @@ from langflow import CustomComponent -from typing import Optional, List,Union +from typing import Optional, List, Union from langchain_community.vectorstores.supabase import SupabaseVectorStore from langflow.field_typing import ( Document, @@ -35,6 +35,13 @@ class SupabaseComponent(CustomComponent): supabase_service_key: str = "", supabase_url: str = "", 
table_name: str = "", - ) -> Union[VectorStore,SupabaseVectorStore,BaseRetriever]: + ) -> Union[VectorStore, SupabaseVectorStore, BaseRetriever]: supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key) - return SupabaseVectorStore.from_documents(documents=documents,embedding=embedding,query_name=query_name,search_kwargs=search_kwargs,client=supabase,table_name=table_name) \ No newline at end of file + return SupabaseVectorStore.from_documents( + documents=documents, + embedding=embedding, + query_name=query_name, + search_kwargs=search_kwargs, + client=supabase, + table_name=table_name, + ) diff --git a/src/backend/langflow/components/vectorstores/Weaviate.py b/src/backend/langflow/components/vectorstores/Weaviate.py index bab1322f7..3f0a208bb 100644 --- a/src/backend/langflow/components/vectorstores/Weaviate.py +++ b/src/backend/langflow/components/vectorstores/Weaviate.py @@ -1,12 +1,12 @@ -import weaviate # type: ignore from typing import Optional, Union -from langflow import CustomComponent -from langchain.vectorstores import Weaviate -from langchain.schema import Document -from langchain.vectorstores.base import VectorStore -from langchain.schema import BaseRetriever +import weaviate # type: ignore from langchain.embeddings.base import Embeddings +from langchain.schema import BaseRetriever, Document +from langchain.vectorstores import Weaviate +from langchain.vectorstores.base import VectorStore + +from langflow import CustomComponent class WeaviateVectorStore(CustomComponent): @@ -45,7 +45,7 @@ class WeaviateVectorStore(CustomComponent): search_by_text: bool = False, api_key: Optional[str] = None, index_name: Optional[str] = None, - text_key: Optional[str] = "text", + text_key: str = "text", embedding: Optional[Embeddings] = None, documents: Optional[Document] = None, attributes: Optional[list] = None, From ce39b75c3733646b0b981391c3918df79aeac6eb Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 25 Jan 2024 14:51:28 -0300 Subject: 
[PATCH 128/153] fix(components): fix import statement in UnstructuredWordDocumentLoader.py to use langflow instead of langchain fix(vectorstores): change optional List[Document] parameters to required List[Document] in FAISS.py, Pinecone.py, Qdrant.py, and SupabaseVectorStore.py to improve consistency and avoid potential NoneType errors --- .../documentloaders/UnstructuredWordDocumentLoader.py | 2 +- src/backend/langflow/components/vectorstores/FAISS.py | 4 ++-- src/backend/langflow/components/vectorstores/Pinecone.py | 2 +- src/backend/langflow/components/vectorstores/Qdrant.py | 2 +- .../langflow/components/vectorstores/SupabaseVectorStore.py | 4 ++-- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py index bf57664ff..48ff3a24e 100644 --- a/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py +++ b/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py @@ -1,4 +1,4 @@ -from langchain import CustomComponent +from langflow import CustomComponent from langflow.field_typing import Document from typing import Optional, Dict diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py index b171802a2..cfdbc0521 100644 --- a/src/backend/langflow/components/vectorstores/FAISS.py +++ b/src/backend/langflow/components/vectorstores/FAISS.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langchain_community.vectorstores.faiss import FAISS -from typing import Optional, List, Union +from typing import List, Union from langchain.schema import BaseRetriever from langchain.vectorstores.base import VectorStore from langflow.field_typing import ( @@ -23,6 +23,6 @@ class FAISSComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: Optional[List[Document]] = 
None, + documents: List[Document] = None, ) -> Union[VectorStore, FAISS, BaseRetriever]: return FAISS.from_documents(documents=documents, embedding=embedding) diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index ca443f2e2..bdd3323ee 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -28,7 +28,7 @@ class PineconeComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: Optional[List[Document]] = None, + documents: List[Document] = None, index_name: Optional[str] = None, pinecone_api_key: Optional[str] = None, pinecone_env: Optional[str] = None, diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/langflow/components/vectorstores/Qdrant.py index 548da03bf..74bc5ff59 100644 --- a/src/backend/langflow/components/vectorstores/Qdrant.py +++ b/src/backend/langflow/components/vectorstores/Qdrant.py @@ -35,7 +35,7 @@ class QdrantComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: Optional[List[Document]] = None, + documents: List[Document] = None, api_key: Optional[str] = None, collection_name: Optional[str] = None, content_payload_key: str = "page_content", diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py index 2e2812e08..40e6dd98e 100644 --- a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -1,5 +1,5 @@ from langflow import CustomComponent -from typing import Optional, List, Union +from typing import List, Union from langchain_community.vectorstores.supabase import SupabaseVectorStore from langflow.field_typing import ( Document, @@ -29,7 +29,7 @@ class SupabaseComponent(CustomComponent): def build( self, embedding: Embeddings, - 
documents: Optional[List[Document]] = None, + documents: List[Document], query_name: str = "", search_kwargs: NestedDict = {}, supabase_service_key: str = "", From 9c06b16eb30fd4802dff4057b80c2dc2d16c6245 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 25 Jan 2024 16:40:58 -0300 Subject: [PATCH 129/153] Fix default value for k parameter and add default value for searx_host parameter --- .../langflow/components/utilities/SearxSearchWrapper.py | 4 ++-- src/backend/langflow/components/utilities/SerpAPIWrapper.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/components/utilities/SearxSearchWrapper.py b/src/backend/langflow/components/utilities/SearxSearchWrapper.py index d89ecacf2..b406f3882 100644 --- a/src/backend/langflow/components/utilities/SearxSearchWrapper.py +++ b/src/backend/langflow/components/utilities/SearxSearchWrapper.py @@ -26,8 +26,8 @@ class SearxSearchWrapperComponent(CustomComponent): def build( self, - k: Optional[int] = 10, + k: int = 10, headers: Optional[Dict[str, str]] = None, - searx_host: Optional[str] = None, + searx_host: str = "https://searx.example.com", ) -> SearxSearchWrapper: return SearxSearchWrapper(headers=headers, k=k, searx_host=searx_host) diff --git a/src/backend/langflow/components/utilities/SerpAPIWrapper.py b/src/backend/langflow/components/utilities/SerpAPIWrapper.py index 1513f4c5e..0fda8188e 100644 --- a/src/backend/langflow/components/utilities/SerpAPIWrapper.py +++ b/src/backend/langflow/components/utilities/SerpAPIWrapper.py @@ -24,4 +24,7 @@ class SerpAPIWrapperComponent(CustomComponent): serpapi_api_key: str, params: dict, ) -> Union[SerpAPIWrapper, Callable]: # Removed quotes around SerpAPIWrapper - return SerpAPIWrapper(serpapi_api_key=serpapi_api_key, params=params) + return SerpAPIWrapper( + serpapi_api_key=serpapi_api_key, + params=params, + ) From 8bff60d2f2f7ba1fbfdd13c2db16ba06559999c2 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 25 Jan 2024 18:54:01 
-0300 Subject: [PATCH 130/153] feat(llms): Add support for using SecretStr from pydantic to store sensitive API keys securely feat(llms): Add support for using process.env.PORT environment variable to configure server port fix(llms): Fix incorrect default value for model_kwargs parameter in AnthropicComponent fix(llms): Fix incorrect default value for model_kwargs parameter in ChatAnthropicComponent fix(llms): Fix incorrect default value for model_kwargs parameter in ChatOpenAIComponent fix(llms): Fix incorrect default value for model_kwargs parameter in ChatVertexAIComponent fix(llms): Fix incorrect default value for model_kwargs parameter in CohereComponent fix(llms): Fix incorrect default value for model_kwargs parameter in LlamaCppComponent fix(llms): Fix incorrect default value for model_kwargs parameter in VertexAIComponent fix(utilities): Fix incorrect default value for k parameter in BingSearchAPIWrapperComponent fix(vectorstores): Fix missing required documents parameter in FAISSComponent fix(vectorstores): Fix missing required documents parameter in PineconeComponent fix(vectorstores): Fix missing required documents parameter in QdrantComponent --- .../langflow/components/llms/Anthropic.py | 5 ++-- .../langflow/components/llms/ChatAnthropic.py | 5 ++-- .../langflow/components/llms/ChatOpenAI.py | 2 +- .../langflow/components/llms/ChatVertexAI.py | 10 ++++---- .../langflow/components/llms/Cohere.py | 5 ++-- .../langflow/components/llms/LlamaCpp.py | 24 +++++++++---------- .../langflow/components/llms/VertexAI.py | 4 +--- .../utilities/BingSearchAPIWrapper.py | 3 +-- .../langflow/components/vectorstores/FAISS.py | 2 +- .../components/vectorstores/Pinecone.py | 2 +- .../components/vectorstores/Qdrant.py | 2 +- 11 files changed, 31 insertions(+), 33 deletions(-) diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/langflow/components/llms/Anthropic.py index 45292a6ea..f94d16deb 100644 --- 
a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -1,3 +1,4 @@ +from pydantic import SecretStr from langflow import CustomComponent from typing import Optional from langflow.field_typing import BaseLanguageModel, NestedDict @@ -34,11 +35,11 @@ class AnthropicComponent(CustomComponent): self, anthropic_api_key: str, anthropic_api_url: str, - model_kwargs: Optional[NestedDict], + model_kwargs: NestedDict = {}, temperature: Optional[float] = None, ) -> BaseLanguageModel: return Anthropic( - anthropic_api_key=anthropic_api_key, + anthropic_api_key=SecretStr(anthropic_api_key), anthropic_api_url=anthropic_api_url, model_kwargs=model_kwargs, temperature=temperature, diff --git a/src/backend/langflow/components/llms/ChatAnthropic.py b/src/backend/langflow/components/llms/ChatAnthropic.py index e6f8b1408..0bce33ab8 100644 --- a/src/backend/langflow/components/llms/ChatAnthropic.py +++ b/src/backend/langflow/components/llms/ChatAnthropic.py @@ -1,3 +1,4 @@ +from pydantic import SecretStr from langflow import CustomComponent from typing import Optional, Union, Callable from langflow.field_typing import BaseLanguageModel @@ -33,13 +34,13 @@ class ChatAnthropicComponent(CustomComponent): def build( self, - anthropic_api_key: Optional[str] = None, + anthropic_api_key: str, anthropic_api_url: Optional[str] = None, model_kwargs: dict = {}, temperature: Optional[float] = None, ) -> Union[BaseLanguageModel, Callable]: return ChatAnthropic( - anthropic_api_key=anthropic_api_key, + anthropic_api_key=SecretStr(anthropic_api_key), anthropic_api_url=anthropic_api_url, model_kwargs=model_kwargs, temperature=temperature, diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py index c2c3e9234..e03a618fc 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -67,7 +67,7 @@ class 
ChatOpenAIComponent(CustomComponent): def build( self, max_tokens: Optional[int] = 256, - model_kwargs: Optional[NestedDict] = {}, + model_kwargs: NestedDict = {}, model_name: str = "gpt-4-1106-preview", openai_api_base: Optional[str] = None, openai_api_key: Optional[str] = None, diff --git a/src/backend/langflow/components/llms/ChatVertexAI.py b/src/backend/langflow/components/llms/ChatVertexAI.py index df0ca3f65..66235a1d8 100644 --- a/src/backend/langflow/components/llms/ChatVertexAI.py +++ b/src/backend/langflow/components/llms/ChatVertexAI.py @@ -66,12 +66,12 @@ class ChatVertexAIComponent(CustomComponent): project: str, examples: Optional[List[BaseMessage]] = [], location: str = "us-central1", - max_output_tokens: Optional[int] = 128, + max_output_tokens: int = 128, model_name: str = "chat-bison", - temperature: Optional[float] = 0.0, - top_k: Optional[int] = 40, - top_p: Optional[float] = 0.95, - verbose: Optional[bool] = False, + temperature: float = 0.0, + top_k: int = 40, + top_p: float = 0.95, + verbose: bool = False, ) -> Union[BaseLanguageModel, BaseLLM]: return ChatVertexAI( credentials=credentials, diff --git a/src/backend/langflow/components/llms/Cohere.py b/src/backend/langflow/components/llms/Cohere.py index 41513e523..bcb15678e 100644 --- a/src/backend/langflow/components/llms/Cohere.py +++ b/src/backend/langflow/components/llms/Cohere.py @@ -1,6 +1,5 @@ from langflow import CustomComponent from langchain_core.language_models.base import BaseLanguageModel -from typing import Optional from langchain_community.llms.cohere import Cohere @@ -19,7 +18,7 @@ class CohereComponent(CustomComponent): def build( self, cohere_api_key: str, - max_tokens: Optional[int] = 256, - temperature: Optional[float] = 0.75, + max_tokens: int = 256, + temperature: float = 0.75, ) -> BaseLanguageModel: return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) diff --git a/src/backend/langflow/components/llms/LlamaCpp.py 
b/src/backend/langflow/components/llms/LlamaCpp.py index d10c9bec9..831665bfa 100644 --- a/src/backend/langflow/components/llms/LlamaCpp.py +++ b/src/backend/langflow/components/llms/LlamaCpp.py @@ -59,36 +59,36 @@ class LlamaCppComponent(CustomComponent): cache: Optional[bool] = None, client: Optional[Any] = None, echo: Optional[bool] = False, - f16_kv: Optional[bool] = True, + f16_kv: bool = True, grammar_path: Optional[str] = None, last_n_tokens_size: Optional[int] = 64, - logits_all: Optional[bool] = False, + logits_all: bool = False, logprobs: Optional[int] = None, lora_base: Optional[str] = None, lora_path: Optional[str] = None, max_tokens: Optional[int] = 256, metadata: Optional[Dict] = None, - model_kwargs: Optional[Dict] = {}, + model_kwargs: Dict = {}, n_batch: Optional[int] = 8, - n_ctx: Optional[int] = 512, + n_ctx: int = 512, n_gpu_layers: Optional[int] = 1, - n_parts: Optional[int] = -1, + n_parts: int = -1, n_threads: Optional[int] = 1, repeat_penalty: Optional[float] = 1.1, - rope_freq_base: Optional[float] = 10000.0, - rope_freq_scale: Optional[float] = 1.0, - seed: Optional[int] = -1, + rope_freq_base: float = 10000.0, + rope_freq_scale: float = 1.0, + seed: int = -1, stop: Optional[List[str]] = [], - streaming: Optional[bool] = True, + streaming: bool = True, suffix: Optional[str] = "", tags: Optional[List[str]] = [], temperature: Optional[float] = 0.8, top_k: Optional[int] = 40, top_p: Optional[float] = 0.95, - use_mlock: Optional[bool] = False, + use_mlock: bool = False, use_mmap: Optional[bool] = True, - verbose: Optional[bool] = True, - vocab_only: Optional[bool] = False, + verbose: bool = True, + vocab_only: bool = False, ) -> LlamaCpp: return LlamaCpp( model_path=model_path, diff --git a/src/backend/langflow/components/llms/VertexAI.py b/src/backend/langflow/components/llms/VertexAI.py index 74f862c25..008451bc8 100644 --- a/src/backend/langflow/components/llms/VertexAI.py +++ b/src/backend/langflow/components/llms/VertexAI.py @@ -114,7 
+114,7 @@ class VertexAIComponent(CustomComponent): location: str = "us-central1", max_output_tokens: int = 128, max_retries: int = 6, - metadata: Dict = None, + metadata: Dict = {}, model_name: str = "text-bison", n: int = 1, name: Optional[str] = None, @@ -127,8 +127,6 @@ class VertexAIComponent(CustomComponent): tuned_model_name: Optional[str] = None, verbose: bool = False, ) -> Union[BaseLLM, Callable]: - if metadata is None: - metadata = {} return VertexAI( credentials=credentials, location=location, diff --git a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py index bdb55473f..b9dc4a2ef 100644 --- a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py +++ b/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py @@ -1,4 +1,3 @@ -from typing import Optional from langflow import CustomComponent # Assuming `BingSearchAPIWrapper` is a class that exists in the context @@ -26,7 +25,7 @@ class BingSearchAPIWrapperComponent(CustomComponent): self, bing_search_url: str, bing_subscription_key: str, - k: Optional[int] = 10, + k: int = 10, ) -> BingSearchAPIWrapper: # 'k' has a default value and is not shown (show=False), so it is hardcoded here return BingSearchAPIWrapper(bing_search_url=bing_search_url, bing_subscription_key=bing_subscription_key, k=k) diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py index cfdbc0521..98fee467c 100644 --- a/src/backend/langflow/components/vectorstores/FAISS.py +++ b/src/backend/langflow/components/vectorstores/FAISS.py @@ -23,6 +23,6 @@ class FAISSComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: List[Document] = None, + documents: List[Document], ) -> Union[VectorStore, FAISS, BaseRetriever]: return FAISS.from_documents(documents=documents, embedding=embedding) diff --git 
a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index bdd3323ee..ada0d405d 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -28,7 +28,7 @@ class PineconeComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: List[Document] = None, + documents: List[Document], index_name: Optional[str] = None, pinecone_api_key: Optional[str] = None, pinecone_env: Optional[str] = None, diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/langflow/components/vectorstores/Qdrant.py index 74bc5ff59..4f5b28fa4 100644 --- a/src/backend/langflow/components/vectorstores/Qdrant.py +++ b/src/backend/langflow/components/vectorstores/Qdrant.py @@ -35,7 +35,7 @@ class QdrantComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: List[Document] = None, + documents: List[Document], api_key: Optional[str] = None, collection_name: Optional[str] = None, content_payload_key: str = "page_content", From fa4a01caed91fc54aa170b3dc59f2fa06d1c2856 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 25 Jan 2024 19:11:06 -0300 Subject: [PATCH 131/153] chore(RetrievalQAWithSourcesChain.py): reorder imports to improve readability and maintain consistency chore(AZLyricsLoader.py): reorder imports to improve readability and maintain consistency chore(AirbyteJSONLoader.py): reorder imports to improve readability and maintain consistency chore(CoNLLULoader.py): add import for List from typing module to fix type hinting chore(CollegeConfidentialLoader.py): add import for List from typing module to fix type hinting chore(EverNoteLoader.py): reorder imports to improve readability and maintain consistency chore(FacebookChatLoader.py): add import for List from typing module to fix type hinting chore(GitbookLoader.py): reorder imports to improve readability and maintain consistency 
chore(HNLoader.py): add import for List from typing module to fix type hinting chore(IMSDbLoader.py): add import for List from typing module to fix type hinting chore(TextLoader.py): reorder imports to improve readability and maintain consistency chore(CohereEmbeddings.py): change default value of cohere_api_key parameter to an empty string chore(OpenAIEmbeddings.py): change default values of allowed_special, disallowed_special, chunk_size, embedding_ctx_length, max_retries, show_progress_bar, skip_empty, and tikToken_enable parameters to their respective types --- .../chains/RetrievalQAWithSourcesChain.py | 3 ++- .../components/documentloaders/AZLyricsLoader.py | 4 ++-- .../documentloaders/AirbyteJSONLoader.py | 4 ++-- .../components/documentloaders/CoNLLULoader.py | 3 ++- .../documentloaders/CollegeConfidentialLoader.py | 4 ++-- .../components/documentloaders/EverNoteLoader.py | 4 ++-- .../documentloaders/FacebookChatLoader.py | 6 +++--- .../components/documentloaders/GitbookLoader.py | 4 ++-- .../components/documentloaders/HNLoader.py | 5 +++-- .../components/documentloaders/IMSDbLoader.py | 4 ++-- .../components/documentloaders/TextLoader.py | 2 +- .../components/embeddings/CohereEmbeddings.py | 2 +- .../components/embeddings/OpenAIEmbeddings.py | 14 +++++++------- 13 files changed, 31 insertions(+), 28 deletions(-) diff --git a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py index e10aac6bb..3c46cd8bd 100644 --- a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py +++ b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -1,6 +1,7 @@ from typing import Optional -from langchain.chains import BaseQAWithSourcesChain, RetrievalQAWithSourcesChain +from langchain.chains import RetrievalQAWithSourcesChain +from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain from langchain.chains.combine_documents.base import 
BaseCombineDocumentsChain from langflow import CustomComponent diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py index eea64c2e1..82d507d68 100644 --- a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py +++ b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langflow.field_typing import Document -from typing import Optional, Dict +from typing import List, Optional, Dict from langchain_community.document_loaders.azlyrics import AZLyricsLoader @@ -15,7 +15,7 @@ class AZLyricsLoaderComponent(CustomComponent): "web_path": {"display_name": "Web Page", "type": "str", "required": True, "show": True}, } - def build(self, metadata: Optional[Dict] = None, web_path: str = "") -> Document: + def build(self, metadata: Optional[Dict] = None, web_path: str = "") -> List[Document]: documents = AZLyricsLoader(web_path=web_path).load() if metadata: for document in documents: diff --git a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py index eaff8fb4a..8c670a8c0 100644 --- a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py +++ b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langflow.field_typing import Document -from typing import Optional, Dict +from typing import List, Optional, Dict from langchain_community.document_loaders.airbyte_json import AirbyteJSONLoader @@ -26,7 +26,7 @@ class AirbyteJSONLoaderComponent(CustomComponent): }, } - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: documents = AirbyteJSONLoader(file_path=file_path).load() if metadata: for document in documents: diff --git 
a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py index 2c243d25d..0cd2de50c 100644 --- a/src/backend/langflow/components/documentloaders/CoNLLULoader.py +++ b/src/backend/langflow/components/documentloaders/CoNLLULoader.py @@ -1,3 +1,4 @@ +from typing import List from langflow import CustomComponent from langchain.docstore.document import Document from langchain_community.document_loaders.conllu import CoNLLULoader @@ -24,7 +25,7 @@ class CoNLLULoaderComponent(CustomComponent): }, } - def build(self, file_path: str, metadata: dict) -> Document: + def build(self, file_path: str, metadata: dict) -> List[Document]: documents = CoNLLULoader(file_path=file_path).load() if metadata: for document in documents: diff --git a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py index dee77c1da..8fea6e5d2 100644 --- a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py +++ b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langchain.docstore.document import Document -from typing import Optional +from typing import Optional, List from langchain_community.document_loaders.college_confidential import CollegeConfidentialLoader @@ -17,7 +17,7 @@ class CollegeConfidentialLoaderComponent(CustomComponent): "web_path": {"display_name": "Web Page", "required": True}, } - def build(self, web_path: str, metadata: Optional[dict] = {}) -> Document: + def build(self, web_path: str, metadata: Optional[dict] = {}) -> List[Document]: documents = CollegeConfidentialLoader(web_path=web_path).load() if metadata: for document in documents: diff --git a/src/backend/langflow/components/documentloaders/EverNoteLoader.py b/src/backend/langflow/components/documentloaders/EverNoteLoader.py index 
ccf925e27..6f7431fcb 100644 --- a/src/backend/langflow/components/documentloaders/EverNoteLoader.py +++ b/src/backend/langflow/components/documentloaders/EverNoteLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langflow.field_typing import Document -from typing import Optional, Dict +from typing import List, Optional, Dict from langchain_community.document_loaders.evernote import EverNoteLoader @@ -27,7 +27,7 @@ class EverNoteLoaderComponent(CustomComponent): }, } - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: + def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: documents = EverNoteLoader(file_path=file_path).load() if metadata: for document in documents: diff --git a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py index 244066a7e..ecb99eea4 100644 --- a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py +++ b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langchain.docstore.document import Document -from typing import Optional, Dict +from typing import List, Optional, Dict from langchain_community.document_loaders.facebook_chat import FacebookChatLoader @@ -26,8 +26,8 @@ class FacebookChatLoaderComponent(CustomComponent): }, } - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - documents = FacebookChatLoader(file_path=file_path).load() + def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: + documents = FacebookChatLoader(path=file_path).load() if metadata: for document in documents: if not document.metadata: diff --git a/src/backend/langflow/components/documentloaders/GitbookLoader.py b/src/backend/langflow/components/documentloaders/GitbookLoader.py index f53f5959f..05b03e611 100644 --- 
a/src/backend/langflow/components/documentloaders/GitbookLoader.py +++ b/src/backend/langflow/components/documentloaders/GitbookLoader.py @@ -1,6 +1,6 @@ from langflow import CustomComponent from langflow.field_typing import Document -from typing import Optional, Dict +from typing import List, Optional, Dict from langchain_community.document_loaders.gitbook import GitbookLoader @@ -21,7 +21,7 @@ class GitbookLoaderComponent(CustomComponent): }, } - def build(self, metadata: Optional[Dict] = None, web_page: str = "") -> Document: + def build(self, metadata: Optional[Dict] = None, web_page: str = "") -> List[Document]: documents = GitbookLoader(web_page=web_page).load() if metadata: for document in documents: diff --git a/src/backend/langflow/components/documentloaders/HNLoader.py b/src/backend/langflow/components/documentloaders/HNLoader.py index 6e08f26e2..23f77d570 100644 --- a/src/backend/langflow/components/documentloaders/HNLoader.py +++ b/src/backend/langflow/components/documentloaders/HNLoader.py @@ -1,6 +1,7 @@ from langflow import CustomComponent -from typing import Optional, Dict +from typing import List, Optional, Dict from langchain_community.document_loaders.hn import HNLoader +from langflow.field_typing import Document class HNLoaderComponent(CustomComponent): @@ -17,7 +18,7 @@ class HNLoaderComponent(CustomComponent): self, web_path: str, metadata: Optional[Dict] = None, - ) -> HNLoader: + ) -> List[Document]: documents = HNLoader(web_path=web_path).load() if metadata: for document in documents: diff --git a/src/backend/langflow/components/documentloaders/IMSDbLoader.py b/src/backend/langflow/components/documentloaders/IMSDbLoader.py index 1eebcc444..bd16f854b 100644 --- a/src/backend/langflow/components/documentloaders/IMSDbLoader.py +++ b/src/backend/langflow/components/documentloaders/IMSDbLoader.py @@ -2,7 +2,7 @@ from langflow import CustomComponent from langflow.field_typing import Document from langchain_community.document_loaders.imsdb import 
IMSDbLoader -from typing import Dict, Optional +from typing import Dict, List, Optional class IMSDbLoaderComponent(CustomComponent): @@ -19,7 +19,7 @@ class IMSDbLoaderComponent(CustomComponent): self, metadata: Optional[Dict] = None, web_path: str = "", - ) -> Document: + ) -> List[Document]: documents = IMSDbLoader(web_path=web_path).load() if metadata: for document in documents: diff --git a/src/backend/langflow/components/documentloaders/TextLoader.py b/src/backend/langflow/components/documentloaders/TextLoader.py index 0c4033898..d3fa8e7ad 100644 --- a/src/backend/langflow/components/documentloaders/TextLoader.py +++ b/src/backend/langflow/components/documentloaders/TextLoader.py @@ -1,5 +1,5 @@ from langflow import CustomComponent -from langchain.data_connections import Document +from langflow.field_typing import Document from typing import Optional, Dict diff --git a/src/backend/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/langflow/components/embeddings/CohereEmbeddings.py index 3fccc8c2c..4e7fb7b17 100644 --- a/src/backend/langflow/components/embeddings/CohereEmbeddings.py +++ b/src/backend/langflow/components/embeddings/CohereEmbeddings.py @@ -21,7 +21,7 @@ class CohereEmbeddingsComponent(CustomComponent): def build( self, request_timeout: Optional[float] = None, - cohere_api_key: str = None, + cohere_api_key: str = "", max_retries: Optional[int] = None, model: str = "embed-english-v2.0", truncate: Optional[str] = None, diff --git a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py index 6838a7759..71bcb1db2 100644 --- a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py @@ -68,13 +68,13 @@ class OpenAIEmbeddingsComponent(CustomComponent): self, default_headers: Optional[Dict[str, str]] = None, default_query: Optional[NestedDict] = {}, - allowed_special: Optional[List[str]] = [], 
+ allowed_special: List[str] = [], disallowed_special: List[str] = ["all"], - chunk_size: Optional[int] = 1000, + chunk_size: int = 1000, client: Optional[Any] = None, deployment: str = "text-embedding-ada-002", - embedding_ctx_length: Optional[int] = 8191, - max_retries: Optional[int] = 6, + embedding_ctx_length: int = 8191, + max_retries: int = 6, model: str = "text-embedding-ada-002", model_kwargs: NestedDict = {}, openai_api_base: Optional[str] = None, @@ -84,9 +84,9 @@ class OpenAIEmbeddingsComponent(CustomComponent): openai_organization: Optional[str] = None, openai_proxy: Optional[str] = None, request_timeout: Optional[float] = None, - show_progress_bar: Optional[bool] = False, - skip_empty: Optional[bool] = False, - tikToken_enable: Optional[bool] = True, + show_progress_bar: bool = False, + skip_empty: bool = False, + tikToken_enable: bool = True, tiktoken_model_name: Optional[str] = None, ) -> Union[OpenAIEmbeddings, Callable]: return OpenAIEmbeddings( From d43d9d3760c85e64c3d1c50d7fb3d14b68e0a0a4 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Thu, 25 Jan 2024 19:14:58 -0300 Subject: [PATCH 132/153] Update JsonAgent and ZeroShotAgent components --- src/backend/langflow/components/agents/JsonAgent.py | 4 ++-- src/backend/langflow/components/agents/ZeroShotAgent.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/components/agents/JsonAgent.py b/src/backend/langflow/components/agents/JsonAgent.py index d15366b77..0197b9210 100644 --- a/src/backend/langflow/components/agents/JsonAgent.py +++ b/src/backend/langflow/components/agents/JsonAgent.py @@ -3,7 +3,7 @@ from langchain.agents import AgentExecutor, create_json_agent from langflow.field_typing import ( BaseLanguageModel, ) -from langchain_community.agent_toolkits.base import BaseToolkit +from langchain_community.agent_toolkits.json.toolkit import JsonToolkit class JsonAgentComponent(CustomComponent): @@ -19,6 +19,6 @@ class 
JsonAgentComponent(CustomComponent): def build( self, llm: BaseLanguageModel, - toolkit: BaseToolkit, + toolkit: JsonToolkit, ) -> AgentExecutor: return create_json_agent(llm=llm, toolkit=toolkit) diff --git a/src/backend/langflow/components/agents/ZeroShotAgent.py b/src/backend/langflow/components/agents/ZeroShotAgent.py index b65e14e41..538e693cd 100644 --- a/src/backend/langflow/components/agents/ZeroShotAgent.py +++ b/src/backend/langflow/components/agents/ZeroShotAgent.py @@ -1,6 +1,6 @@ -from typing import List +from typing import List, Optional -from langchain.agents import ZeroShotAgent +from langchain.agents.mrkl.base import ZeroShotAgent from langchain_core.tools import BaseTool from langflow import CustomComponent from langflow.components.chains.LLMChain import LLMChain @@ -21,8 +21,8 @@ class ZeroShotAgentComponent(CustomComponent): def build( self, llm: LLMChain, - tools: List[BaseTool], + tools: Optional[List[BaseTool]] = None, prefix: str = "Answer the following questions as best you can. 
You have access to the following tools:", suffix: str = "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}", ) -> ZeroShotAgent: - return ZeroShotAgent(llm_chain=llm, tools=tools, prefix=prefix, suffix=suffix) + return ZeroShotAgent(llm_chain=llm, allowed_tools=tools, prefix=prefix, suffix=suffix) From d258e0faeafb184703aa600ece41a957e9878c99 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 25 Jan 2024 19:53:51 -0300 Subject: [PATCH 133/153] Refactor OllamaLLM constructor parameters --- .../langflow/components/llms/OllamaLLM.py | 41 +++++++++---------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/src/backend/langflow/components/llms/OllamaLLM.py b/src/backend/langflow/components/llms/OllamaLLM.py index abbc00387..eb5c52975 100644 --- a/src/backend/langflow/components/llms/OllamaLLM.py +++ b/src/backend/langflow/components/llms/OllamaLLM.py @@ -2,6 +2,7 @@ from typing import List, Optional from langchain.llms.base import BaseLLM from langchain_community.llms.ollama import Ollama + from langflow import CustomComponent @@ -132,29 +133,25 @@ class OllamaLLM(CustomComponent): mirostat_eta = None mirostat_tau = None - llm_params = { - "base_url": base_url, - "model": model, - "mirostat": mirostat_value, - "mirostat_eta": mirostat_eta, - "mirostat_tau": mirostat_tau, - "num_ctx": num_ctx, - "num_gpu": num_gpu, - "num_thread": num_thread, - "repeat_last_n": repeat_last_n, - "repeat_penalty": repeat_penalty, - "temperature": temperature, - "stop": stop, - "tfs_z": tfs_z, - "top_k": top_k, - "top_p": top_p, - } - - # None Value remove - llm_params = {k: v for k, v in llm_params.items() if v is not None} - try: - llm = Ollama(**llm_params) + llm = Ollama( + base_url=base_url, + model=model, + mirostat=mirostat_value, + mirostat_eta=mirostat_eta, + mirostat_tau=mirostat_tau, + num_ctx=num_ctx, + num_gpu=num_gpu, + num_thread=num_thread, + repeat_last_n=repeat_last_n, + repeat_penalty=repeat_penalty, + temperature=temperature, + 
stop=stop, + tfs_z=tfs_z, + top_k=top_k, + top_p=top_p, + ) + except Exception as e: raise ValueError("Could not connect to Ollama.") from e From 5cf56ca7a89fae0d2ba84e3b8ad7f3d6aa5aed1a Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 25 Jan 2024 21:15:28 -0300 Subject: [PATCH 134/153] Fix import errors and type annotations --- .../embeddings/AzureOpenAIEmbeddings.py | 9 +-- .../components/embeddings/CohereEmbeddings.py | 3 +- .../components/embeddings/OpenAIEmbeddings.py | 16 ++--- .../langflow/components/llms/Anthropic.py | 8 ++- .../langflow/components/llms/CTransformers.py | 8 ++- .../langflow/components/llms/ChatOpenAI.py | 7 +-- .../langflow/components/llms/Cohere.py | 6 +- .../langflow/components/llms/OpenAI.py | 60 ------------------- .../retrievers/MultiQueryRetriever.py | 16 ++--- .../components/utilities/SerpAPIWrapper.py | 5 +- .../utilities/WikipediaAPIWrapper.py | 20 +++++-- .../utilities/WolframAlphaAPIWrapper.py | 6 +- .../components/vectorstores/Pinecone.py | 2 +- 13 files changed, 63 insertions(+), 103 deletions(-) delete mode 100644 src/backend/langflow/components/llms/OpenAI.py diff --git a/src/backend/langflow/components/embeddings/AzureOpenAIEmbeddings.py b/src/backend/langflow/components/embeddings/AzureOpenAIEmbeddings.py index e4fdd8738..09b03e17b 100644 --- a/src/backend/langflow/components/embeddings/AzureOpenAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/AzureOpenAIEmbeddings.py @@ -1,7 +1,8 @@ -from langflow import CustomComponent from langchain.embeddings.base import Embeddings from langchain_community.embeddings import AzureOpenAIEmbeddings +from langflow import CustomComponent + class AzureOpenAIEmbeddingsComponent(CustomComponent): display_name: str = "AzureOpenAIEmbeddings" @@ -53,9 +54,9 @@ class AzureOpenAIEmbeddingsComponent(CustomComponent): try: embeddings = AzureOpenAIEmbeddings( azure_endpoint=azure_endpoint, - deployment=azure_deployment, - openai_api_version=api_version, - 
openai_api_key=api_key, + azure_deployment=azure_deployment, + api_version=api_version, + api_key=api_key, ) except Exception as e: diff --git a/src/backend/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/langflow/components/embeddings/CohereEmbeddings.py index 4e7fb7b17..049525b39 100644 --- a/src/backend/langflow/components/embeddings/CohereEmbeddings.py +++ b/src/backend/langflow/components/embeddings/CohereEmbeddings.py @@ -1,7 +1,6 @@ from typing import Optional from langchain_community.embeddings.cohere import CohereEmbeddings - from langflow import CustomComponent @@ -27,7 +26,7 @@ class CohereEmbeddingsComponent(CustomComponent): truncate: Optional[str] = None, user_agent: str = "langchain", ) -> CohereEmbeddings: - return CohereEmbeddings( + return CohereEmbeddings( # type: ignore max_retries=max_retries, user_agent=user_agent, request_timeout=request_timeout, diff --git a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py index 71bcb1db2..555d45570 100644 --- a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py +++ b/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py @@ -1,7 +1,9 @@ +from typing import Any, Callable, Dict, List, Optional, Union + +from langchain_openai.embeddings.base import OpenAIEmbeddings + from langflow import CustomComponent from langflow.field_typing import NestedDict -from typing import List, Optional, Dict, Any, Union, Callable -from langchain_openai.embeddings.base import OpenAIEmbeddings class OpenAIEmbeddingsComponent(CustomComponent): @@ -102,13 +104,13 @@ class OpenAIEmbeddingsComponent(CustomComponent): max_retries=max_retries, model=model, model_kwargs=model_kwargs, - openai_api_base=openai_api_base, - openai_api_key=openai_api_key, + base_url=openai_api_base, + api_key=openai_api_key, openai_api_type=openai_api_type, - openai_api_version=openai_api_version, - openai_organization=openai_organization, + 
api_version=openai_api_version, + organization=openai_organization, openai_proxy=openai_proxy, - request_timeout=request_timeout, + timeout=request_timeout, show_progress_bar=show_progress_bar, skip_empty=skip_empty, tiktoken_model_name=tiktoken_model_name, diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/langflow/components/llms/Anthropic.py index f94d16deb..c1b776617 100644 --- a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/langflow/components/llms/Anthropic.py @@ -1,8 +1,10 @@ -from pydantic import SecretStr -from langflow import CustomComponent from typing import Optional -from langflow.field_typing import BaseLanguageModel, NestedDict + from langchain_community.llms.anthropic import Anthropic +from pydantic.v1 import SecretStr + +from langflow import CustomComponent +from langflow.field_typing import BaseLanguageModel, NestedDict class AnthropicComponent(CustomComponent): diff --git a/src/backend/langflow/components/llms/CTransformers.py b/src/backend/langflow/components/llms/CTransformers.py index f4826bf86..a0668814e 100644 --- a/src/backend/langflow/components/llms/CTransformers.py +++ b/src/backend/langflow/components/llms/CTransformers.py @@ -1,6 +1,8 @@ -from langflow import CustomComponent +from typing import Dict, Optional + from langchain_community.llms.ctransformers import CTransformers -from typing import Optional, Dict + +from langflow import CustomComponent class CTransformersComponent(CustomComponent): @@ -28,4 +30,4 @@ class CTransformersComponent(CustomComponent): } def build(self, model: str, model_file: str, model_type: str, config: Optional[Dict] = None) -> CTransformers: - return CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) + return CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) # type: ignore diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/langflow/components/llms/ChatOpenAI.py 
index e03a618fc..19ca4f41d 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/langflow/components/llms/ChatOpenAI.py @@ -2,7 +2,6 @@ from typing import Optional, Union from langchain.llms import BaseLLM from langchain_community.chat_models.openai import ChatOpenAI - from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, NestedDict @@ -78,8 +77,8 @@ class ChatOpenAIComponent(CustomComponent): return ChatOpenAI( max_tokens=max_tokens, model_kwargs=model_kwargs, - model_name=model_name, - openai_api_base=openai_api_base, - openai_api_key=openai_api_key, + model=model_name, + base_url=openai_api_base, + api_key=openai_api_key, temperature=temperature, ) diff --git a/src/backend/langflow/components/llms/Cohere.py b/src/backend/langflow/components/llms/Cohere.py index bcb15678e..3b74fc9b4 100644 --- a/src/backend/langflow/components/llms/Cohere.py +++ b/src/backend/langflow/components/llms/Cohere.py @@ -1,6 +1,6 @@ -from langflow import CustomComponent -from langchain_core.language_models.base import BaseLanguageModel from langchain_community.llms.cohere import Cohere +from langchain_core.language_models.base import BaseLanguageModel +from langflow import CustomComponent class CohereComponent(CustomComponent): @@ -21,4 +21,4 @@ class CohereComponent(CustomComponent): max_tokens: int = 256, temperature: float = 0.75, ) -> BaseLanguageModel: - return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) + return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) # type: ignore diff --git a/src/backend/langflow/components/llms/OpenAI.py b/src/backend/langflow/components/llms/OpenAI.py deleted file mode 100644 index f4f26db2f..000000000 --- a/src/backend/langflow/components/llms/OpenAI.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Dict, Optional - -from langchain_openai.llms.base import OpenAI - -from langflow import CustomComponent - - 
-class OpenAIComponent(CustomComponent): - display_name = "OpenAI" - description = "OpenAI large language models." - - def build_config(self): - return { - "max_tokens": {"display_name": "Max Tokens", "default": 256}, - "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, - "model_name": { - "display_name": "Model Name", - "value": "text-davinci-003", - "options": [ - "text-davinci-003", - "text-davinci-002", - "text-curie-001", - "text-babbage-001", - "text-ada-001", - ], - }, - "openai_api_base": { - "display_name": "OpenAI API Base", - "info": ( - "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n" - "You can change this to use other APIs like JinaChat, LocalAI and Prem." - ), - }, - "openai_api_key": { - "display_name": "OpenAI API Key", - "value": "", - "password": True, - }, - "temperature": {"display_name": "Temperature", "value": 0.7}, - } - - def build( - self, - max_tokens: Optional[int] = 256, - model_kwargs: Optional[Dict] = None, - model_name: str = "text-davinci-003", - openai_api_base: Optional[str] = "", - openai_api_key: str = "", - temperature: Optional[float] = 0.7, - ) -> OpenAI: - if not openai_api_base: - openai_api_base = "https://api.openai.com/v1" - return OpenAI( - max_tokens=max_tokens, - model_kwargs=model_kwargs or {}, - model_name=model_name, - openai_api_base=openai_api_base, - openai_api_key=openai_api_key, - temperature=temperature, - ) diff --git a/src/backend/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/langflow/components/retrievers/MultiQueryRetriever.py index 8c95d6d2a..d2c12d7a5 100644 --- a/src/backend/langflow/components/retrievers/MultiQueryRetriever.py +++ b/src/backend/langflow/components/retrievers/MultiQueryRetriever.py @@ -1,11 +1,8 @@ -from langflow import CustomComponent +from typing import Callable, Optional, Union + from langchain.retrievers import MultiQueryRetriever -from typing import Optional, Union, Callable -from langflow.field_typing import ( - 
PromptTemplate, - BaseLLM, - BaseRetriever, -) +from langflow import CustomComponent +from langflow.field_typing import BaseLLM, BaseRetriever, PromptTemplate class MultiQueryRetrieverComponent(CustomComponent): @@ -46,4 +43,7 @@ class MultiQueryRetrieverComponent(CustomComponent): prompt: Optional[PromptTemplate] = None, parser_key: str = "lines", ) -> Union[Callable, MultiQueryRetriever]: - return MultiQueryRetriever(llm=llm, retriever=retriever, prompt=prompt, parser_key=parser_key) + if not prompt: + return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key) + else: + return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, prompt=prompt, parser_key=parser_key) diff --git a/src/backend/langflow/components/utilities/SerpAPIWrapper.py b/src/backend/langflow/components/utilities/SerpAPIWrapper.py index 0fda8188e..924f5628d 100644 --- a/src/backend/langflow/components/utilities/SerpAPIWrapper.py +++ b/src/backend/langflow/components/utilities/SerpAPIWrapper.py @@ -1,6 +1,7 @@ -from langflow import CustomComponent from typing import Callable, Union + from langchain_community.utilities.serpapi import SerpAPIWrapper +from langflow import CustomComponent class SerpAPIWrapperComponent(CustomComponent): @@ -24,7 +25,7 @@ class SerpAPIWrapperComponent(CustomComponent): serpapi_api_key: str, params: dict, ) -> Union[SerpAPIWrapper, Callable]: # Removed quotes around SerpAPIWrapper - return SerpAPIWrapper( + return SerpAPIWrapper( # type: ignore serpapi_api_key=serpapi_api_key, params=params, ) diff --git a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py index a5dc908d6..00820881b 100644 --- a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py +++ b/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py @@ -1,6 +1,7 @@ -from langflow import CustomComponent -from typing import Union, Callable +from typing import Callable, Union + 
from langchain_community.utilities.wikipedia import WikipediaAPIWrapper +from langflow import CustomComponent # Assuming WikipediaAPIWrapper is a class that needs to be imported. # The import statement is not included as it is not provided in the JSON @@ -14,5 +15,16 @@ class WikipediaAPIWrapperComponent(CustomComponent): def build_config(self): return {} - def build(self) -> Union[WikipediaAPIWrapper, Callable]: - return WikipediaAPIWrapper() + def build( + self, + top_k_results: int = 3, + lang: str = "en", + load_all_available_meta: bool = False, + doc_content_chars_max: int = 4000, + ) -> Union[WikipediaAPIWrapper, Callable]: + return WikipediaAPIWrapper( # type: ignore + top_k_results=top_k_results, + lang=lang, + load_all_available_meta=load_all_available_meta, + doc_content_chars_max=doc_content_chars_max, + ) diff --git a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py index 72e656a54..2e71a161c 100644 --- a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py +++ b/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py @@ -1,6 +1,8 @@ -from langflow import CustomComponent from typing import Callable, Union + from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper +from langflow import CustomComponent + # Since all the fields in the JSON have show=False, we will only create a basic component # without any configurable fields. 
@@ -13,4 +15,4 @@ class WolframAlphaAPIWrapperComponent(CustomComponent): return {"appid": {"display_name": "App ID", "type": "str", "password": True}} def build(self, appid: str) -> Union[Callable, WolframAlphaAPIWrapper]: - return WolframAlphaAPIWrapper(wolfram_alpha_appid=appid) + return WolframAlphaAPIWrapper(wolfram_alpha_appid=appid) # type: ignore diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index ada0d405d..1c04b6bc5 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -7,7 +7,7 @@ from langflow.field_typing import ( ) from langchain.schema import BaseRetriever from langchain.vectorstores.base import VectorStore -import pinecone +import pinecone # type: ignore class PineconeComponent(CustomComponent): From 53844a59637970dd40bf9d7b7a569524d68fab5f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Thu, 25 Jan 2024 21:19:19 -0300 Subject: [PATCH 135/153] Delete ZeroShotAgent and CombineDocsChain components --- .../components/agents/ZeroShotAgent.py | 28 ------------------- .../components/chains/CombineDocsChain.py | 28 ------------------- 2 files changed, 56 deletions(-) delete mode 100644 src/backend/langflow/components/agents/ZeroShotAgent.py delete mode 100644 src/backend/langflow/components/chains/CombineDocsChain.py diff --git a/src/backend/langflow/components/agents/ZeroShotAgent.py b/src/backend/langflow/components/agents/ZeroShotAgent.py deleted file mode 100644 index 538e693cd..000000000 --- a/src/backend/langflow/components/agents/ZeroShotAgent.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import List, Optional - -from langchain.agents.mrkl.base import ZeroShotAgent -from langchain_core.tools import BaseTool -from langflow import CustomComponent -from langflow.components.chains.LLMChain import LLMChain - - -class ZeroShotAgentComponent(CustomComponent): - display_name = 
"ZeroShotAgent" - description = "Construct an agent from an LLM and tools." - - def build_config(self): - return { - "llm": {"display_name": "LLM Chain"}, - "tools": {"display_name": "Tools"}, - "prefix": {"display_name": "Prefix", "multiline": True}, - "suffix": {"display_name": "Suffix", "multiline": True}, - } - - def build( - self, - llm: LLMChain, - tools: Optional[List[BaseTool]] = None, - prefix: str = "Answer the following questions as best you can. You have access to the following tools:", - suffix: str = "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}", - ) -> ZeroShotAgent: - return ZeroShotAgent(llm_chain=llm, allowed_tools=tools, prefix=prefix, suffix=suffix) diff --git a/src/backend/langflow/components/chains/CombineDocsChain.py b/src/backend/langflow/components/chains/CombineDocsChain.py deleted file mode 100644 index bb5a32c29..000000000 --- a/src/backend/langflow/components/chains/CombineDocsChain.py +++ /dev/null @@ -1,28 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, Chain -from typing import Union, Callable -from langchain.chains.combine_documents.base import BaseCombineDocumentsChain - - -class CombineDocsChainComponent(CustomComponent): - display_name = "CombineDocsChain" - description = "Load question answering chain." 
- - def build_config(self): - return { - "llm": {"display_name": "LLM"}, - "chain_type": { - "display_name": "Chain Type", - "options": ["stuff", "map_reduce", "map_rerank", "refine"], - }, - } - - def build( - self, - llm: BaseLanguageModel, - chain_type: str, - ) -> Union[Chain, Callable]: - if chain_type not in ["stuff", "map_reduce", "map_rerank", "refine"]: - raise ValueError(f"Invalid chain_type: {chain_type}") - - return BaseCombineDocumentsChain() From 910c55fa0c10a1bb6ba04c8d58602a35c7e38d51 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 09:00:32 -0300 Subject: [PATCH 136/153] Update langchain version to 0.1.0 --- poetry.lock | 945 +++++++++++++++++++++++-------------------------- pyproject.toml | 2 +- 2 files changed, 441 insertions(+), 506 deletions(-) diff --git a/poetry.lock b/poetry.lock index ef2f79058..783988997 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,87 +13,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.9.1" +version = "3.9.3" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash 
= "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = 
"aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, - {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, - {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, - {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, - {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, - {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = 
"aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] @@ -428,17 +428,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.27" +version = "1.34.30" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.27-py3-none-any.whl", hash = "sha256:3626db4ba9fbb1b58c8fe923da5ed670873b3d881a102956ea19d3b69cd097cc"}, - {file = "boto3-1.34.27.tar.gz", hash = "sha256:ebdd938019f3df2e7b50585353963d4553faf3fbb7b2085c440107fa6caa233b"}, + {file = "boto3-1.34.30-py3-none-any.whl", hash = "sha256:cd6173380768faaecf6236dbdcec15d8d032cbb162ce354fdb111056a74fc298"}, + {file = "boto3-1.34.30.tar.gz", hash = "sha256:9e1476ce2b26437881a0381bf2daa54de619ac74ab4bd74278668acda6004a64"}, ] [package.dependencies] -botocore = ">=1.34.27,<1.35.0" +botocore = ">=1.34.30,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -447,13 +447,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.27" +version = "1.34.30" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.27-py3-none-any.whl", hash = "sha256:1c10f247136ad17b6ef1588c1e043e294dbaebdebe9ce84dc56713029f515c53"}, - {file = "botocore-1.34.27.tar.gz", hash = "sha256:a0e68ba264275b358b8c1cca604161f4d9465cf7847d73e929543a9f30ff22d1"}, + {file = "botocore-1.34.30-py3-none-any.whl", hash = "sha256:caf82d91c2ff61235284a07ffdfba006873e0752e00896052f901a37720cefa4"}, + {file = "botocore-1.34.30.tar.gz", hash = "sha256:e071a9766e7fc2221ca42ec01dfc54368a7518610787342ea622f6edc57f7891"}, ] [package.dependencies] @@ -977,13 +977,13 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] [[package]] name = "cohere" -version = "4.44" +version = "4.45" description = "Python SDK for the Cohere API" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "cohere-4.44-py3-none-any.whl", hash = "sha256:a6d01d579dcd43ef98ce1886b55e00a308d40332eafc1da4bf56526c81d6ff73"}, - {file = "cohere-4.44.tar.gz", hash = "sha256:06d9d056ed9b40f152d9551ca547a20ed08410e2e43488ba174e9025cf09746b"}, + {file = "cohere-4.45-py3-none-any.whl", hash = "sha256:bdaa2e5e1c64cf3b1d55caf9d483a33fa8eafed731a999fb0934ae12c0638b75"}, + {file = "cohere-4.45.tar.gz", hash = "sha256:63b21b2dc3abd718b18cae726a69d1b096a34eb59f3331c20469fd0df1672816"}, ] [package.dependencies] @@ -1077,63 +1077,63 @@ yaml = ["PyYAML"] [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = 
"coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = 
"coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = 
"coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = 
"coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = 
"coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -1144,43 +1144,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.0" +version = "42.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434"}, - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc"}, - {file = "cryptography-42.0.0-cp37-abi3-win32.whl", hash = "sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4"}, - {file = "cryptography-42.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0"}, - {file = "cryptography-42.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221"}, - {file = "cryptography-42.0.0-cp39-abi3-win32.whl", hash = "sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b"}, - {file = "cryptography-42.0.0-cp39-abi3-win_amd64.whl", hash = 
"sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce"}, - {file = "cryptography-42.0.0.tar.gz", hash = "sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, + {file = 
"cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, + {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, + {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, + {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, + {file = "cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, + {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, + {file = "cryptography-42.0.1.tar.gz", hash = "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, ] [package.dependencies] @@ -1302,17 +1302,18 @@ packaging = "*" [[package]] name = "dill" -version = "0.3.7" +version = "0.3.8" description = "serialize all of Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "diskcache" @@ -1785,19 +1786,6 @@ Werkzeug = ">=3.0.0" async = ["asgiref (>=3.2)"] dotenv = ["python-dotenv"] -[[package]] -name = "flask-basicauth" -version = "0.2.0" -description = "HTTP basic access authentication for Flask." 
-optional = false -python-versions = "*" -files = [ - {file = "Flask-BasicAuth-0.2.0.tar.gz", hash = "sha256:df5ebd489dc0914c224419da059d991eb72988a01cdd4b956d52932ce7d501ff"}, -] - -[package.dependencies] -Flask = "*" - [[package]] name = "flask-cors" version = "4.0.0" @@ -1812,6 +1800,21 @@ files = [ [package.dependencies] Flask = ">=0.9" +[[package]] +name = "flask-login" +version = "0.6.3" +description = "User authentication and session management for Flask." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333"}, + {file = "Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d"}, +] + +[package.dependencies] +Flask = ">=1.0.4" +Werkzeug = ">=1.0.1" + [[package]] name = "flatbuffers" version = "23.5.26" @@ -2175,13 +2178,13 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-api-core" -version = "2.15.0" +version = "2.16.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, - {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, + {file = "google-api-core-2.16.0.tar.gz", hash = "sha256:d1fc42e52aa4042ad812827b7aad858394e2bf73da8329af95ad8efa30bc886b"}, + {file = "google_api_core-2.16.0-py3-none-any.whl", hash = "sha256:c424f9f271c7f55366254708e0d0383963a72376286018af0a04f322be843400"}, ] [package.dependencies] @@ -2255,13 +2258,13 @@ httplib2 = ">=0.19.0" [[package]] name = "google-cloud-aiplatform" -version = "1.39.0" +version = "1.40.0" description = "Vertex AI API client library" optional = false python-versions = ">=3.8" files = [ - {file = 
"google-cloud-aiplatform-1.39.0.tar.gz", hash = "sha256:62d6accbf9035895736910bc980f0b2a819d5841ae8bc0c981457cc16c49ecd1"}, - {file = "google_cloud_aiplatform-1.39.0-py2.py3-none-any.whl", hash = "sha256:d7b5c44fbb10d34c7941c5f7aadf7ff480c1469e37eac5b305bc9821fa49f7ee"}, + {file = "google-cloud-aiplatform-1.40.0.tar.gz", hash = "sha256:1ee9aff2fa27c6852558a2abeaf0ffe0537bff90c5dc9f0e967762ac17291001"}, + {file = "google_cloud_aiplatform-1.40.0-py2.py3-none-any.whl", hash = "sha256:9c67a2664e138387ea82d70dec4b54e081b7de6e1089ed23fdaf66900d00320a"}, ] [package.dependencies] @@ -2279,7 +2282,7 @@ autologging = ["mlflow (>=1.27.0,<=2.1.1)"] cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)"] endpoint = ["requests (>=2.28.1)"] -full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", 
"pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] pipelines = ["pyyaml (==5.3.1)"] @@ -2288,19 +2291,19 @@ preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)"] tensorboard = ["tensorflow (>=2.3.0,<2.15.0)"] -testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "ipython", "kfp", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<=2.12.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", 
"werkzeug (>=2.0.0,<2.1.0dev)", "xgboost", "xgboost-ray"] -vizier = ["google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)"] +testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<=2.12.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost", "xgboost-ray"] +vizier = ["google-vizier (>=0.1.6)"] xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] [[package]] name = "google-cloud-bigquery" -version = "3.17.0" +version = "3.17.1" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-bigquery-3.17.0.tar.gz", hash = "sha256:c5617295fc3dec730d16cc5385ab944b123b31d62fa409bd055a4bf3edf83f49"}, - {file = "google_cloud_bigquery-3.17.0-py2.py3-none-any.whl", hash = "sha256:89f208a3b7aba0f8a78d7864cb9a559595383974cda1d2f488acfee6feda7c62"}, + {file = "google-cloud-bigquery-3.17.1.tar.gz", hash = "sha256:0ae07b90d5052ba3a296a2210a2144c28469300d71f6f455881f94c2df543057"}, + {file = "google_cloud_bigquery-3.17.1-py2.py3-none-any.whl", hash = 
"sha256:7a9a92c7b1f6a6bf8b4c05c150e49f4ad1a03dd591dbd4522381b3f23bf07c73"}, ] [package.dependencies] @@ -3636,13 +3639,13 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "langchain" -version = "0.0.354" +version = "0.1.4" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-0.0.354-py3-none-any.whl", hash = "sha256:8d28283a2891422a685b0605dd23b5a1cd6a15ab57a8e359b37a3151a322bad4"}, - {file = "langchain-0.0.354.tar.gz", hash = "sha256:419c48735b803d70c0dee985e0afcfd7c88528b8c1cd918c57eb23e53d94ea87"}, + {file = "langchain-0.1.4-py3-none-any.whl", hash = "sha256:6befdd6221f5f326092e31a3c19efdc7ce3d7d1f2e2cab065141071451730ed7"}, + {file = "langchain-0.1.4.tar.gz", hash = "sha256:8767a9461e2b717ce9a35b1fa20659de89ea86ba9c2a4ff516e05d47ab2d195d"}, ] [package.dependencies] @@ -3650,9 +3653,9 @@ aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">=0.5.7,<0.7" jsonpatch = ">=1.33,<2.0" -langchain-community = ">=0.0.8,<0.1" -langchain-core = ">=0.1.5,<0.2" -langsmith = ">=0.0.77,<0.1.0" +langchain-community = ">=0.0.14,<0.1" +langchain-core = ">=0.1.16,<0.2" +langsmith = ">=0.0.83,<0.1" numpy = ">=1,<2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -3667,7 +3670,7 @@ cli = ["typer (>=0.9.0,<0.10.0)"] cohere = ["cohere (>=4,<5)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets 
(>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets 
(>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] javascript = ["esprima (>=4.0.1,<5.0.0)"] llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] @@ -3676,19 +3679,19 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] name = "langchain-community" -version = "0.0.15" 
+version = "0.0.16" description = "Community contributed LangChain integrations." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_community-0.0.15-py3-none-any.whl", hash = "sha256:2b830c79366e192aed2a997a11a69b62505fb2ee8d08a85f3df7bd3ab62473f1"}, - {file = "langchain_community-0.0.15.tar.gz", hash = "sha256:b027d7765661300edced958228e78077780d96332efe63c5949bc5e435cc7c2b"}, + {file = "langchain_community-0.0.16-py3-none-any.whl", hash = "sha256:0f1dfc1a6205ce8d39931d3515974a208a9f69c16157c649f83490a7cc830b73"}, + {file = "langchain_community-0.0.16.tar.gz", hash = "sha256:c06512a93013a06fba7679cd5a1254ff8b927cddd2d1fbe0cc444bf7bbdf0b8c"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" -langchain-core = ">=0.1.14,<0.2" +langchain-core = ">=0.1.16,<0.2" langsmith = ">=0.0.83,<0.1" numpy = ">=1,<2" PyYAML = ">=5.3" @@ -3698,17 +3701,17 @@ tenacity = ">=8.1.0,<9.0.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq 
(>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai 
(>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] [[package]] name = "langchain-core" -version = "0.1.15" +version = "0.1.17" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_core-0.1.15-py3-none-any.whl", hash = "sha256:eb2e6a81400a98e8d27638ede158c0dc4245ca8e69578138f5ef4c913258ff95"}, - {file = "langchain_core-0.1.15.tar.gz", hash = "sha256:129c0e92ef9a31971766496a9fade429217bd56d16bb6d0024d27925f26187d6"}, + {file = "langchain_core-0.1.17-py3-none-any.whl", hash = "sha256:026155cf97867bde410ab1834799ab4c5ba64c39380f2a4328bcf9c78623ca64"}, + {file = "langchain_core-0.1.17.tar.gz", 
hash = "sha256:59016e457cd6a1708d83a3a454acc97cf02c2a2c3af95626d13f83894fd4e777"}, ] [package.dependencies] @@ -3726,18 +3729,18 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langchain-experimental" -version = "0.0.47" +version = "0.0.49" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_experimental-0.0.47-py3-none-any.whl", hash = "sha256:d5b6930c4e0a6f280cbb7d327d03d86f555f6168e185a1df64ba4a52b1059f65"}, - {file = "langchain_experimental-0.0.47.tar.gz", hash = "sha256:0fdba89a438287c14fa0632c6adf87acffd55158a2f66c7a12be7721a7882a0e"}, + {file = "langchain_experimental-0.0.49-py3-none-any.whl", hash = "sha256:7ddfeab733524785545a09657a482ab3ce866d2bd0984e757768d07ac55fcdcd"}, + {file = "langchain_experimental-0.0.49.tar.gz", hash = "sha256:b36ef864f8464055e9063f94ee568c0d28cd1f029d23d02a7b4d8ff0f115e69f"}, ] [package.dependencies] -langchain = ">=0.0.350,<0.1" -langchain-core = ">=0.1,<0.2" +langchain = ">=0.1,<0.2" +langchain-core = ">=0.1.7,<0.2.0" [package.extras] extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "presidio-analyzer (>=2.2.33,<3.0.0)", "presidio-anonymizer (>=2.2.33,<3.0.0)", "sentence-transformers (>=2,<3)", "vowpal-wabbit-next (==0.6.0)"] @@ -3816,13 +3819,13 @@ langchain = ["langchain (>=0.0.309)"] [[package]] name = "langsmith" -version = "0.0.83" +version = "0.0.84" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.83-py3-none-any.whl", hash = "sha256:a5bb7ac58c19a415a9d5f51db56dd32ee2cd7343a00825bbc2018312eb3d122a"}, - {file = "langsmith-0.0.83.tar.gz", hash = "sha256:94427846b334ad9bdbec3266fee12903fe9f5448f628667689d0412012aaf392"}, + {file = "langsmith-0.0.84-py3-none-any.whl", hash = "sha256:9ae1ab777018e2174f68e8f53c88e7a7feb8dbf1c458b473644a3d5e22dc1eb7"}, + {file = "langsmith-0.0.84.tar.gz", hash = "sha256:dd163f89bca14c86759c651a72917c6d45f7dd18435d7bc65dc205a23dd9ec8d"}, ] [package.dependencies] @@ -3848,12 +3851,12 @@ regex = ["regex"] [[package]] name = "llama-cpp-python" -version = "0.2.32" +version = "0.2.36" description = "Python bindings for the llama.cpp library" optional = true python-versions = ">=3.8" files = [ - {file = "llama_cpp_python-0.2.32.tar.gz", hash = "sha256:202d0596f044542678d062a4ef6d8eddb70acf818895b7535b3311a51f262fb4"}, + {file = "llama_cpp_python-0.2.36.tar.gz", hash = "sha256:21dba178604d17f40924afbf4b8c56305f951ebf5a6da23097e9ce2874b97cc3"}, ] [package.dependencies] @@ -3870,13 +3873,13 @@ test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"] [[package]] name = "llama-index" -version = "0.9.36" +version = "0.9.39" description = "Interface between LLMs and your data" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "llama_index-0.9.36-py3-none-any.whl", hash = "sha256:1d538876e52b391326c787d4e65932821c3138e2f686766714af86cbe69dc7e7"}, - {file = "llama_index-0.9.36.tar.gz", hash = "sha256:c0470875fdb63809e2e94eb90f5ac82a07af5cd1df43d73574986269c791bb34"}, + {file = "llama_index-0.9.39-py3-none-any.whl", hash = "sha256:73e19bf664b0643e3c1b88229d4bcaad841f4c6e882a63b27f637386c54d5353"}, + {file = "llama_index-0.9.39.tar.gz", hash = "sha256:c0d4093cd1c6d6056275f96d6acba56f383ef98925c9ce3fc8cde9fb4dee1f75"}, ] [package.dependencies] @@ -3908,20 +3911,20 @@ query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng 
(>=1.6.0,<2.0.0)", "l [[package]] name = "locust" -version = "2.20.1" +version = "2.21.0" description = "Developer friendly load testing framework" optional = false python-versions = ">=3.8" files = [ - {file = "locust-2.20.1-py3-none-any.whl", hash = "sha256:70168ccf462f125e0e4304c1f8301d3c18f186e8f257bc6578e7bed5e74e59a7"}, - {file = "locust-2.20.1.tar.gz", hash = "sha256:9ba4c8658a158aed55774ac3650ac0139fcc1dfa65fea0dabb00ea35b0d56a4e"}, + {file = "locust-2.21.0-py3-none-any.whl", hash = "sha256:77c5fa66d69425ab639f7ea7f05c6793893adc3c5e8309b97f751f6e414ea5a9"}, + {file = "locust-2.21.0.tar.gz", hash = "sha256:682f27d6696a2eea9f04f2c3ba87aab255a90285ba3a57c3c40444f646b39726"}, ] [package.dependencies] ConfigArgParse = ">=1.5.5" flask = ">=2.0.0" -Flask-BasicAuth = ">=0.2.0" Flask-Cors = ">=3.0.10" +Flask-Login = ">=0.6.3" gevent = ">=22.10.2" geventhttpclient = ">=2.0.11" msgpack = ">=1.0.0" @@ -4059,13 +4062,13 @@ source = ["Cython (==0.29.37)"] [[package]] name = "mako" -version = "1.3.1" +version = "1.3.0" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.1-py3-none-any.whl", hash = "sha256:463f03e04559689adaee25e0967778d6ad41285ed607dc1e7df0dd4e4df81f9e"}, - {file = "Mako-1.3.1.tar.gz", hash = "sha256:baee30b9c61718e093130298e678abed0dbfa1b411fcc4c1ab4df87cd631a0f2"}, + {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, + {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, ] [package.dependencies] @@ -4528,31 +4531,27 @@ files = [ [[package]] name = "multiprocess" -version = "0.70.15" +version = "0.70.16" description = "better multiprocessing and multithreading in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "multiprocess-0.70.15-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa36c7ed16f508091438687fe9baa393a7a8e206731d321e443745e743a0d4e5"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:20e024018c46d0d1602024c613007ac948f9754659e3853b0aa705e83f6931d8"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_i686.whl", hash = "sha256:e576062981c91f0fe8a463c3d52506e598dfc51320a8dd8d78b987dfca91c5db"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e73f497e6696a0f5433ada2b3d599ae733b87a6e8b008e387c62ac9127add177"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:73db2e7b32dcc7f9b0f075c2ffa45c90b6729d3f1805f27e88534c8d321a1be5"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_i686.whl", hash = "sha256:4271647bd8a49c28ecd6eb56a7fdbd3c212c45529ad5303b40b3c65fc6928e5f"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:cf981fb998d6ec3208cb14f0cf2e9e80216e834f5d51fd09ebc937c32b960902"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", 
hash = "sha256:18f9f2c7063346d1617bd1684fdcae8d33380ae96b99427260f562e1a1228b67"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_i686.whl", hash = "sha256:0eac53214d664c49a34695e5824872db4006b1a465edd7459a251809c3773370"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1a51dd34096db47fb21fa2b839e615b051d51b97af9a67afbcdaa67186b44883"}, - {file = "multiprocess-0.70.15-py310-none-any.whl", hash = "sha256:7dd58e33235e83cf09d625e55cffd7b0f0eede7ee9223cdd666a87624f60c21a"}, - {file = "multiprocess-0.70.15-py311-none-any.whl", hash = "sha256:134f89053d82c9ed3b73edd3a2531eb791e602d4f4156fc92a79259590bd9670"}, - {file = "multiprocess-0.70.15-py37-none-any.whl", hash = "sha256:f7d4a1629bccb433114c3b4885f69eccc200994323c80f6feee73b0edc9199c5"}, - {file = "multiprocess-0.70.15-py38-none-any.whl", hash = "sha256:bee9afba476c91f9ebee7beeee0601face9eff67d822e893f9a893725fbd6316"}, - {file = "multiprocess-0.70.15-py39-none-any.whl", hash = "sha256:3e0953f5d52b4c76f1c973eaf8214554d146f2be5decb48e928e55c7a2d19338"}, - {file = "multiprocess-0.70.15.tar.gz", hash = "sha256:f20eed3036c0ef477b07a4177cf7c1ba520d9a2677870a4f47fe026f0cd6787e"}, + {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}, + {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}, + {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"}, + {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"}, + {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}, 
+ {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}, + {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}, + {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}, + {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}, + {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"}, + {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}, + {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}, ] [package.dependencies] -dill = ">=0.3.7" +dill = ">=0.3.8" [[package]] name = "mypy" @@ -4668,40 +4667,39 @@ twitter = ["twython"] [[package]] name = "numexpr" -version = "2.8.8" +version = "2.9.0" description = "Fast numerical expression evaluator for NumPy" optional = false python-versions = ">=3.9" files = [ - {file = "numexpr-2.8.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85c9f79e346c26aa0d425ecfc9e5de7184567d5e48d0bdb02d468bb927e92525"}, - {file = "numexpr-2.8.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dbac846f713b4c82333e6af0814ebea0b4e74dfb2649e76c58953fd4862322dd"}, - {file = "numexpr-2.8.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d7bfc8b77d8a7b04cd64ae42b62b3bf824a8c751ca235692bfd5231c6e90127"}, - {file = "numexpr-2.8.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:307b49fd15ef2ca292f381e67759e5b477410341f2f499a377234f1b42f529a6"}, - {file = "numexpr-2.8.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:aab17d65751c039d13ed9d49c9a7517b130ef488c1885c4666af9b5c6ad59520"}, - {file = "numexpr-2.8.8-cp310-cp310-win32.whl", hash = "sha256:6459dc6ed6abcdeab3cd3667c79f29e4a0f0a02c29ad71ee5cff065e880ee9ef"}, - {file = "numexpr-2.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:22ccd67c0fbeae091f2c577f5b9c8046de6631d46b1cbe22aad46a08d2b42c2d"}, - {file = "numexpr-2.8.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:47c05007cd1c553515492c1a78b5477eaaba9cadc5d7b795d49f7aae53ccdf68"}, - {file = "numexpr-2.8.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4649c1dcf9b0c2ae0a7b767dbbbde4e05ee68480c1ba7f06fc7963f1f73acf4"}, - {file = "numexpr-2.8.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a82d710145b0fbaec919dde9c90ed9df1e6785625cc36d1c71f3a53112b66fc5"}, - {file = "numexpr-2.8.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a92f230dd9d6c42803f855970e93677b44290b6dad15cb6796fd85edee171ce"}, - {file = "numexpr-2.8.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ccef9b09432d59229c2a737882e55de7906006452003323e107576f264cec373"}, - {file = "numexpr-2.8.8-cp311-cp311-win32.whl", hash = "sha256:bf8c517bbbb82c07c23c17f9d52b4c9f86601f57d48e87c0cbda24af5907f4dd"}, - {file = "numexpr-2.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:4f01d71db6fdb97a68def5407e2dbd748eaea9d98929db08816de40aa4ae3084"}, - {file = "numexpr-2.8.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:76f0f010f9c6318bae213b21c5c0e381c2fc9c9ecb8b35f99f5030e7ac96c9ce"}, - {file = "numexpr-2.8.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3f168b4b42d4cb120fe1993676dcf74b77a3e8e45b58855566da037cfd938ca3"}, - {file = "numexpr-2.8.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f031ac4e70f9ad867543bfbde8452e9d1a14f0525346b4b8bd4e5c0f1380a11c"}, - {file = "numexpr-2.8.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:121b049b6909787111daf92919c052c4fd87b5691172e8f19f702b96f20aaafa"}, - {file = "numexpr-2.8.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae264c35fa67cd510191ab8144f131fddd0f1d13413af710913ea6fc0c6aa61"}, - {file = "numexpr-2.8.8-cp312-cp312-win32.whl", hash = "sha256:399cb914b41c4027ba88a18f6b8ccfc3af5c32bc3b1758403a7c44c72530618a"}, - {file = "numexpr-2.8.8-cp312-cp312-win_amd64.whl", hash = "sha256:925927cd1f610593e7783d8f2e12e3d800d5928601e077e4910e2b50bde624b6"}, - {file = "numexpr-2.8.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd07793b074cc38e478637cbe738dff7d8eb92b5cf8ffaacff0c4f0bca5270a0"}, - {file = "numexpr-2.8.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:290f91c7ba7772abaf7107f3cc0601d93d6a3f21c13ee3da93f1b8a9ca3e8d39"}, - {file = "numexpr-2.8.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:296dc1f79d386166dec3bdb45f51caba29ffd8dc91db15447108c04d3001d921"}, - {file = "numexpr-2.8.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7badc50efbb2f1c8b78cd68089031e0fd29cbafa6a9e6d730533f22d88168406"}, - {file = "numexpr-2.8.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d83a542d9deefb050e389aacaddea0f09d68ec617dd37e45b9a7cfbcba6d729"}, - {file = "numexpr-2.8.8-cp39-cp39-win32.whl", hash = "sha256:17104051f0bd83fd350212e268d8b48017d5eff522b09b573fdbcc560c5e7ab3"}, - {file = "numexpr-2.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:12146521b1730073859a20454e75004e38cd0cb61333e763c58ef5171e101eb2"}, - {file = "numexpr-2.8.8.tar.gz", hash = "sha256:e76ce4d25372f46170cf7eb1ff14ed5d9c69a0b162a405063cbe481bafe3af34"}, + {file = "numexpr-2.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c52b4ac54514f5d4d8ead66768810cd5f77aa198e6064213d9b5c7b2e1c97c35"}, + {file = "numexpr-2.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50f57bc333f285e8c46b1ce61c6e94ec9bb74e4ea0d674d1c6c6f4a286f64fe4"}, + {file = 
"numexpr-2.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:943ba141f3884ffafa3fa1a3ebf3cdda9e9688a67a3c91986e6eae13dc073d43"}, + {file = "numexpr-2.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee48acd6339748a65c0e32403b802ebfadd9cb0e3b602ba5889896238eafdd61"}, + {file = "numexpr-2.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:972e29b5cecc21466c5b177e38568372ab66aab1f053ae04690a49cea09e747d"}, + {file = "numexpr-2.9.0-cp310-cp310-win32.whl", hash = "sha256:520e55d75bd99c76e376b6326e35ecf44c5ce2635a5caed72799a3885fc49173"}, + {file = "numexpr-2.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:5615497c3f34b637fda9b571f7774b6a82f2367cc1364b7a4573068dd1aabcaa"}, + {file = "numexpr-2.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bffcbc55dea5a5f5255e2586da08f00929998820e6592ee717273a08ad021eb3"}, + {file = "numexpr-2.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:374dc6ca54b2af813cb15c2b34e85092dfeac1f73d51ec358dd81876bd9adcec"}, + {file = "numexpr-2.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:549afc1622296cca3478a132c6e0fb5e55a19e08d32bc0d5a415434824a9c157"}, + {file = "numexpr-2.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c618a5895e34db0a364dcdb9960084c080f93f9d377c45b1ca9c394c24b4e77"}, + {file = "numexpr-2.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:37a7dd36fd79a2b69c3fd2bc2b51ac8270bebc69cc96e6d78f1148e147fcbfa8"}, + {file = "numexpr-2.9.0-cp311-cp311-win32.whl", hash = "sha256:00dab81d49239ea5423861ad627097b44d10d802df5f883d1b00f742139c3349"}, + {file = "numexpr-2.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:0e2574cafb18373774f351cac45ed23b5b360d9ecd1dbf3c12dac6d6eefefc87"}, + {file = "numexpr-2.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9761195526a228e05eba400b8c484c94bbabfea853b9ea35ab8fa1bf415331b1"}, + {file = "numexpr-2.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash 
= "sha256:0f619e91034b346ea85a4e1856ff06011dcb7dce10a60eda75e74db90120f880"}, + {file = "numexpr-2.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2749bce1c48706d58894992634a43b8458c4ba9411191471c4565fa41e9979ec"}, + {file = "numexpr-2.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c31f621a625c7be602f92b027d90f2d3d60dcbc19b106e77fb04a4362152af"}, + {file = "numexpr-2.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b937861d13de67d440d54c85a835faed7572be5a6fd10d4f3bd4e66e157f"}, + {file = "numexpr-2.9.0-cp312-cp312-win32.whl", hash = "sha256:aa6298fb46bd7ec69911b5b80927a00663d066e719b29f48eb952d559bdd8371"}, + {file = "numexpr-2.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:8efd879839572bde5a38a1aa3ac23fd4dd9b956fb969bc5e43d1c403419e1e8c"}, + {file = "numexpr-2.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b04f12a6130094a251e3a8fff40130589c1c83be6d4eb223873bea14d8c8b630"}, + {file = "numexpr-2.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:977537f2a1cc843f888fb5f0507626f956ada674e4b3847168214a3f3c7446fa"}, + {file = "numexpr-2.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eae6c0c2d5682c02e8ac9c4287c2232c2443c9148b239df22500eaa3c5d73b7"}, + {file = "numexpr-2.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fae6828042b70c2f52a132bfcb9139da704274ed11b982fbf537f91c075d2ef"}, + {file = "numexpr-2.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c77392aea53f0700d60eb270ad63174b4ff10b04f8de92861101ca2129fee51"}, + {file = "numexpr-2.9.0-cp39-cp39-win32.whl", hash = "sha256:3b03a6cf37a72f5b52f2b962d7ac7f565bea8eaba83c3c4e5fcf8fbb6a938153"}, + {file = "numexpr-2.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:d655b6eacc4e81006b662cba014e4615a9ddd96881b8b4db4ad0d7f6d38069af"}, ] [package.dependencies] @@ -4984,13 +4982,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.9.0" +version = "1.10.0" 
description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.9.0-py3-none-any.whl", hash = "sha256:5774a0582ed82f6de92200ed5024e03e272b93e04e9d31caeda5fb80f63df50d"}, - {file = "openai-1.9.0.tar.gz", hash = "sha256:3e9947a544556c051fa138a4def5bd8b468364ec52803c6628532ab949ddce55"}, + {file = "openai-1.10.0-py3-none-any.whl", hash = "sha256:aa69e97d0223ace9835fbf9c997abe9ee95318f684fd2de6d02c870700c71ebc"}, + {file = "openai-1.10.0.tar.gz", hash = "sha256:208886cb501b930dc63f48d51db9c15e5380380f80516d07332adad67c9f1053"}, ] [package.dependencies] @@ -5237,13 +5235,13 @@ files = [ [[package]] name = "overrides" -version = "7.6.0" +version = "7.7.0" description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.6.0-py3-none-any.whl", hash = "sha256:c36e6635519ea9c5b043b65c36d4b886aee8bd45b7d4681d2a6df0898df4b654"}, - {file = "overrides-7.6.0.tar.gz", hash = "sha256:01e15bbbf15b766f0675c275baa1878bd1c7dc9bc7b9ee13e677cdba93dc1bd9"}, + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, ] [[package]] @@ -5601,13 +5599,13 @@ strenum = ">=0.4.9,<0.5.0" [[package]] name = "posthog" -version = "3.3.2" +version = "3.3.3" description = "Integrate PostHog into any python application." 
optional = false python-versions = "*" files = [ - {file = "posthog-3.3.2-py2.py3-none-any.whl", hash = "sha256:14fb43ea95c40b353db59c49af2c09ff15188aa2963f48091fc7912fa9375263"}, - {file = "posthog-3.3.2.tar.gz", hash = "sha256:734bf89f3c372605a8bbf2b07f600885287209145d747b09ccd004c59834750e"}, + {file = "posthog-3.3.3-py2.py3-none-any.whl", hash = "sha256:22b450be8cfdbcf403cbbcc50e9ca6e681d04c6a057336483cc4b8beb5794a15"}, + {file = "posthog-3.3.3.tar.gz", hash = "sha256:2f16372675a81fb093dd19d7fb8ea8166068f5a7a17358d71ccb2f7080a4c8c1"}, ] [package.dependencies] @@ -6048,13 +6046,13 @@ pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pyautogen" -version = "0.2.8" +version = "0.2.9" description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework" optional = false python-versions = ">=3.8,<3.13" files = [ - {file = "pyautogen-0.2.8-py3-none-any.whl", hash = "sha256:9c3f66db332277b8f4ffbf1eb6250612f560c574ef30907a05bb90cb5af890a4"}, - {file = "pyautogen-0.2.8.tar.gz", hash = "sha256:2aeb2576d586b9cebe76925eb87342a44b3e9e9719052ae94245056695524975"}, + {file = "pyautogen-0.2.9-py3-none-any.whl", hash = "sha256:50e8b8c36a1d19be3183d0e10b161ab9c4f4f26149729240ef2a73f3bf9d163a"}, + {file = "pyautogen-0.2.9.tar.gz", hash = "sha256:993416a268f47963e1bd39ef549096e457ae890b33e6d6ced7adfc069c73911e"}, ] [package.dependencies] @@ -6076,7 +6074,7 @@ mathchat = ["pydantic (==1.10.9)", "sympy", "wolframalpha"] redis = ["redis"] retrievechat = ["chromadb", "ipython", "pypdf", "sentence-transformers"] teachable = ["chromadb"] -test = ["coverage (>=5.3)", "ipykernel", "nbconvert", "nbformat", "pre-commit", "pytest (>=6.1.1)", "pytest-asyncio"] +test = ["coverage (>=5.3)", "ipykernel", "nbconvert", "nbformat", "pre-commit", "pytest (>=6.1.1,<8)", "pytest-asyncio"] websurfer = ["beautifulsoup4", "markdownify", "pathvalidate", "pdfminer.six"] [[package]] @@ -6133,18 +6131,18 @@ files = [ [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.0" 
description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, + {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.1" typing-extensions = ">=4.6.1" [package.extras] @@ -6152,116 +6150,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = 
"pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - 
{file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = 
"pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = 
"pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = 
"pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, + {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = 
"sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, + {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, + {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = 
"sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, + {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, + {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, + {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, + {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, + {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, + {file = 
"pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, + {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, + {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, + {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, + {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, + {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, ] [package.dependencies] @@ -6503,17 +6475,17 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.23.3" +version = "0.23.4" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.3.tar.gz", hash = "sha256:af313ce900a62fbe2b1aed18e37ad757f1ef9940c6b6a88e2954de38d6b1fb9f"}, - {file = "pytest_asyncio-0.23.3-py3-none-any.whl", hash = "sha256:37a9d912e8338ee7b4a3e917381d1c95bfc8682048cb0fbc35baba316ec1faba"}, + {file = "pytest-asyncio-0.23.4.tar.gz", hash = "sha256:2143d9d9375bf372a73260e4114541485e84fca350b0b6b92674ca56ff5f7ea2"}, + {file = "pytest_asyncio-0.23.4-py3-none-any.whl", hash = 
"sha256:b0079dfac14b60cd1ce4691fbfb1748fe939db7d0234b5aba97197d10fbe0fef"}, ] [package.dependencies] -pytest = ">=7.0.0" +pytest = ">=7.0.0,<8" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] @@ -6708,13 +6680,13 @@ files = [ [[package]] name = "pytz" -version = "2023.3.post1" +version = "2023.4" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, + {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] [[package]] @@ -7294,28 +7266,28 @@ msg-parse = ["extract-msg (>=0.27)"] [[package]] name = "ruff" -version = "0.1.14" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, - {file = 
"ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, - {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, - {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, - {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, - {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = 
"ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -7573,25 +7545,26 @@ test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", [[package]] name = "sentence-transformers" -version = "2.2.2" +version = "2.3.0" description = "Multilingual text embeddings" optional = true -python-versions = ">=3.6.0" +python-versions = ">=3.8.0" files = [ - {file = "sentence-transformers-2.2.2.tar.gz", hash = "sha256:dbc60163b27de21076c9a30d24b5b7b6fa05141d68cf2553fa9a77bf79a29136"}, + {file = "sentence-transformers-2.3.0.tar.gz", hash = "sha256:8cd90bedc73f2cb6b8fa81f9ebf2bb24fb15a3eed4dd39bf7865d211c3bb6253"}, + {file = "sentence_transformers-2.3.0-py3-none-any.whl", hash = "sha256:d12607a2172c50e77ccd3cf160a5070c1d1e8f974d17f0dd7e05d015f76d6108"}, ] [package.dependencies] -huggingface-hub 
= ">=0.4.0" +huggingface-hub = ">=0.15.1" nltk = "*" numpy = "*" +Pillow = "*" scikit-learn = "*" scipy = "*" sentencepiece = "*" -torch = ">=1.6.0" -torchvision = "*" +torch = ">=1.11.0" tqdm = "*" -transformers = ">=4.6.0,<5.0.0" +transformers = ">=4.32.0,<5.0.0" [[package]] name = "sentencepiece" @@ -8267,44 +8240,6 @@ typing-extensions = "*" dynamo = ["jinja2"] opt-einsum = ["opt-einsum (>=3.3)"] -[[package]] -name = "torchvision" -version = "0.16.2" -description = "image and video datasets and models for torch deep learning" -optional = true -python-versions = ">=3.8" -files = [ - {file = "torchvision-0.16.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:bc86f2800cb2c0c1a09c581409cdd6bff66e62f103dc83fc63f73346264c3756"}, - {file = "torchvision-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b024bd412df6d3a007dcebf311a894eb3c5c21e1af80d12be382bbcb097a7c3a"}, - {file = "torchvision-0.16.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:e89f10f3c8351972b6e3fda95bc3e479ea8dbfc9dfcfd2c32902dbad4ba5cfc5"}, - {file = "torchvision-0.16.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:96c7583700112a410bdc4e1e4f118c429dab49c29c9a31a2cc3579bc9b08b19d"}, - {file = "torchvision-0.16.2-cp310-cp310-win_amd64.whl", hash = "sha256:9f4032ebb3277fb07ff6a9b818d50a547fb8fcd89d958cfd9e773322454bb688"}, - {file = "torchvision-0.16.2-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:67b1aaf8b8cb02ce75dd445f291a27c8036a502f8c0aa76e28c37a0faac2e153"}, - {file = "torchvision-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bef30d03e1d1c629761f4dca51d3b7d8a0dc0acce6f4068ab2a1634e8e7b64e0"}, - {file = "torchvision-0.16.2-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e59cc7b2bd1ab5c0ce4ae382e4e37be8f1c174e8b5de2f6a23c170de9ae28495"}, - {file = "torchvision-0.16.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:e130b08cc9b3cc73a6c59d6edf032394a322f9579bfd21d14bc2e1d0999aa758"}, - {file = "torchvision-0.16.2-cp311-cp311-win_amd64.whl", hash 
= "sha256:8692ab1e48807e9604046a6f4beeb67b523294cee1b00828654bb0df2cfce2b2"}, - {file = "torchvision-0.16.2-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:b82732dcf876a37c852772342aa6ee3480c03bb3e2a802ae109fc5f7e28d26e9"}, - {file = "torchvision-0.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4b065143d1a720fe8a9077fd4be35d491f98819ec80b3dbbc3ec64d0b707a906"}, - {file = "torchvision-0.16.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bc5f274e4ecd1b86062063cdf4fd385a1d39d147a3a2685fbbde9ff08bb720b8"}, - {file = "torchvision-0.16.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:335959c43b371c0474af34c1ef2a52efdc7603c45700d29e4475eeb02984170c"}, - {file = "torchvision-0.16.2-cp38-cp38-win_amd64.whl", hash = "sha256:7fd22d86e08eba321af70cad291020c2cdeac069b00ce88b923ca52e06174769"}, - {file = "torchvision-0.16.2-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:56115268b37f0b75364e3654e47ad9abc66ac34c1f9e5e3dfa89a22d6a40017a"}, - {file = "torchvision-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:82805f8445b094f9d1e770390ee6cc86855e89955e08ce34af2e2274fc0e5c45"}, - {file = "torchvision-0.16.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3f4bd5fcbc361476e2e78016636ac7d5509e59d9962521f06eb98e6803898182"}, - {file = "torchvision-0.16.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8199acdf8ab066a28b84a5b6f4d97b58976d9e164b1acc3a9d14fccfaf74bb3a"}, - {file = "torchvision-0.16.2-cp39-cp39-win_amd64.whl", hash = "sha256:41dd4fa9f176d563fe9f1b9adef3b7e582cdfb60ce8c9bc51b094a025be687c9"}, -] - -[package.dependencies] -numpy = "*" -pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" -requests = "*" -torch = "2.1.2" - -[package.extras] -scipy = ["scipy"] - [[package]] name = "tornado" version = "6.4" @@ -8362,13 +8297,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "transformers" -version = "4.37.1" +version = "4.37.2" description = "State-of-the-art Machine Learning for JAX, PyTorch and 
TensorFlow" optional = true python-versions = ">=3.8.0" files = [ - {file = "transformers-4.37.1-py3-none-any.whl", hash = "sha256:05e4c4bf94f74addeb716bc83517f49d55df1e9022db3d5b027c801e9a410ebf"}, - {file = "transformers-4.37.1.tar.gz", hash = "sha256:9843368d97fd7ac30126664743adc65e8e5be930da7d66342172e97bd1243e2d"}, + {file = "transformers-4.37.2-py3-none-any.whl", hash = "sha256:595a8b12a1fcc4ad0ced49ce206c58e17be68c85d7aee3d7546d04a32c910d2e"}, + {file = "transformers-4.37.2.tar.gz", hash = "sha256:f307082ae5d528b8480611a4879a4a11651012d0e9aaea3f6cf17219ffd95542"}, ] [package.dependencies] @@ -8379,7 +8314,7 @@ packaging = ">=20.0" pyyaml = ">=5.1" regex = "!=2019.12.17" requests = "*" -safetensors = ">=0.3.1" +safetensors = ">=0.4.1" tokenizers = ">=0.14,<0.19" tqdm = ">=4.27" @@ -8487,13 +8422,13 @@ files = [ [[package]] name = "types-google-cloud-ndb" -version = "2.2.0.20240106" +version = "2.2.0.20240129" description = "Typing stubs for google-cloud-ndb" optional = false python-versions = ">=3.8" files = [ - {file = "types-google-cloud-ndb-2.2.0.20240106.tar.gz", hash = "sha256:b81d4ea35f212dc845429d08f1981eb011fe78cee3eebba81157d18b7f6e4616"}, - {file = "types_google_cloud_ndb-2.2.0.20240106-py3-none-any.whl", hash = "sha256:c76efa97b17c15865784fb4e54da56cad805acf81f908dfe4f962a957cb84555"}, + {file = "types-google-cloud-ndb-2.2.0.20240129.tar.gz", hash = "sha256:4da51fbc817d579804dd0201198baed72503e5bb9a8daa34dff79cb79f231a9d"}, + {file = "types_google_cloud_ndb-2.2.0.20240129-py3-none-any.whl", hash = "sha256:993731eacbfd6bb914083c1996ca5b7d47535f89880afe415c52a66d9bd11325"}, ] [[package]] @@ -8531,13 +8466,13 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.3.0.20240106" +version = "24.0.0.20240130" description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" files = [ - {file = "types-pyOpenSSL-23.3.0.20240106.tar.gz", hash = "sha256:3d6f3462bec0c260caadf93fbb377225c126661b779c7d9ab99b6dad5ca10db9"}, - {file = 
"types_pyOpenSSL-23.3.0.20240106-py3-none-any.whl", hash = "sha256:47a7eedbd18b7bcad17efebf1c53416148f5a173918a6d75027e75e32fe039ae"}, + {file = "types-pyOpenSSL-24.0.0.20240130.tar.gz", hash = "sha256:c812e5c1c35249f75ef5935708b2a997d62abf9745be222e5f94b9595472ab25"}, + {file = "types_pyOpenSSL-24.0.0.20240130-py3-none-any.whl", hash = "sha256:24a255458b5b8a7fca8139cf56f2a8ad5a4f1a5f711b73a5bb9cb50dc688fab5"}, ] [package.dependencies] @@ -8559,24 +8494,24 @@ types-pyasn1 = "*" [[package]] name = "types-pytz" -version = "2023.3.1.1" +version = "2023.4.0.20240130" description = "Typing stubs for pytz" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"}, - {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"}, + {file = "types-pytz-2023.4.0.20240130.tar.gz", hash = "sha256:33676a90bf04b19f92c33eec8581136bea2f35ddd12759e579a624a006fd387a"}, + {file = "types_pytz-2023.4.0.20240130-py3-none-any.whl", hash = "sha256:6ce76a9f8fd22bd39b01a59c35bfa2db39b60d11a2f77145e97b730de7e64fe0"}, ] [[package]] name = "types-pywin32" -version = "306.0.0.20240106" +version = "306.0.0.20240130" description = "Typing stubs for pywin32" optional = false python-versions = ">=3.8" files = [ - {file = "types-pywin32-306.0.0.20240106.tar.gz", hash = "sha256:827a0787afceb1728afbc031fc9e74ffe758a98eff8d3613446cc0474119e9d5"}, - {file = "types_pywin32-306.0.0.20240106-py3-none-any.whl", hash = "sha256:421859291662cda4ca3ffa305a57c8719b4b35bdd90260f34ce4e3524682f3cf"}, + {file = "types-pywin32-306.0.0.20240130.tar.gz", hash = "sha256:16ec2059a2b5e40c13e533f2bf8e5a46788efe058e332c46ddce1f56d1ccc527"}, + {file = "types_pywin32-306.0.0.20240130-py3-none-any.whl", hash = "sha256:85dbc541b1161279aea8c471ac1b157ef1ab221ead53ce7f4fdc4d7644d1e44e"}, ] [[package]] @@ -8621,13 
+8556,13 @@ types-urllib3 = "*" [[package]] name = "types-requests" -version = "2.31.0.20240106" +version = "2.31.0.20240125" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240106.tar.gz", hash = "sha256:0e1c731c17f33618ec58e022b614a1a2ecc25f7dc86800b36ef341380402c612"}, - {file = "types_requests-2.31.0.20240106-py3-none-any.whl", hash = "sha256:da997b3b6a72cc08d09f4dba9802fdbabc89104b35fe24ee588e674037689354"}, + {file = "types-requests-2.31.0.20240125.tar.gz", hash = "sha256:03a28ce1d7cd54199148e043b2079cdded22d6795d19a2c2a6791a4b2b5e2eb5"}, + {file = "types_requests-2.31.0.20240125-py3-none-any.whl", hash = "sha256:9592a9a4cb92d6d75d9b491a41477272b710e021011a2a3061157e2fb1f1a5d1"}, ] [package.dependencies] @@ -8787,13 +8722,13 @@ xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] [[package]] name = "unstructured-client" -version = "0.15.5" +version = "0.16.0" description = "Python Client SDK for Unstructured API" optional = false python-versions = ">=3.8" files = [ - {file = "unstructured-client-0.15.5.tar.gz", hash = "sha256:da36c26c230707aba5b4a586b954631f100e493a660b16f16442e8c8cac8803a"}, - {file = "unstructured_client-0.15.5-py3-none-any.whl", hash = "sha256:82906fef86961a7efce35a4dcd46651e2fc4b96f3ad2b27d4e403def015bf76e"}, + {file = "unstructured-client-0.16.0.tar.gz", hash = "sha256:88928dd6c18d6f3fe58a0a80b03aa34f841a35e1f60ed280bdafa7f05029d380"}, + {file = "unstructured_client-0.16.0-py3-none-any.whl", hash = "sha256:351db1444a30133dc4f3946f80de6c93a786407b954db0828e5787df08b39125"}, ] [package.dependencies] @@ -9515,4 +9450,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "db11539b82bcfff4137452acb4f77066e3f9b228529365df1f55d7d44f38920d" +content-hash = "2c1761b379f2cf4f9a9ecf53a66ffa0742a8a1d221797e49c1e5c8c4c67384ff" diff --git a/pyproject.toml b/pyproject.toml 
index 4c1565358..5ae9ac473 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ google-search-results = "^2.4.1" google-api-python-client = "^2.79.0" typer = "^0.9.0" gunicorn = "^21.2.0" -langchain = "~0.0.345" +langchain = "~0.1.0" openai = "^1.6.1" pandas = "2.0.3" chromadb = "^0.4.0" From 05588767e88dacb4f1f51cabe728678e9d20ac10 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 09:16:04 -0300 Subject: [PATCH 137/153] Refactor PineconeComponent in Pinecone.py --- .../components/vectorstores/Pinecone.py | 46 ++++++++++++++----- 1 file changed, 35 insertions(+), 11 deletions(-) diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index 1c04b6bc5..c8ac3c689 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -1,13 +1,12 @@ -from langflow import CustomComponent -from typing import Optional, List, Union -from langchain_community.vectorstores.pinecone import Pinecone -from langflow.field_typing import ( - Document, - Embeddings, -) +import os +from typing import List, Optional, Union + +import pinecone # type: ignore from langchain.schema import BaseRetriever from langchain.vectorstores.base import VectorStore -import pinecone # type: ignore +from langchain_community.vectorstores.pinecone import Pinecone +from langflow import CustomComponent +from langflow.field_typing import Document, Embeddings class PineconeComponent(CustomComponent): @@ -23,15 +22,40 @@ class PineconeComponent(CustomComponent): "pinecone_api_key": {"display_name": "Pinecone API Key", "default": "", "password": True, "required": True}, "pinecone_env": {"display_name": "Pinecone Environment", "default": "", "required": True}, "search_kwargs": {"display_name": "Search Kwargs", "default": "{}"}, + "pool_threads": {"display_name": "Pool Threads", "default": 1, "advanced": True}, } def build( self, 
embedding: Embeddings, + pinecone_env: str, documents: List[Document], index_name: Optional[str] = None, pinecone_api_key: Optional[str] = None, - pinecone_env: Optional[str] = None, + text_key: Optional[str] = "text", + namespace: Optional[str] = "default", + pool_threads: Optional[int] = None, ) -> Union[VectorStore, Pinecone, BaseRetriever]: - pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) - return Pinecone.from_documents(documents=documents, embedding=embedding, index_name=index_name) + if pinecone_api_key is None or pinecone_env is None: + raise ValueError("Pinecone API Key and Environment are required.") + if os.getenv("PINECONE_API_KEY") is None and pinecone_api_key is None: + raise ValueError("Pinecone API Key is required.") + + pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) # type: ignore + if documents: + return Pinecone.from_documents( + documents=documents, + embedding=embedding, + index_name=index_name, + pool_threads=pool_threads, + namespace=namespace, + text_key=text_key, + ) + + return Pinecone.from_existing_index( + index_name=index_name, + embedding=embedding, + text_key=text_key, + namespace=namespace, + pool_threads=pool_threads, + ) From 09354748cdf600d58c3b6828ebe7440c1ad05c85 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 09:16:38 -0300 Subject: [PATCH 138/153] Update Pinecone.py to remove default value for "embedding" configuration --- src/backend/langflow/components/vectorstores/Pinecone.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index c8ac3c689..36a87c1e2 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -5,6 +5,7 @@ import pinecone # type: ignore from langchain.schema import BaseRetriever from langchain.vectorstores.base import VectorStore 
from langchain_community.vectorstores.pinecone import Pinecone + from langflow import CustomComponent from langflow.field_typing import Document, Embeddings @@ -16,7 +17,7 @@ class PineconeComponent(CustomComponent): def build_config(self): return { "documents": {"display_name": "Documents"}, - "embedding": {"display_name": "Embedding", "default": 1000}, + "embedding": {"display_name": "Embedding"}, "index_name": {"display_name": "Index Name"}, "namespace": {"display_name": "Namespace"}, "pinecone_api_key": {"display_name": "Pinecone API Key", "default": "", "password": True, "required": True}, From 4918f0303254dd722bf40aee99f2ef8b1441347c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 11:53:21 -0300 Subject: [PATCH 139/153] Remove test_agents_template.py and update test_zero_shot_agent, test_json_agent, and test_csv_agent --- tests/test_agents_template.py | 210 ------------- tests/test_chains_template.py | 312 ------------------- tests/test_llms_template.py | 559 ---------------------------------- 3 files changed, 1081 deletions(-) delete mode 100644 tests/test_agents_template.py delete mode 100644 tests/test_chains_template.py delete mode 100644 tests/test_llms_template.py diff --git a/tests/test_agents_template.py b/tests/test_agents_template.py deleted file mode 100644 index 01891ec05..000000000 --- a/tests/test_agents_template.py +++ /dev/null @@ -1,210 +0,0 @@ -from fastapi.testclient import TestClient - - -def test_zero_shot_agent(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - agents = json_response["agents"] - - zero_shot_agent = agents["ZeroShotAgent"] - assert set(zero_shot_agent["base_classes"]) == { - "ZeroShotAgent", - "BaseSingleActionAgent", - "Agent", - "Callable", - } - template = zero_shot_agent["template"] - - assert template["tools"] == { - "required": True, - "dynamic": False, - 
"placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "tools", - "type": "BaseTool", - "list": True, - "advanced": False, - "info": "", - "fileTypes": [], - } - - # Additional assertions for other template variables - assert template["callback_manager"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "callback_manager", - "type": "BaseCallbackManager", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["llm"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["output_parser"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "output_parser", - "type": "AgentOutputParser", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["input_variables"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "input_variables", - "type": "str", - "list": True, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["prefix"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": True, - "value": "Answer the following questions as best you can. 
You have access to the following tools:", - "password": False, - "name": "prefix", - "type": "str", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["suffix"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": True, - "value": "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}", - "password": False, - "name": "suffix", - "type": "str", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - - -def test_json_agent(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - agents = json_response["agents"] - - json_agent = agents["JsonAgent"] - assert json_agent["base_classes"] == ["AgentExecutor"] - template = json_agent["template"] - - assert template["toolkit"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "toolkit", - "type": "BaseToolkit", - "list": False, - "advanced": False, - "info": "", - "file_path": "", - "fileTypes": [], - "value": "", - } - assert template["llm"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "display_name": "LLM", - "info": "", - "file_path": "", - "fileTypes": [], - "value": "", - } - - -def test_csv_agent(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - agents = json_response["agents"] - - csv_agent = agents["CSVAgent"] - assert csv_agent["base_classes"] == ["AgentExecutor"] - template = csv_agent["template"] - - assert template["path"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - 
"multiline": False, - "value": "", - "fileTypes": [".csv"], - "password": False, - "name": "path", - "type": "file", - "list": False, - "file_path": "", - "advanced": False, - "info": "", - } - assert template["llm"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "display_name": "LLM", - "info": "", - "file_path": "", - "fileTypes": [], - "value": "", - } diff --git a/tests/test_chains_template.py b/tests/test_chains_template.py deleted file mode 100644 index 2e705ac00..000000000 --- a/tests/test_chains_template.py +++ /dev/null @@ -1,312 +0,0 @@ -from fastapi.testclient import TestClient - -# def test_chains_settings(client: TestClient, logged_in_headers): -# response = client.get("api/v1/all", headers=logged_in_headers) -# assert response.status_code == 200 -# json_response = response.json() -# chains = json_response["chains"] -# assert set(chains.keys()) == set(settings.chains) - - -def test_llm_checker_chain(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - chains = json_response["chains"] - chain = chains["LLMCheckerChain"] - - # Test the base classes, template, memory, verbose, llm, input_key, output_key, and _type objects - assert set(chain["base_classes"]) == { - "Callable", - "LLMCheckerChain", - "Chain", - } - - template = chain["template"] - assert template["llm"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["_type"] == "LLMCheckerChain" - - # Test the description object - assert chain["description"] == "" - - -def test_llm_math_chain(client: 
TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - chains = json_response["chains"] - - chain = chains["LLMMathChain"] - # Test the base classes, template, memory, verbose, llm, input_key, output_key, and _type objects - assert set(chain["base_classes"]) == { - "Callable", - "LLMMathChain", - "Chain", - } - - template = chain["template"] - assert template["memory"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "memory", - "type": "BaseMemory", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["verbose"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": False, - "password": False, - "name": "verbose", - "type": "bool", - "list": False, - "advanced": True, - "info": "", - "fileTypes": [], - } - assert template["llm"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["input_key"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "question", - "password": False, - "name": "input_key", - "type": "str", - "list": False, - "advanced": True, - "info": "", - "fileTypes": [], - } - assert template["output_key"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "answer", - "password": False, - "name": "output_key", - "type": "str", - "list": False, - "advanced": True, - "info": "", - "fileTypes": [], - } - assert template["_type"] == "LLMMathChain" - - # Test the description object - assert chain["description"] == "Chain 
that interprets a prompt and executes python code to do math." - - -def test_series_character_chain(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - chains = json_response["chains"] - - chain = chains["SeriesCharacterChain"] - - # Test the base classes, template, memory, verbose, llm, input_key, output_key, and _type objects - assert set(chain["base_classes"]) == { - "Callable", - "LLMChain", - "BaseCustomChain", - "Chain", - "ConversationChain", - "SeriesCharacterChain", - } - template = chain["template"] - - assert template["llm"] == { - "required": True, - "dynamic": False, - "display_name": "LLM", - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - "file_path": "", - "value": "", - } - assert template["character"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "character", - "type": "str", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - "file_path": "", - "value": "", - } - assert template["series"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "series", - "type": "str", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - "file_path": "", - "value": "", - } - assert template["_type"] == "SeriesCharacterChain" - - # Test the description object - assert ( - chain["description"] - == "SeriesCharacterChain is a chain you can use to have a conversation with a character from a series." 
- ) - - -def test_mid_journey_prompt_chain(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - chains = json_response["chains"] - chain = chains["MidJourneyPromptChain"] - assert isinstance(chain, dict) - - # Test the base_classes object - assert set(chain["base_classes"]) == { - "LLMChain", - "BaseCustomChain", - "Chain", - "ConversationChain", - "MidJourneyPromptChain", - } - - # Test the template object - template = chain["template"] - - assert template["llm"] == { - "required": True, - "dynamic": False, - "display_name": "LLM", - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "info": "", - "file_path": "", - "fileTypes": [], - "value": "", - } - # Test the description object - assert chain["description"] == "MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts." 
- - -def test_time_travel_guide_chain(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - chains = json_response["chains"] - chain = chains["TimeTravelGuideChain"] - assert isinstance(chain, dict) - - # Test the base_classes object - assert set(chain["base_classes"]) == { - "LLMChain", - "BaseCustomChain", - "TimeTravelGuideChain", - "Chain", - "ConversationChain", - } - - # Test the template object - template = chain["template"] - - assert template["llm"] == { - "required": True, - "dynamic": False, - "placeholder": "", - "display_name": "LLM", - "show": True, - "multiline": False, - "password": False, - "name": "llm", - "type": "BaseLanguageModel", - "list": False, - "advanced": False, - "info": "", - "file_path": "", - "fileTypes": [], - "value": "", - } - assert template["memory"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "memory", - "type": "BaseChatMemory", - "list": False, - "advanced": False, - "info": "", - "file_path": "", - "fileTypes": [], - "value": "", - } - - assert chain["description"] == "Time travel guide chain." 
diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py deleted file mode 100644 index 30a15c932..000000000 --- a/tests/test_llms_template.py +++ /dev/null @@ -1,559 +0,0 @@ -from fastapi.testclient import TestClient - - -def test_openai(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - language_models = json_response["llms"] - - model = language_models["OpenAI"] - template = model["template"] - - assert template["cache"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "cache", - "type": "bool", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["verbose"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "verbose", - "type": "bool", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["client"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "client", - "type": "Any", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["model_name"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "text-davinci-003", - "password": False, - "options": [ - "text-davinci-003", - "text-davinci-002", - "text-curie-001", - "text-babbage-001", - "text-ada-001", - ], - "name": "model_name", - "type": "str", - "list": True, - "advanced": False, - "info": "", - "fileTypes": [], - } - # Add more assertions for other properties here - assert template["temperature"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": 0.7, - "password": False, - 
"name": "temperature", - "type": "float", - "list": False, - "advanced": False, - "info": "", - "rangeSpec": {"max": 1.0, "min": -1.0, "step": 0.1}, - "fileTypes": [], - } - assert template["max_tokens"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": 256, - "password": True, - "name": "max_tokens", - "type": "int", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["top_p"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 1, - "password": False, - "name": "top_p", - "type": "float", - "list": False, - "advanced": False, - "info": "", - "rangeSpec": {"max": 1.0, "min": -1.0, "step": 0.1}, - "fileTypes": [], - } - assert template["frequency_penalty"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 0, - "password": False, - "name": "frequency_penalty", - "type": "float", - "list": False, - "advanced": False, - "info": "", - "rangeSpec": {"max": 1.0, "min": -1.0, "step": 0.1}, - "fileTypes": [], - } - assert template["presence_penalty"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 0, - "password": False, - "name": "presence_penalty", - "type": "float", - "list": False, - "advanced": False, - "info": "", - "rangeSpec": {"max": 1.0, "min": -1.0, "step": 0.1}, - "fileTypes": [], - } - assert template["n"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 1, - "password": False, - "name": "n", - "type": "int", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["best_of"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 1, - "password": False, - "name": "best_of", - "type": "int", - 
"list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["model_kwargs"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": False, - "name": "model_kwargs", - "type": "dict", - "list": False, - "advanced": True, - "info": "", - "fileTypes": [], - } - assert template["openai_api_key"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "", - "password": True, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "type": "str", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["batch_size"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 20, - "password": False, - "name": "batch_size", - "type": "int", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["request_timeout"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "request_timeout", - "type": "float", - "list": False, - "advanced": False, - "info": "", - "rangeSpec": {"max": 1.0, "min": -1.0, "step": 0.1}, - "fileTypes": [], - } - assert template["logit_bias"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "logit_bias", - "type": "dict", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["max_retries"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 2, - "password": False, - "name": "max_retries", - "type": "int", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["streaming"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - 
"multiline": False, - "value": False, - "password": False, - "name": "streaming", - "type": "bool", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - - -def test_chat_open_ai(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - language_models = json_response["llms"] - - model = language_models["ChatOpenAI"] - template = model["template"] - - assert template["verbose"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": False, - "password": False, - "name": "verbose", - "type": "bool", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["client"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "client", - "type": "Any", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["model_name"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "gpt-4-1106-preview", - "password": False, - "options": [ - "gpt-4-1106-preview", - "gpt-4-vision-preview", - "gpt-4", - "gpt-4-32k", - "gpt-3.5-turbo", - "gpt-3.5-turbo-16k", - ], - "name": "model_name", - "type": "str", - "list": True, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["temperature"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": 0.7, - "password": False, - "name": "temperature", - "type": "float", - "list": False, - "advanced": False, - "info": "", - "rangeSpec": {"max": 1.0, "min": -1.0, "step": 0.1}, - "fileTypes": [], - } - assert template["model_kwargs"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": 
False, - "name": "model_kwargs", - "type": "dict", - "list": False, - "advanced": True, - "info": "", - "fileTypes": [], - } - assert template["openai_api_key"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "value": "", - "password": True, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "type": "str", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["request_timeout"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "password": False, - "name": "request_timeout", - "type": "float", - "list": False, - "advanced": False, - "info": "", - "rangeSpec": {"max": 1.0, "min": -1.0, "step": 0.1}, - "fileTypes": [], - } - assert template["max_retries"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 2, - "password": False, - "name": "max_retries", - "type": "int", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["streaming"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": False, - "password": False, - "name": "streaming", - "type": "bool", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["n"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": False, - "multiline": False, - "value": 1, - "password": False, - "name": "n", - "type": "int", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - - assert template["max_tokens"] == { - "required": False, - "dynamic": False, - "placeholder": "", - "show": True, - "multiline": False, - "password": True, - "name": "max_tokens", - "type": "int", - "list": False, - "advanced": False, - "info": "", - "fileTypes": [], - } - assert template["_type"] == "ChatOpenAI" - assert ( - 
model["description"] == "`OpenAI` Chat large language models API." # noqa E501 - ) - assert set(model["base_classes"]) == { - "BaseLLM", - "BaseChatModel", - "ChatOpenAI", - "BaseLanguageModel", - } - - -# Commenting this out for now, as it requires to activate the nodes -# def test_azure_open_ai(client: TestClient): -# response = client.get("/all") -# assert response.status_code == 200 -# json_response = response.json() -# language_models = json_response["llms"] - -# model = language_models["AzureOpenAI"] -# template = model["template"] - -# assert template["model_name"]["show"] is False -# assert template["deployment_name"] == { -# "required": False, -# "placeholder": "", -# "show": True, -# "multiline": False, -# "value": "", -# "password": False, -# "name": "deployment_name", -# "advanced": False, -# "type": "str", -# "list": False, -# } - - -# def test_azure_chat_open_ai(client: TestClient): -# response = client.get("/all") -# assert response.status_code == 200 -# json_response = response.json() -# language_models = json_response["llms"] - -# model = language_models["AzureChatOpenAI"] -# template = model["template"] - -# assert template["model_name"]["show"] is False -# assert template["deployment_name"] == { -# "required": False, -# "placeholder": "", -# "show": True, -# "multiline": False, -# "value": "", -# "password": False, -# "name": "deployment_name", -# "advanced": False, -# "type": "str", -# "list": False, -# } -# assert template["openai_api_type"] == { -# "required": False, -# "placeholder": "", -# "show": False, -# "multiline": False, -# "value": "azure", -# "password": False, -# "name": "openai_api_type", -# "display_name": "OpenAI API Type", -# "advanced": False, -# "type": "str", -# "list": False, -# } -# assert template["openai_api_version"] == { -# "required": False, -# "placeholder": "", -# "show": True, -# "multiline": False, -# "value": "2023-03-15-preview", -# "password": False, -# "name": "openai_api_version", -# "display_name": "OpenAI 
API Version", -# "advanced": False, -# "type": "str", -# "list": False, -# } From 4b697fc0df30a5bf47cc20e65402fd23842a6a9d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 14:03:33 -0300 Subject: [PATCH 140/153] Remove vectorstore_creator import and test_vectorstore_template.py --- src/backend/langflow/graph/graph/constants.py | 3 +-- src/backend/langflow/interface/tools/constants.py | 3 +-- src/backend/langflow/interface/types.py | 4 ++-- tests/test_vectorstore_template.py | 14 -------------- 4 files changed, 4 insertions(+), 20 deletions(-) delete mode 100644 tests/test_vectorstore_template.py diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/langflow/graph/graph/constants.py index 9514764b7..928280743 100644 --- a/src/backend/langflow/graph/graph/constants.py +++ b/src/backend/langflow/graph/graph/constants.py @@ -12,7 +12,6 @@ from langflow.interface.retrievers.base import retriever_creator from langflow.interface.text_splitters.base import textsplitter_creator from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.tools.base import tool_creator -from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator from langflow.utils.lazy_load import LazyLoadDictBase @@ -46,7 +45,7 @@ class VertexTypesDict(LazyLoadDictBase): **{t: types.LLMVertex for t in llm_creator.to_list()}, **{t: types.MemoryVertex for t in memory_creator.to_list()}, **{t: types.EmbeddingVertex for t in embedding_creator.to_list()}, - **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()}, + # **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()}, **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, diff --git 
a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index 89ab047d7..0ac37a0a4 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py @@ -2,7 +2,6 @@ from langchain import tools from langchain.agents import Tool from langchain.agents.load_tools import _BASE_TOOLS, _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS from langchain.tools.json.tool import JsonSpec - from langflow.interface.importing.utils import import_class from langflow.interface.tools.custom import PythonFunction, PythonFunctionTool @@ -13,7 +12,7 @@ CUSTOM_TOOLS = { "PythonFunction": PythonFunction, } -OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") for tool in tools.__all__} +OTHER_TOOLS = {tool: import_class(f"langchain_community.tools.{tool}") for tool in tools.__all__} ALL_TOOLS_NAMES = { **_BASE_TOOLS, diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 7f44be3ec..39c68889f 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -1,4 +1,5 @@ from cachetools import LRUCache, cached + from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator from langflow.interface.custom.directory_reader.utils import merge_nested_dicts_with_renaming @@ -14,7 +15,6 @@ from langflow.interface.text_splitters.base import textsplitter_creator from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.tools.base import tool_creator from langflow.interface.utilities.base import utility_creator -from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator @@ -46,7 +46,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union toolkits_creator, wrapper_creator, embedding_creator, - vectorstore_creator, + # 
vectorstore_creator, documentloader_creator, textsplitter_creator, utility_creator, diff --git a/tests/test_vectorstore_template.py b/tests/test_vectorstore_template.py deleted file mode 100644 index 3b5c7ed42..000000000 --- a/tests/test_vectorstore_template.py +++ /dev/null @@ -1,14 +0,0 @@ -from fastapi.testclient import TestClient -from langflow.services.deps import get_settings_service - - -# check that all agents are in settings.agents -# are in json_response["agents"] -def test_vectorstores_settings(client: TestClient, logged_in_headers): - settings_service = get_settings_service() - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - vectorstores = json_response["vectorstores"] - settings_vecs = set(settings_service.settings.VECTORSTORES) - assert all(vs in vectorstores for vs in settings_vecs) From 43e3baaa84265f1acdcd85fb1ec6aeaba90124bd Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 14:46:28 -0300 Subject: [PATCH 141/153] Update import statements for langchain_community packages --- .../agents/OpenAIConversationalAgent.py | 2 +- .../langflow/components/llms/AnthropicLLM.py | 2 +- .../components/llms/AzureChatOpenAI.py | 2 +- .../llms/BaiduQianfanChatEndpoints.py | 2 +- .../components/toolkits/VectorStoreInfo.py | 8 +++--- .../components/vectorstores/Chroma.py | 5 ++-- .../langflow/components/vectorstores/FAISS.py | 12 ++++----- .../vectorstores/MongoDBAtlasVectorSearch.py | 6 +++-- .../components/vectorstores/Pinecone.py | 2 +- .../components/vectorstores/Qdrant.py | 11 ++++---- .../langflow/components/vectorstores/Redis.py | 8 +++--- .../vectorstores/SupabaseVectorStore.py | 13 ++++------ .../components/vectorstores/Vectara.py | 13 +++++----- .../components/vectorstores/Weaviate.py | 3 +-- .../components/vectorstores/pgvector.py | 10 +++---- .../langflow/field_typing/constants.py | 2 +- .../langflow/interface/agents/custom.py | 9 
++++--- .../langflow/interface/custom_lists.py | 16 +++++++----- .../langflow/interface/importing/utils.py | 14 +++++----- .../langflow/interface/initialize/loading.py | 2 +- .../interface/initialize/vector_store.py | 26 +++++++++---------- .../langflow/interface/retrievers/base.py | 7 +++-- .../langflow/interface/utilities/base.py | 4 +-- .../langflow/interface/vector_store/base.py | 5 ++-- .../langflow/interface/wrappers/base.py | 4 +-- src/backend/langflow/processing/process.py | 2 +- 26 files changed, 94 insertions(+), 96 deletions(-) diff --git a/src/backend/langflow/components/agents/OpenAIConversationalAgent.py b/src/backend/langflow/components/agents/OpenAIConversationalAgent.py index 66ff0f275..1a06e25fb 100644 --- a/src/backend/langflow/components/agents/OpenAIConversationalAgent.py +++ b/src/backend/langflow/components/agents/OpenAIConversationalAgent.py @@ -3,7 +3,7 @@ from typing import List, Optional from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import _get_default_system_message from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent -from langchain.chat_models import ChatOpenAI +from langchain_community.chat_models import ChatOpenAI from langchain.memory.token_buffer import ConversationTokenBufferMemory from langchain.prompts import SystemMessagePromptTemplate from langchain.prompts.chat import MessagesPlaceholder diff --git a/src/backend/langflow/components/llms/AnthropicLLM.py b/src/backend/langflow/components/llms/AnthropicLLM.py index 2b9c758e5..94c4ed8f5 100644 --- a/src/backend/langflow/components/llms/AnthropicLLM.py +++ b/src/backend/langflow/components/llms/AnthropicLLM.py @@ -1,6 +1,6 @@ from typing import Optional -from langchain.chat_models.anthropic import ChatAnthropic +from langchain_community.chat_models.anthropic import ChatAnthropic from langchain.llms.base import BaseLanguageModel from pydantic.v1 import SecretStr diff --git 
a/src/backend/langflow/components/llms/AzureChatOpenAI.py b/src/backend/langflow/components/llms/AzureChatOpenAI.py index 1bce563bf..d43abb3dc 100644 --- a/src/backend/langflow/components/llms/AzureChatOpenAI.py +++ b/src/backend/langflow/components/llms/AzureChatOpenAI.py @@ -1,7 +1,7 @@ from typing import Optional from langflow import CustomComponent from langchain.llms.base import BaseLanguageModel -from langchain.chat_models.azure_openai import AzureChatOpenAI +from langchain_community.chat_models.azure_openai import AzureChatOpenAI class AzureChatOpenAIComponent(CustomComponent): diff --git a/src/backend/langflow/components/llms/BaiduQianfanChatEndpoints.py b/src/backend/langflow/components/llms/BaiduQianfanChatEndpoints.py index 8c828dc64..fd7341e15 100644 --- a/src/backend/langflow/components/llms/BaiduQianfanChatEndpoints.py +++ b/src/backend/langflow/components/llms/BaiduQianfanChatEndpoints.py @@ -1,6 +1,6 @@ from typing import Optional -from langchain.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint +from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint from langchain.llms.base import BaseLLM from pydantic.v1 import SecretStr diff --git a/src/backend/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/langflow/components/toolkits/VectorStoreInfo.py index 48e5d9d9f..ec2323bfd 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreInfo.py +++ b/src/backend/langflow/components/toolkits/VectorStoreInfo.py @@ -1,7 +1,9 @@ -from langflow import CustomComponent -from langchain.vectorstores import VectorStore -from typing import Union, Callable +from typing import Callable, Union + from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo +from langchain_community.vectorstores import VectorStore + +from langflow import CustomComponent class VectorStoreInfoComponent(CustomComponent): diff --git a/src/backend/langflow/components/vectorstores/Chroma.py 
b/src/backend/langflow/components/vectorstores/Chroma.py index adc3ed554..8f25548a8 100644 --- a/src/backend/langflow/components/vectorstores/Chroma.py +++ b/src/backend/langflow/components/vectorstores/Chroma.py @@ -3,9 +3,8 @@ from typing import List, Optional, Union import chromadb # type: ignore from langchain.embeddings.base import Embeddings from langchain.schema import BaseRetriever, Document -from langchain.vectorstores.chroma import Chroma -from langchain.vectorstores.base import VectorStore - +from langchain_community.vectorstores import VectorStore +from langchain_community.vectorstores.chroma import Chroma from langflow import CustomComponent diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py index 98fee467c..dec14f6db 100644 --- a/src/backend/langflow/components/vectorstores/FAISS.py +++ b/src/backend/langflow/components/vectorstores/FAISS.py @@ -1,12 +1,10 @@ -from langflow import CustomComponent -from langchain_community.vectorstores.faiss import FAISS from typing import List, Union + from langchain.schema import BaseRetriever -from langchain.vectorstores.base import VectorStore -from langflow.field_typing import ( - Document, - Embeddings, -) +from langchain_community.vectorstores import VectorStore +from langchain_community.vectorstores.faiss import FAISS +from langflow import CustomComponent +from langflow.field_typing import Document, Embeddings class FAISSComponent(CustomComponent): diff --git a/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py b/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py index b477e8f54..d2d215f2b 100644 --- a/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py +++ b/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py @@ -1,6 +1,8 @@ +from typing import List, Optional + +from langchain_community.vectorstores import MongoDBAtlasVectorSearch + from langflow import 
CustomComponent -from langchain.vectorstores import MongoDBAtlasVectorSearch -from typing import Optional, List from langflow.field_typing import ( Document, Embeddings, diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index 36a87c1e2..c4613948a 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -3,7 +3,7 @@ from typing import List, Optional, Union import pinecone # type: ignore from langchain.schema import BaseRetriever -from langchain.vectorstores.base import VectorStore +from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.pinecone import Pinecone from langflow import CustomComponent diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/langflow/components/vectorstores/Qdrant.py index 4f5b28fa4..ee38406df 100644 --- a/src/backend/langflow/components/vectorstores/Qdrant.py +++ b/src/backend/langflow/components/vectorstores/Qdrant.py @@ -1,9 +1,10 @@ -from langflow import CustomComponent -from langchain_community.vectorstores.qdrant import Qdrant -from typing import Optional, List, Union -from langflow.field_typing import Document, Embeddings, NestedDict +from typing import List, Optional, Union + from langchain.schema import BaseRetriever -from langchain.vectorstores.base import VectorStore +from langchain_community.vectorstores import VectorStore +from langchain_community.vectorstores.qdrant import Qdrant +from langflow import CustomComponent +from langflow.field_typing import Document, Embeddings, NestedDict class QdrantComponent(CustomComponent): diff --git a/src/backend/langflow/components/vectorstores/Redis.py b/src/backend/langflow/components/vectorstores/Redis.py index f13428829..6a4e9db2a 100644 --- a/src/backend/langflow/components/vectorstores/Redis.py +++ b/src/backend/langflow/components/vectorstores/Redis.py @@ -1,10 
+1,10 @@ from typing import Optional -from langflow import CustomComponent -from langchain.vectorstores.redis import Redis -from langchain.schema import Document -from langchain.vectorstores.base import VectorStore from langchain.embeddings.base import Embeddings +from langchain.schema import Document +from langchain_community.vectorstores import VectorStore +from langchain_community.vectorstores.redis import Redis +from langflow import CustomComponent class RedisComponent(CustomComponent): diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py index 40e6dd98e..2ec6dfabc 100644 --- a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -1,13 +1,10 @@ -from langflow import CustomComponent from typing import List, Union -from langchain_community.vectorstores.supabase import SupabaseVectorStore -from langflow.field_typing import ( - Document, - Embeddings, - NestedDict, -) + from langchain.schema import BaseRetriever -from langchain.vectorstores.base import VectorStore +from langchain_community.vectorstores import VectorStore +from langchain_community.vectorstores.supabase import SupabaseVectorStore +from langflow import CustomComponent +from langflow.field_typing import Document, Embeddings, NestedDict from supabase.client import Client, create_client diff --git a/src/backend/langflow/components/vectorstores/Vectara.py b/src/backend/langflow/components/vectorstores/Vectara.py index e3555e6f1..af828c49e 100644 --- a/src/backend/langflow/components/vectorstores/Vectara.py +++ b/src/backend/langflow/components/vectorstores/Vectara.py @@ -1,14 +1,13 @@ -from typing import Optional, Union, List -from langflow import CustomComponent import tempfile -import urllib.request import urllib +import urllib.request +from typing import List, Optional, Union -from langchain.vectorstores import 
Vectara -from langchain.schema import Document -from langchain.vectorstores.base import VectorStore -from langchain.schema import BaseRetriever from langchain.embeddings import FakeEmbeddings +from langchain.schema import BaseRetriever, Document +from langchain_community.vectorstores import Vectara, VectorStore + +from langflow import CustomComponent class VectaraComponent(CustomComponent): diff --git a/src/backend/langflow/components/vectorstores/Weaviate.py b/src/backend/langflow/components/vectorstores/Weaviate.py index 3f0a208bb..9b4967c36 100644 --- a/src/backend/langflow/components/vectorstores/Weaviate.py +++ b/src/backend/langflow/components/vectorstores/Weaviate.py @@ -3,8 +3,7 @@ from typing import Optional, Union import weaviate # type: ignore from langchain.embeddings.base import Embeddings from langchain.schema import BaseRetriever, Document -from langchain.vectorstores import Weaviate -from langchain.vectorstores.base import VectorStore +from langchain_community.vectorstores import VectorStore, Weaviate from langflow import CustomComponent diff --git a/src/backend/langflow/components/vectorstores/pgvector.py b/src/backend/langflow/components/vectorstores/pgvector.py index 4e0c2eb4d..786629dd0 100644 --- a/src/backend/langflow/components/vectorstores/pgvector.py +++ b/src/backend/langflow/components/vectorstores/pgvector.py @@ -1,10 +1,10 @@ -from typing import Optional, List -from langflow import CustomComponent +from typing import List, Optional -from langchain.vectorstores.pgvector import PGVector -from langchain.schema import Document -from langchain.vectorstores.base import VectorStore from langchain.embeddings.base import Embeddings +from langchain.schema import Document +from langchain_community.vectorstores import VectorStore +from langchain_community.vectorstores.pgvector import PGVector +from langflow import CustomComponent class PostgresqlVectorComponent(CustomComponent): diff --git a/src/backend/langflow/field_typing/constants.py 
b/src/backend/langflow/field_typing/constants.py index d3ca22baa..6680aceab 100644 --- a/src/backend/langflow/field_typing/constants.py +++ b/src/backend/langflow/field_typing/constants.py @@ -12,7 +12,7 @@ from langchain.schema.language_model import BaseLanguageModel from langchain.schema.memory import BaseMemory from langchain.text_splitter import TextSplitter from langchain.tools import Tool -from langchain.vectorstores.base import VectorStore +from langchain_community.vectorstores import VectorStore # Type alias for more complex dicts NestedDict = Dict[str, Union[str, Dict]] diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py index 5df96e584..608a98a9b 100644 --- a/src/backend/langflow/interface/agents/custom.py +++ b/src/backend/langflow/interface/agents/custom.py @@ -2,14 +2,10 @@ from typing import Any, Optional from langchain.agents import AgentExecutor, ZeroShotAgent from langchain.agents.agent_toolkits import ( - SQLDatabaseToolkit, VectorStoreInfo, VectorStoreRouterToolkit, VectorStoreToolkit, ) -from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX -from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit -from langchain.agents.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX from langchain.agents.agent_toolkits.vectorstore.prompt import PREFIX as VECTORSTORE_PREFIX from langchain.agents.agent_toolkits.vectorstore.prompt import ROUTER_PREFIX as VECTORSTORE_ROUTER_PREFIX from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS @@ -17,9 +13,14 @@ from langchain.base_language import BaseLanguageModel from langchain.chains.llm import LLMChain from langchain.sql_database import SQLDatabase from langchain.tools.sql_database.prompt import QUERY_CHECKER +from langchain_community.agent_toolkits import SQLDatabaseToolkit +from langchain_community.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX +from 
langchain_community.agent_toolkits.json.toolkit import JsonToolkit +from langchain_community.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX from langchain_experimental.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX from langchain_experimental.agents.agent_toolkits.pandas.prompt import SUFFIX_WITH_DF as PANDAS_SUFFIX from langchain_experimental.tools.python.tool import PythonAstREPLTool + from langflow.interface.base import CustomAgentExecutor diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index 80a46c856..3cff26099 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -1,9 +1,10 @@ import inspect from typing import Any -from langchain import document_loaders, embeddings, llms, memory, requests, text_splitter -from langchain.agents import agent_toolkits -from langchain.chat_models import AzureChatOpenAI, ChatAnthropic, ChatOpenAI, ChatVertexAI +from langchain import llms, memory, requests, text_splitter +from langchain_community.chat_models import AzureChatOpenAI, ChatAnthropic, ChatOpenAI, ChatVertexAI +from langchain_community import agent_toolkits, document_loaders, embeddings + from langflow.interface.agents.custom import CUSTOM_AGENTS from langflow.interface.chains.custom import CUSTOM_CHAINS from langflow.interface.importing.utils import import_class @@ -24,14 +25,14 @@ llm_type_to_cls_dict["vertexai-chat"] = ChatVertexAI # type: ignore # Toolkits toolkit_type_to_loader_dict: dict[str, Any] = { - toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}") + toolkit_name: import_class(f"langchain_community.agent_toolkits.{toolkit_name}") # if toolkit_name is lower case it is a loader for toolkit_name in agent_toolkits.__all__ if toolkit_name.islower() } toolkit_type_to_cls_dict: dict[str, Any] = { - toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}") + toolkit_name: 
import_class(f"langchain_community.agent_toolkits.{toolkit_name}") # if toolkit_name is not lower case it is a class for toolkit_name in agent_toolkits.__all__ if not toolkit_name.islower() @@ -47,13 +48,14 @@ wrapper_type_to_cls_dict: dict[str, Any] = {wrapper.__name__: wrapper for wrappe # Embeddings embedding_type_to_cls_dict: dict[str, Any] = { - embedding_name: import_class(f"langchain.embeddings.{embedding_name}") for embedding_name in embeddings.__all__ + embedding_name: import_class(f"langchain_community.embeddings.{embedding_name}") + for embedding_name in embeddings.__all__ } # Document Loaders documentloaders_type_to_cls_dict: dict[str, Any] = { - documentloader_name: import_class(f"langchain.document_loaders.{documentloader_name}") + documentloader_name: import_class(f"langchain_community.document_loaders.{documentloader_name}") for documentloader_name in document_loaders.__all__ } diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index f3276d952..9d7305f16 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -6,9 +6,9 @@ from typing import Any, Type from langchain.agents import Agent from langchain.base_language import BaseLanguageModel from langchain.chains.base import Chain -from langchain.chat_models.base import BaseChatModel from langchain.prompts import PromptTemplate from langchain.tools import BaseTool +from langchain_core.language_models.chat_models import BaseChatModel from langflow.interface.custom.custom_component import CustomComponent from langflow.interface.wrappers.base import wrapper_creator from langflow.utils import validate @@ -71,7 +71,7 @@ def import_output_parser(output_parser: str) -> Any: def import_chat_llm(llm: str) -> BaseChatModel: """Import chat llm from llm name""" - return import_class(f"langchain.chat_models.{llm}") + return import_class(f"langchain_community.chat_models.{llm}") def 
import_retriever(retriever: str) -> Any: @@ -148,17 +148,17 @@ def import_chain(chain: str) -> Type[Chain]: def import_embedding(embedding: str) -> Any: """Import embedding from embedding name""" - return import_class(f"langchain.embeddings.{embedding}") + return import_class(f"langchain_community.embeddings.{embedding}") def import_vectorstore(vectorstore: str) -> Any: """Import vectorstore from vectorstore name""" - return import_class(f"langchain.vectorstores.{vectorstore}") + return import_class(f"langchain_community.vectorstores.{vectorstore}") def import_documentloader(documentloader: str) -> Any: """Import documentloader from documentloader name""" - return import_class(f"langchain.document_loaders.{documentloader}") + return import_class(f"langchain_community.document_loaders.{documentloader}") def import_textsplitter(textsplitter: str) -> Any: @@ -169,8 +169,8 @@ def import_textsplitter(textsplitter: str) -> Any: def import_utility(utility: str) -> Any: """Import utility from utility name""" if utility == "SQLDatabase": - return import_class(f"langchain.sql_database.{utility}") - return import_class(f"langchain.utilities.{utility}") + return import_class(f"langchain_community.sql_database.{utility}") + return import_class(f"langchain_community.utilities.{utility}") def get_function(code): diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index 8fab13351..a1113ddd9 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -10,7 +10,7 @@ from langchain.agents.tools import BaseTool from langchain.chains.base import Chain from langchain.document_loaders.base import BaseLoader from langchain.schema import Document -from langchain.vectorstores.base import VectorStore +from langchain_community.vectorstores import VectorStore from loguru import logger from pydantic import ValidationError diff --git 
a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index a0adf85e3..0163cdcc3 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -1,18 +1,18 @@ -from typing import Any, Callable, Dict, Type -from langchain.vectorstores import ( - Pinecone, - ElasticsearchStore, - Qdrant, - Chroma, - FAISS, - Weaviate, - SupabaseVectorStore, - MongoDBAtlasVectorSearch, -) -from langchain.schema import Document import os +from typing import Any, Callable, Dict, Type import orjson +from langchain.schema import Document +from langchain_community.vectorstores import ( + FAISS, + Chroma, + ElasticsearchStore, + MongoDBAtlasVectorSearch, + Pinecone, + Qdrant, + SupabaseVectorStore, + Weaviate, +) def docs_in_params(params: dict) -> bool: @@ -27,8 +27,8 @@ def initialize_mongodb(class_object: Type[MongoDBAtlasVectorSearch], params: dic MONGODB_ATLAS_CLUSTER_URI = params.pop("mongodb_atlas_cluster_uri") if not MONGODB_ATLAS_CLUSTER_URI: raise ValueError("Mongodb atlas cluster uri must be provided in the params") - from pymongo import MongoClient import certifi + from pymongo import MongoClient client: MongoClient = MongoClient(MONGODB_ATLAS_CLUSTER_URI, tlsCAFile=certifi.where()) db_name = params.pop("db_name", None) diff --git a/src/backend/langflow/interface/retrievers/base.py b/src/backend/langflow/interface/retrievers/base.py index 2439708a3..63d0ef915 100644 --- a/src/backend/langflow/interface/retrievers/base.py +++ b/src/backend/langflow/interface/retrievers/base.py @@ -1,14 +1,13 @@ from typing import Any, ClassVar, Dict, List, Optional, Type from langchain import retrievers +from loguru import logger from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class from langflow.services.deps import get_settings_service - from langflow.template.frontend_node.retrievers import 
RetrieverFrontendNode -from loguru import logger -from langflow.utils.util import build_template_from_method, build_template_from_class +from langflow.utils.util import build_template_from_class, build_template_from_method class RetrieverCreator(LangChainTypeCreator): @@ -27,7 +26,7 @@ class RetrieverCreator(LangChainTypeCreator): def type_to_loader_dict(self) -> Dict: if self.type_dict is None: self.type_dict: dict[str, Any] = { - retriever_name: import_class(f"langchain.retrievers.{retriever_name}") + retriever_name: import_class(f"langchain_community.retrievers.{retriever_name}") for retriever_name in retrievers.__all__ } return self.type_dict diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/langflow/interface/utilities/base.py index cfebf83a4..0a58ef1bc 100644 --- a/src/backend/langflow/interface/utilities/base.py +++ b/src/backend/langflow/interface/utilities/base.py @@ -1,6 +1,6 @@ from typing import Dict, List, Optional, Type -from langchain import utilities +from langchain_community import utilities from loguru import logger from langflow.custom.customs import get_custom_nodes @@ -30,7 +30,7 @@ class UtilityCreator(LangChainTypeCreator): self.type_dict = {} for utility_name in utilities.__all__: try: - imported = import_class(f"langchain.utilities.{utility_name}") + imported = import_class(f"langchain_community.utilities.{utility_name}") self.type_dict[utility_name] = imported except Exception: pass diff --git a/src/backend/langflow/interface/vector_store/base.py b/src/backend/langflow/interface/vector_store/base.py index d04689469..893c78fca 100644 --- a/src/backend/langflow/interface/vector_store/base.py +++ b/src/backend/langflow/interface/vector_store/base.py @@ -1,13 +1,12 @@ from typing import Any, Dict, List, Optional, Type from langchain import vectorstores +from loguru import logger from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class from 
langflow.services.deps import get_settings_service - from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode -from loguru import logger from langflow.utils.util import build_template_from_method @@ -22,7 +21,7 @@ class VectorstoreCreator(LangChainTypeCreator): def type_to_loader_dict(self) -> Dict: if self.type_dict is None: self.type_dict: dict[str, Any] = { - vectorstore_name: import_class(f"langchain.vectorstores.{vectorstore_name}") + vectorstore_name: import_class(f"langchain_community.vectorstores.{vectorstore_name}") for vectorstore_name in vectorstores.__all__ } return self.type_dict diff --git a/src/backend/langflow/interface/wrappers/base.py b/src/backend/langflow/interface/wrappers/base.py index 38d61af78..469559ad9 100644 --- a/src/backend/langflow/interface/wrappers/base.py +++ b/src/backend/langflow/interface/wrappers/base.py @@ -1,9 +1,9 @@ from typing import ClassVar, Dict, List, Optional -from langchain.utilities import requests, sql_database +from langchain_community.utilities import requests, sql_database +from loguru import logger from langflow.interface.base import LangChainTypeCreator -from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py index 2fa14e5e5..13eb7a816 100644 --- a/src/backend/langflow/processing/process.py +++ b/src/backend/langflow/processing/process.py @@ -4,7 +4,7 @@ from typing import Any, Coroutine, Dict, List, Optional, Tuple, Union from langchain.agents import AgentExecutor from langchain.chains.base import Chain from langchain.schema import AgentAction, Document -from langchain.vectorstores.base import VectorStore +from langchain_community.vectorstores import VectorStore from langchain_core.messages import AIMessage from langchain_core.runnables.base import Runnable from langflow.graph.graph.base import Graph From 
866e820ecb8625bb2526ced8fd27ceff5f5c539f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 16:16:17 -0300 Subject: [PATCH 142/153] Import Document from langchain_core instead of langchain --- .../langflow/components/documentloaders/SRTLoader.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/components/documentloaders/SRTLoader.py b/src/backend/langflow/components/documentloaders/SRTLoader.py index 5c640dee5..931660947 100644 --- a/src/backend/langflow/components/documentloaders/SRTLoader.py +++ b/src/backend/langflow/components/documentloaders/SRTLoader.py @@ -1,6 +1,8 @@ +from typing import Dict, Optional + +from langchain_core.documents import Document + from langflow import CustomComponent -from langchain.documents import Document -from typing import Optional, Dict class SRTLoaderComponent(CustomComponent): From 44696a9eb412120832ae21ebbefe5d1b0de2c1c6 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 16:16:38 -0300 Subject: [PATCH 143/153] Refactor locustfile.py: Import modules and reorder code --- tests/locust/locustfile.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/locust/locustfile.py b/tests/locust/locustfile.py index 1fc91ee2c..856e3f6dd 100644 --- a/tests/locust/locustfile.py +++ b/tests/locust/locustfile.py @@ -1,11 +1,12 @@ -from locust import FastHttpUser, task, between import random import time -import orjson -from rich import print -import httpx from pathlib import Path +import httpx +import orjson +from locust import FastHttpUser, between, task +from rich import print + class NameTest(FastHttpUser): wait_time = between(1, 5) @@ -13,7 +14,7 @@ class NameTest(FastHttpUser): with open("names.txt", "r") as file: names = [line.strip() for line in file.readlines()] - headers = {} + headers: dict = {} def poll_task(self, task_id, sleep_time=1): while True: From c78c34983df0d9d25d675e10e01f12a247978777 Mon 
Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 16:17:02 -0300 Subject: [PATCH 144/153] Update mypy version and add follow_imports option --- poetry.lock | 171 +++++++++++++++++++++++++------------------------ pyproject.toml | 5 +- 2 files changed, 90 insertions(+), 86 deletions(-) diff --git a/poetry.lock b/poetry.lock index 783988997..507dca21f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1144,43 +1144,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.1" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, - {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, - {file = 
"cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, - {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, - {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, - {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, - {file = 
"cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, - {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, - {file = "cryptography-42.0.1.tar.gz", hash = "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = 
"cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, ] [package.dependencies] @@ -2202,13 +2202,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.115.0" +version = "2.116.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.115.0.tar.gz", hash = "sha256:96af11376535236ba600ebbe23588cfe003ec9b74e66dd6ddb53aa3ec87e1b52"}, - {file = "google_api_python_client-2.115.0-py2.py3-none-any.whl", hash = "sha256:26178e33684763099142e2cad201057bd27d4efefd859a495aac21ab3e6129c2"}, + {file = "google-api-python-client-2.116.0.tar.gz", hash = "sha256:f9f32361e16114d62929638fe07f77be30216b079ad316dc2ced859d9f72e5ad"}, + {file = "google_api_python_client-2.116.0-py2.py3-none-any.whl", hash = "sha256:846e44417c6b7385fa5f5a46cb6b9d23327754c560830245ee53a577c5e44cec"}, ] [package.dependencies] @@ -3851,12 +3851,12 @@ regex = ["regex"] [[package]] name = "llama-cpp-python" -version = "0.2.36" +version = "0.2.37" description = "Python bindings for the llama.cpp library" optional = true python-versions = ">=3.8" files = [ - {file = "llama_cpp_python-0.2.36.tar.gz", hash = "sha256:21dba178604d17f40924afbf4b8c56305f951ebf5a6da23097e9ce2874b97cc3"}, + {file = "llama_cpp_python-0.2.37.tar.gz", hash = 
"sha256:a8f7c8d27334c6b38afac9a33f02a90a8a6c3eed15309e4b9315ac13a3cb3c04"}, ] [package.dependencies] @@ -4062,13 +4062,13 @@ source = ["Cython (==0.29.37)"] [[package]] name = "mako" -version = "1.3.0" +version = "1.3.2" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, - {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, + {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, + {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, ] [package.dependencies] @@ -4861,12 +4861,12 @@ nvidia-nvjitlink-cu12 = "*" [[package]] name = "nvidia-nccl-cu12" -version = "2.18.1" +version = "2.19.3" description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = true python-versions = ">=3" files = [ - {file = "nvidia_nccl_cu12-2.18.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:1a6c4acefcbebfa6de320f412bf7866de856e786e0462326ba1bac40de0b5e71"}, + {file = "nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:a9734707a2c96443331c1e48c717024aa6678a0e2a4cb66b2c364d18cee6b48d"}, ] [[package]] @@ -5599,13 +5599,13 @@ strenum = ">=0.4.9,<0.5.0" [[package]] name = "posthog" -version = "3.3.3" +version = "3.3.4" description = "Integrate PostHog into any python application." 
optional = false python-versions = "*" files = [ - {file = "posthog-3.3.3-py2.py3-none-any.whl", hash = "sha256:22b450be8cfdbcf403cbbcc50e9ca6e681d04c6a057336483cc4b8beb5794a15"}, - {file = "posthog-3.3.3.tar.gz", hash = "sha256:2f16372675a81fb093dd19d7fb8ea8166068f5a7a17358d71ccb2f7080a4c8c1"}, + {file = "posthog-3.3.4-py2.py3-none-any.whl", hash = "sha256:2fec5112c6df1d6a214a899e409659ed354511236537e861f1556a0c88e3fd26"}, + {file = "posthog-3.3.4.tar.gz", hash = "sha256:23a891639bc0a4f6fe4d04864d02410c60b7ee5d523de79becbc7325c983dba9"}, ] [package.dependencies] @@ -8189,31 +8189,36 @@ files = [ [[package]] name = "torch" -version = "2.1.2" +version = "2.2.0" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = true python-versions = ">=3.8.0" files = [ - {file = "torch-2.1.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:3a871edd6c02dae77ad810335c0833391c1a4ce49af21ea8cf0f6a5d2096eea8"}, - {file = "torch-2.1.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:bef6996c27d8f6e92ea4e13a772d89611da0e103b48790de78131e308cf73076"}, - {file = "torch-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:0e13034fd5fb323cbbc29e56d0637a3791e50dd589616f40c79adfa36a5a35a1"}, - {file = "torch-2.1.2-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:d9b535cad0df3d13997dbe8bd68ac33e0e3ae5377639c9881948e40794a61403"}, - {file = "torch-2.1.2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:f9a55d55af02826ebfbadf4e9b682f0f27766bc33df8236b48d28d705587868f"}, - {file = "torch-2.1.2-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:a6ebbe517097ef289cc7952783588c72de071d4b15ce0f8b285093f0916b1162"}, - {file = "torch-2.1.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:8f32ce591616a30304f37a7d5ea80b69ca9e1b94bba7f308184bf616fdaea155"}, - {file = "torch-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e0ee6cf90c8970e05760f898d58f9ac65821c37ffe8b04269ec787aa70962b69"}, - {file = "torch-2.1.2-cp311-none-macosx_10_9_x86_64.whl", 
hash = "sha256:76d37967c31c99548ad2c4d3f2cf191db48476f2e69b35a0937137116da356a1"}, - {file = "torch-2.1.2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:e2d83f07b4aac983453ea5bf8f9aa9dacf2278a8d31247f5d9037f37befc60e4"}, - {file = "torch-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f41fe0c7ecbf903a568c73486139a75cfab287a0f6c17ed0698fdea7a1e8641d"}, - {file = "torch-2.1.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e3225f47d50bb66f756fe9196a768055d1c26b02154eb1f770ce47a2578d3aa7"}, - {file = "torch-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33d59cd03cb60106857f6c26b36457793637512998666ee3ce17311f217afe2b"}, - {file = "torch-2.1.2-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:8e221deccd0def6c2badff6be403e0c53491805ed9915e2c029adbcdb87ab6b5"}, - {file = "torch-2.1.2-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:05b18594f60a911a0c4f023f38a8bda77131fba5fd741bda626e97dcf5a3dd0a"}, - {file = "torch-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:9ca96253b761e9aaf8e06fb30a66ee301aecbf15bb5a303097de1969077620b6"}, - {file = "torch-2.1.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d93ba70f67b08c2ae5598ee711cbc546a1bc8102cef938904b8c85c2089a51a0"}, - {file = "torch-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:255b50bc0608db177e6a3cc118961d77de7e5105f07816585fa6f191f33a9ff3"}, - {file = "torch-2.1.2-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6984cd5057c0c977b3c9757254e989d3f1124f4ce9d07caa6cb637783c71d42a"}, - {file = "torch-2.1.2-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:bc195d7927feabc0eb7c110e457c955ed2ab616f3c7c28439dd4188cf589699f"}, + {file = "torch-2.2.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:d366158d6503a3447e67f8c0ad1328d54e6c181d88572d688a625fac61b13a97"}, + {file = "torch-2.2.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:707f2f80402981e9f90d0038d7d481678586251e6642a7a6ef67fc93511cb446"}, + {file = "torch-2.2.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:15c8f0a105c66b28496092fca1520346082e734095f8eaf47b5786bac24b8a31"}, + {file = "torch-2.2.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:0ca4df4b728515ad009b79f5107b00bcb2c63dc202d991412b9eb3b6a4f24349"}, + {file = "torch-2.2.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:3d3eea2d5969b9a1c9401429ca79efc668120314d443d3463edc3289d7f003c7"}, + {file = "torch-2.2.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0d1c580e379c0d48f0f0a08ea28d8e373295aa254de4f9ad0631f9ed8bc04c24"}, + {file = "torch-2.2.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9328e3c1ce628a281d2707526b4d1080eae7c4afab4f81cea75bde1f9441dc78"}, + {file = "torch-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:03c8e660907ac1b8ee07f6d929c4e15cd95be2fb764368799cca02c725a212b8"}, + {file = "torch-2.2.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:da0cefe7f84ece3e3b56c11c773b59d1cb2c0fd83ddf6b5f7f1fd1a987b15c3e"}, + {file = "torch-2.2.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:f81d23227034221a4a4ff8ef24cc6cec7901edd98d9e64e32822778ff01be85e"}, + {file = "torch-2.2.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:dcbfb2192ac41ca93c756ebe9e2af29df0a4c14ee0e7a0dd78f82c67a63d91d4"}, + {file = "torch-2.2.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:9eeb42971619e24392c9088b5b6d387d896e267889d41d267b1fec334f5227c5"}, + {file = "torch-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:c718b2ca69a6cac28baa36d86d8c0ec708b102cebd1ceb1b6488e404cd9be1d1"}, + {file = "torch-2.2.0-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:f11d18fceb4f9ecb1ac680dde7c463c120ed29056225d75469c19637e9f98d12"}, + {file = "torch-2.2.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:ee1da852bfd4a7e674135a446d6074c2da7194c1b08549e31eae0b3138c6b4d2"}, + {file = "torch-2.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0d819399819d0862268ac531cf12a501c253007df4f9e6709ede8a0148f1a7b8"}, + {file = "torch-2.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:08f53ccc38c49d839bc703ea1b20769cc8a429e0c4b20b56921a9f64949bf325"}, + {file = "torch-2.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:93bffe3779965a71dab25fc29787538c37c5d54298fd2f2369e372b6fb137d41"}, + {file = "torch-2.2.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:c17ec323da778efe8dad49d8fb534381479ca37af1bfc58efdbb8607a9d263a3"}, + {file = "torch-2.2.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c02685118008834e878f676f81eab3a952b7936fa31f474ef8a5ff4b5c78b36d"}, + {file = "torch-2.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d9f39d6f53cec240a0e3baa82cb697593340f9d4554cee6d3d6ca07925c2fac0"}, + {file = "torch-2.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:51770c065206250dc1222ea7c0eff3f88ab317d3e931cca2aee461b85fbc2472"}, + {file = "torch-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:008e4c6ad703de55af760c73bf937ecdd61a109f9b08f2bbb9c17e7c7017f194"}, + {file = "torch-2.2.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:de8680472dd14e316f42ceef2a18a301461a9058cd6e99a1f1b20f78f11412f1"}, + {file = "torch-2.2.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:99e1dcecb488e3fd25bcaac56e48cdb3539842904bdc8588b0b255fde03a254c"}, ] [package.dependencies] @@ -8230,15 +8235,15 @@ nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linu nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.18.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu12 = {version = "2.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == 
\"Linux\" and platform_machine == \"x86_64\""} sympy = "*" -triton = {version = "2.1.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -typing-extensions = "*" +triton = {version = "2.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +typing-extensions = ">=4.8.0" [package.extras] -dynamo = ["jinja2"] opt-einsum = ["opt-einsum (>=3.3)"] +optree = ["optree (>=0.9.1)"] [[package]] name = "tornado" @@ -8365,28 +8370,26 @@ vision = ["Pillow (>=10.0.1,<=15.0)"] [[package]] name = "triton" -version = "2.1.0" +version = "2.2.0" description = "A language and compiler for custom Deep Learning operations" optional = true python-versions = "*" files = [ - {file = "triton-2.1.0-0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:66439923a30d5d48399b08a9eae10370f6c261a5ec864a64983bae63152d39d7"}, - {file = "triton-2.1.0-0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:919b06453f0033ea52c13eaf7833de0e57db3178d23d4e04f9fc71c4f2c32bf8"}, - {file = "triton-2.1.0-0-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ae4bb8a91de790e1866405211c4d618379781188f40d5c4c399766914e84cd94"}, - {file = "triton-2.1.0-0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39f6fb6bdccb3e98f3152e3fbea724f1aeae7d749412bbb1fa9c441d474eba26"}, - {file = "triton-2.1.0-0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21544e522c02005a626c8ad63d39bdff2f31d41069592919ef281e964ed26446"}, - {file = "triton-2.1.0-0-pp37-pypy37_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:143582ca31dd89cd982bd3bf53666bab1c7527d41e185f9e3d8a3051ce1b663b"}, - {file = "triton-2.1.0-0-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82fc5aeeedf6e36be4e4530cbdcba81a09d65c18e02f52dc298696d45721f3bd"}, - {file = 
"triton-2.1.0-0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:81a96d110a738ff63339fc892ded095b31bd0d205e3aace262af8400d40b6fa8"}, + {file = "triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2294514340cfe4e8f4f9e5c66c702744c4a117d25e618bd08469d0bfed1e2e5"}, + {file = "triton-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da58a152bddb62cafa9a857dd2bc1f886dbf9f9c90a2b5da82157cd2b34392b0"}, + {file = "triton-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af58716e721460a61886668b205963dc4d1e4ac20508cc3f623aef0d70283d5"}, + {file = "triton-2.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8fe46d3ab94a8103e291bd44c741cc294b91d1d81c1a2888254cbf7ff846dab"}, + {file = "triton-2.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ce26093e539d727e7cf6f6f0d932b1ab0574dc02567e684377630d86723ace"}, + {file = "triton-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:227cc6f357c5efcb357f3867ac2a8e7ecea2298cd4606a8ba1e931d1d5a947df"}, ] [package.dependencies] filelock = "*" [package.extras] -build = ["cmake (>=3.18)", "lit"] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)"] -tutorials = ["matplotlib", "pandas", "tabulate"] +build = ["cmake (>=3.20)", "lit"] +tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"] +tutorials = ["matplotlib", "pandas", "tabulate", "torch"] [[package]] name = "typer" @@ -9450,4 +9453,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "2c1761b379f2cf4f9a9ecf53a66ffa0742a8a1d221797e49c1e5c8c4c67384ff" +content-hash = "2b6c2054383eb3a1b92ac3fe0037fca6cdb6407107d2229ae9ea8e4433b55d11" diff --git a/pyproject.toml b/pyproject.toml index 5ae9ac473..9b8f588aa 100644 
--- a/pyproject.toml +++ b/pyproject.toml @@ -113,7 +113,7 @@ langchain-openai = "^0.0.2" pytest-asyncio = "^0.23.1" types-redis = "^4.6.0.5" ipykernel = "^6.27.0" -mypy = "^1.7.1" +mypy = "^1.8.0" ruff = "^0.1.5" httpx = "*" pytest = "^7.4.2" @@ -155,7 +155,8 @@ exclude = ["src/backend/langflow/alembic/*"] line-length = 120 [tool.mypy] -plugins = "pydantic.mypy" +plugins = ["pydantic.mypy"] +follow_imports = "silent" [build-system] requires = ["poetry-core"] From 349fc929811f29ad7ebf935f15221fb4ea768134 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 30 Jan 2024 16:24:32 -0300 Subject: [PATCH 145/153] Update anthropic version to 0.12.0 --- poetry.lock | 11 +++++++---- pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 507dca21f..22e7c431c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -192,13 +192,13 @@ files = [ [[package]] name = "anthropic" -version = "0.8.1" +version = "0.12.0" description = "The official Python library for the anthropic API" optional = false python-versions = ">=3.7" files = [ - {file = "anthropic-0.8.1-py3-none-any.whl", hash = "sha256:4578b99d6e140c84ad5f409e460aabc85acffc6e189dfd91378b9277b4c158b7"}, - {file = "anthropic-0.8.1.tar.gz", hash = "sha256:736cf85681b8ed066014b0485d8d5be1f2aae65f86e6a25c76a5d5ebb970eee5"}, + {file = "anthropic-0.12.0-py3-none-any.whl", hash = "sha256:d2f706c8dc95c7974e71c901c55245eeda5f129e78c3e3e6752411d4aecfe416"}, + {file = "anthropic-0.12.0.tar.gz", hash = "sha256:f61bc5f83d195a0b1b5d92d4e5d9a131cab196bfdf9c8f69b73a815a40665e12"}, ] [package.dependencies] @@ -210,6 +210,9 @@ sniffio = "*" tokenizers = ">=0.13.0" typing-extensions = ">=4.7,<5" +[package.extras] +vertex = ["google-auth (>=2,<3)"] + [[package]] name = "anyio" version = "4.2.0" @@ -9453,4 +9456,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = 
"2b6c2054383eb3a1b92ac3fe0037fca6cdb6407107d2229ae9ea8e4433b55d11" +content-hash = "8d7773b92331fa8603c4d92401eadb7f07954b972ad8354f690c63761d459397" diff --git a/pyproject.toml b/pyproject.toml index 9b8f588aa..e1e372b98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ cohere = "^4.39.0" python-multipart = "^0.0.6" sqlmodel = "^0.0.14" faiss-cpu = "^1.7.4" -anthropic = "^0.8.0" +anthropic = "^0.12.0" orjson = "3.9.3" multiprocess = "^0.70.14" cachetools = "^5.3.1" From c54c7246b66d0ecfc1199c1a2dc38b5b18ec9554 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 15:50:04 -0300 Subject: [PATCH 146/153] Update document loader imports --- src/backend/langflow/utils/constants.py | 32 ++++++++++++------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py index b2d00f8e6..0704f2ec9 100644 --- a/src/backend/langflow/utils/constants.py +++ b/src/backend/langflow/utils/constants.py @@ -68,110 +68,110 @@ LOADERS_INFO: List[Dict[str, Any]] = [ { "loader": "AirbyteJSONLoader", "name": "Airbyte JSON (.jsonl)", - "import": "langchain.document_loaders.AirbyteJSONLoader", + "import": "langchain_community.document_loaders.AirbyteJSONLoader", "defaultFor": ["jsonl"], "allowdTypes": ["jsonl"], }, { "loader": "JSONLoader", "name": "JSON (.json)", - "import": "langchain.document_loaders.JSONLoader", + "import": "langchain_community.document_loaders.JSONLoader", "defaultFor": ["json"], "allowdTypes": ["json"], }, { "loader": "BSHTMLLoader", "name": "BeautifulSoup4 HTML (.html, .htm)", - "import": "langchain.document_loaders.BSHTMLLoader", + "import": "langchain_community.document_loaders.BSHTMLLoader", "allowdTypes": ["html", "htm"], }, { "loader": "CSVLoader", "name": "CSV (.csv)", - "import": "langchain.document_loaders.CSVLoader", + "import": "langchain_community.document_loaders.CSVLoader", "defaultFor": ["csv"], "allowdTypes": ["csv"], 
}, { "loader": "CoNLLULoader", "name": "CoNLL-U (.conllu)", - "import": "langchain.document_loaders.CoNLLULoader", + "import": "langchain_community.document_loaders.CoNLLULoader", "defaultFor": ["conllu"], "allowdTypes": ["conllu"], }, { "loader": "EverNoteLoader", "name": "EverNote (.enex)", - "import": "langchain.document_loaders.EverNoteLoader", + "import": "langchain_community.document_loaders.EverNoteLoader", "defaultFor": ["enex"], "allowdTypes": ["enex"], }, { "loader": "FacebookChatLoader", "name": "Facebook Chat (.json)", - "import": "langchain.document_loaders.FacebookChatLoader", + "import": "langchain_community.document_loaders.FacebookChatLoader", "allowdTypes": ["json"], }, { "loader": "OutlookMessageLoader", "name": "Outlook Message (.msg)", - "import": "langchain.document_loaders.OutlookMessageLoader", + "import": "langchain_community.document_loaders.OutlookMessageLoader", "defaultFor": ["msg"], "allowdTypes": ["msg"], }, { "loader": "PyPDFLoader", "name": "PyPDF (.pdf)", - "import": "langchain.document_loaders.PyPDFLoader", + "import": "langchain_community.document_loaders.PyPDFLoader", "defaultFor": ["pdf"], "allowdTypes": ["pdf"], }, { "loader": "STRLoader", "name": "Subtitle (.str)", - "import": "langchain.document_loaders.STRLoader", + "import": "langchain_community.document_loaders.STRLoader", "defaultFor": ["str"], "allowdTypes": ["str"], }, { "loader": "TextLoader", "name": "Text (.txt)", - "import": "langchain.document_loaders.TextLoader", + "import": "langchain_community.document_loaders.TextLoader", "defaultFor": ["txt"], "allowdTypes": ["txt"], }, { "loader": "UnstructuredEmailLoader", "name": "Unstructured Email (.eml)", - "import": "langchain.document_loaders.UnstructuredEmailLoader", + "import": "langchain_community.document_loaders.UnstructuredEmailLoader", "defaultFor": ["eml"], "allowdTypes": ["eml"], }, { "loader": "UnstructuredHTMLLoader", "name": "Unstructured HTML (.html, .htm)", - "import": 
"langchain.document_loaders.UnstructuredHTMLLoader", + "import": "langchain_community.document_loaders.UnstructuredHTMLLoader", "defaultFor": ["html", "htm"], "allowdTypes": ["html", "htm"], }, { "loader": "UnstructuredMarkdownLoader", "name": "Unstructured Markdown (.md)", - "import": "langchain.document_loaders.UnstructuredMarkdownLoader", + "import": "langchain_community.document_loaders.UnstructuredMarkdownLoader", "defaultFor": ["md"], "allowdTypes": ["md"], }, { "loader": "UnstructuredPowerPointLoader", "name": "Unstructured PowerPoint (.pptx)", - "import": "langchain.document_loaders.UnstructuredPowerPointLoader", + "import": "langchain_community.document_loaders.UnstructuredPowerPointLoader", "defaultFor": ["pptx"], "allowdTypes": ["pptx"], }, { "loader": "UnstructuredWordLoader", "name": "Unstructured Word (.docx)", - "import": "langchain.document_loaders.UnstructuredWordLoader", + "import": "langchain_community.document_loaders.UnstructuredWordLoader", "defaultFor": ["docx"], "allowdTypes": ["docx"], }, From cbf80c9fcd89b6d34f34d8643e92b194b3ea0866 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 15:50:24 -0300 Subject: [PATCH 147/153] Delete document loader components --- .../documentloaders/AZLyricsLoader.py | 26 ------------ .../documentloaders/AirbyteJSONLoader.py | 37 ----------------- .../documentloaders/BSHTMLLoader.py | 34 --------------- .../components/documentloaders/CSVLoader.py | 34 --------------- .../documentloaders/CoNLLULoader.py | 36 ---------------- .../CollegeConfidentialLoader.py | 28 ------------- .../documentloaders/EverNoteLoader.py | 38 ----------------- .../documentloaders/FacebookChatLoader.py | 37 ----------------- .../documentloaders/GitbookLoader.py | 32 --------------- .../components/documentloaders/HNLoader.py | 29 ------------- .../documentloaders/IFixitLoader.py | 32 --------------- .../components/documentloaders/IMSDbLoader.py | 30 -------------- 
.../components/documentloaders/PyPDFLoader.py | 41 ------------------- .../documentloaders/ReadTheDocsLoader.py | 29 ------------- .../components/documentloaders/SRTLoader.py | 27 ------------ .../documentloaders/SlackDirectoryLoader.py | 32 --------------- .../components/documentloaders/TextLoader.py | 28 ------------- .../documentloaders/UnstructuredHTMLLoader.py | 29 ------------- .../UnstructuredPowerPointLoader.py | 31 -------------- .../UnstructuredWordDocumentLoader.py | 25 ----------- 20 files changed, 635 deletions(-) delete mode 100644 src/backend/langflow/components/documentloaders/AZLyricsLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/BSHTMLLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/CSVLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/CoNLLULoader.py delete mode 100644 src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/EverNoteLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/FacebookChatLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/GitbookLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/HNLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/IFixitLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/IMSDbLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/PyPDFLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/SRTLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/TextLoader.py delete mode 100644 
src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py delete mode 100644 src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py diff --git a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py b/src/backend/langflow/components/documentloaders/AZLyricsLoader.py deleted file mode 100644 index 82d507d68..000000000 --- a/src/backend/langflow/components/documentloaders/AZLyricsLoader.py +++ /dev/null @@ -1,26 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from typing import List, Optional, Dict -from langchain_community.document_loaders.azlyrics import AZLyricsLoader - - -class AZLyricsLoaderComponent(CustomComponent): - display_name = "AZLyricsLoader" - description = "Load `AZLyrics` webpages." - documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/azlyrics" - - def build_config(self): - return { - "metadata": {"display_name": "Metadata", "field_type": "dict", "value": {}, "show": True}, - "web_path": {"display_name": "Web Page", "type": "str", "required": True, "show": True}, - } - - def build(self, metadata: Optional[Dict] = None, web_path: str = "") -> List[Document]: - documents = AZLyricsLoader(web_path=web_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py b/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py deleted file mode 100644 index 8c670a8c0..000000000 --- a/src/backend/langflow/components/documentloaders/AirbyteJSONLoader.py +++ /dev/null @@ -1,37 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from typing import List, Optional, Dict 
-from langchain_community.document_loaders.airbyte_json import AirbyteJSONLoader - - -class AirbyteJSONLoaderComponent(CustomComponent): - display_name = "AirbyteJSONLoader" - description = "Load local `Airbyte` json files." - documentation = ( - "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json" - ) - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "file_types": [".json"], - "required": True, - "field_type": "file", - }, - "metadata": { - "display_name": "Metadata", - "field_type": "dict", - "required": False, - }, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: - documents = AirbyteJSONLoader(file_path=file_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/BSHTMLLoader.py b/src/backend/langflow/components/documentloaders/BSHTMLLoader.py deleted file mode 100644 index 672e1e24d..000000000 --- a/src/backend/langflow/components/documentloaders/BSHTMLLoader.py +++ /dev/null @@ -1,34 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from typing import Optional, Dict - - -class BSHTMLLoaderComponent(CustomComponent): - display_name = "BSHTMLLoader" - description = "Load `HTML` files and parse them with `beautiful soup`." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html" - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "show": True, - "type": "file", - "suffixes": [".html"], - "file_types": ["html"], - "field_type": "file", - }, - "metadata": { - "display_name": "Metadata", - "required": False, - "show": True, - "field_type": "dict", - }, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - # Assuming there is a class or function named BSHTMLLoader that takes a file path and optional metadata - # and returns a Document object after parsing HTML. Since the actual implementation of BSHTMLLoader is not provided, - # this is a placeholder and should be replaced with the actual logic. - raise NotImplementedError("The BSHTMLLoader function or class needs to be implemented.") diff --git a/src/backend/langflow/components/documentloaders/CSVLoader.py b/src/backend/langflow/components/documentloaders/CSVLoader.py deleted file mode 100644 index ac2b271e9..000000000 --- a/src/backend/langflow/components/documentloaders/CSVLoader.py +++ /dev/null @@ -1,34 +0,0 @@ -from langflow import CustomComponent -from typing import List -from langchain_community.document_loaders.csv_loader import CSVLoader -from langchain.docstore.document import Document - - -class CSVLoaderComponent(CustomComponent): - display_name = "CSVLoader" - description = "Load a `CSV` file into a list of Documents." 
- - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "suffixes": [".csv"], - "file_types": ["csv"], - "field_type": "file", - }, - "metadata": { - "display_name": "Metadata", - "required": False, - }, - } - - def build(self, file_path: str, metadata: dict) -> List[Document]: - documents = CSVLoader(file_path=file_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/CoNLLULoader.py b/src/backend/langflow/components/documentloaders/CoNLLULoader.py deleted file mode 100644 index 0cd2de50c..000000000 --- a/src/backend/langflow/components/documentloaders/CoNLLULoader.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List -from langflow import CustomComponent -from langchain.docstore.document import Document -from langchain_community.document_loaders.conllu import CoNLLULoader - - -class CoNLLULoaderComponent(CustomComponent): - display_name = "CoNLLULoader" - description = "Load `CoNLL-U` files." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/conll-u" - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "suffixes": [".conllu"], - "file_types": ["conllu"], - "field_type": "file", - }, - "metadata": { - "display_name": "Metadata", - "field_type": "dict", - "required": False, - }, - } - - def build(self, file_path: str, metadata: dict) -> List[Document]: - documents = CoNLLULoader(file_path=file_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py b/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py deleted file mode 100644 index 8fea6e5d2..000000000 --- a/src/backend/langflow/components/documentloaders/CollegeConfidentialLoader.py +++ /dev/null @@ -1,28 +0,0 @@ -from langflow import CustomComponent -from langchain.docstore.document import Document -from typing import Optional, List -from langchain_community.document_loaders.college_confidential import CollegeConfidentialLoader - - -class CollegeConfidentialLoaderComponent(CustomComponent): - display_name = "CollegeConfidentialLoader" - description = "Load `College Confidential` webpages." 
- documentation = ( - "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential" - ) - - def build_config(self): - return { - "metadata": {"display_name": "Metadata", "values": {}}, - "web_path": {"display_name": "Web Page", "required": True}, - } - - def build(self, web_path: str, metadata: Optional[dict] = {}) -> List[Document]: - documents = CollegeConfidentialLoader(web_path=web_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/EverNoteLoader.py b/src/backend/langflow/components/documentloaders/EverNoteLoader.py deleted file mode 100644 index 6f7431fcb..000000000 --- a/src/backend/langflow/components/documentloaders/EverNoteLoader.py +++ /dev/null @@ -1,38 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from typing import List, Optional, Dict -from langchain_community.document_loaders.evernote import EverNoteLoader - - -class EverNoteLoaderComponent(CustomComponent): - display_name = "EverNoteLoader" - description = "Load from `EverNote`." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote" - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "show": True, - "type": "file", - "file_types": [".xml"], - "field_type": "file", - }, - "metadata": { - "display_name": "Metadata", - "required": False, - "show": True, - "field_type": "dict", - }, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: - documents = EverNoteLoader(file_path=file_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py b/src/backend/langflow/components/documentloaders/FacebookChatLoader.py deleted file mode 100644 index ecb99eea4..000000000 --- a/src/backend/langflow/components/documentloaders/FacebookChatLoader.py +++ /dev/null @@ -1,37 +0,0 @@ -from langflow import CustomComponent -from langchain.docstore.document import Document -from typing import List, Optional, Dict -from langchain_community.document_loaders.facebook_chat import FacebookChatLoader - - -class FacebookChatLoaderComponent(CustomComponent): - display_name = "FacebookChatLoader" - description = "Load `Facebook Chat` messages directory dump." 
- documentation = ( - "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat" - ) - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "file_types": [".json"], - "field_type": "file", - }, - "metadata": { - "display_name": "Metadata", - "required": False, - "field_type": "dict", - }, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: - documents = FacebookChatLoader(path=file_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/GitbookLoader.py b/src/backend/langflow/components/documentloaders/GitbookLoader.py deleted file mode 100644 index 05b03e611..000000000 --- a/src/backend/langflow/components/documentloaders/GitbookLoader.py +++ /dev/null @@ -1,32 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from typing import List, Optional, Dict -from langchain_community.document_loaders.gitbook import GitbookLoader - - -class GitbookLoaderComponent(CustomComponent): - display_name = "GitbookLoader" - description = "Load `GitBook` data." 
- - def build_config(self): - return { - "metadata": { - "display_name": "Metadata", - "field_type": "dict", - "value": {}, - }, - "web_page": { - "display_name": "Web Page", - "required": True, - }, - } - - def build(self, metadata: Optional[Dict] = None, web_page: str = "") -> List[Document]: - documents = GitbookLoader(web_page=web_page).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/HNLoader.py b/src/backend/langflow/components/documentloaders/HNLoader.py deleted file mode 100644 index 23f77d570..000000000 --- a/src/backend/langflow/components/documentloaders/HNLoader.py +++ /dev/null @@ -1,29 +0,0 @@ -from langflow import CustomComponent -from typing import List, Optional, Dict -from langchain_community.document_loaders.hn import HNLoader -from langflow.field_typing import Document - - -class HNLoaderComponent(CustomComponent): - display_name = "HNLoader" - description = "Load `Hacker News` data." 
- - def build_config(self): - return { - "metadata": {"display_name": "Metadata", "value": {}, "required": False, "field_type": "dict"}, - "web_path": {"display_name": "Web Page", "required": True}, - } - - def build( - self, - web_path: str, - metadata: Optional[Dict] = None, - ) -> List[Document]: - documents = HNLoader(web_path=web_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/IFixitLoader.py b/src/backend/langflow/components/documentloaders/IFixitLoader.py deleted file mode 100644 index 48c5f7b47..000000000 --- a/src/backend/langflow/components/documentloaders/IFixitLoader.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Dict, List, Optional - -from langchain_community.document_loaders.ifixit import IFixitLoader -from langflow import CustomComponent -from langflow.field_typing import Document - - -class IFixitLoaderComponent(CustomComponent): - display_name = "IFixitLoader" - description = "Load `iFixit` repair guides, device wikis and answers." - documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/ifixit" - - def build_config(self): - return { - "metadata": {"display_name": "Metadata", "type": "dict", "default": {}}, - "web_path": {"display_name": "Web Page", "type": "str"}, - } - - def build(self, web_path: str, metadata: Optional[Dict] = None) -> List[Document]: - # Assuming IFixitLoader is the correct class name from the langchain library, - # and it has a load method that returns a Document object. 
- if metadata is None: - metadata = {} - - docs = IFixitLoader(web_path=web_path).load() - - if metadata: - for doc in docs: - if doc.metadata is None: - doc.metadata = {} - doc.metadata.update(metadata) - return docs diff --git a/src/backend/langflow/components/documentloaders/IMSDbLoader.py b/src/backend/langflow/components/documentloaders/IMSDbLoader.py deleted file mode 100644 index bd16f854b..000000000 --- a/src/backend/langflow/components/documentloaders/IMSDbLoader.py +++ /dev/null @@ -1,30 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from langchain_community.document_loaders.imsdb import IMSDbLoader - -from typing import Dict, List, Optional - - -class IMSDbLoaderComponent(CustomComponent): - display_name = "IMSDbLoader" - description = "Load `IMSDb` webpages." - - def build_config(self): - return { - "metadata": {"display_name": "Metadata", "field_type": "dict"}, - "web_path": {"display_name": "Web Page", "field_type": "str"}, - } - - def build( - self, - metadata: Optional[Dict] = None, - web_path: str = "", - ) -> List[Document]: - documents = IMSDbLoader(web_path=web_path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/PyPDFLoader.py b/src/backend/langflow/components/documentloaders/PyPDFLoader.py deleted file mode 100644 index b44a90577..000000000 --- a/src/backend/langflow/components/documentloaders/PyPDFLoader.py +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Dict, List, Optional - -from langchain_community.document_loaders.pdf import PyPDFLoader -from langchain_core.documents import Document - -from langflow import CustomComponent - - -class PyPDFLoaderComponent(CustomComponent): - display_name = "PyPDFLoader" - description = "Load PDF using pypdf into list of documents" - documentation = 
"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf" - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "type": "file", - "fileTypes": ["pdf"], - "show": True, - }, - "metadata": { - "display_name": "Metadata", - "required": False, - "type": "dict", - "show": True, - }, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: - # Assuming there is a PyPDFLoader class that takes file_path and metadata as parameters - # and inherits from BaseLoader - docs = PyPDFLoader(file_path=file_path).load() - - if metadata: - for doc in docs: - if doc.metadata is None: - doc.metadata = {} - doc.metadata.update(metadata) - return docs diff --git a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py b/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py deleted file mode 100644 index 55284b9bd..000000000 --- a/src/backend/langflow/components/documentloaders/ReadTheDocsLoader.py +++ /dev/null @@ -1,29 +0,0 @@ -from langflow import CustomComponent -from typing import Dict, Optional, List -from langchain_core.documents import Document -from langchain_community.document_loaders.readthedocs import ReadTheDocsLoader - - -class ReadTheDocsLoaderComponent(CustomComponent): - display_name = "ReadTheDocsLoader" - description = "Load `ReadTheDocs` documentation directory." 
- - def build_config(self): - return { - "metadata": {"display_name": "Metadata", "default": {}, "field_type": "dict"}, - "path": {"display_name": "Local directory", "required": True}, - } - - def build( - self, - path: str, - metadata: Optional[Dict] = None, - ) -> List[Document]: - documents = ReadTheDocsLoader(path=path).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/SRTLoader.py b/src/backend/langflow/components/documentloaders/SRTLoader.py deleted file mode 100644 index 931660947..000000000 --- a/src/backend/langflow/components/documentloaders/SRTLoader.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Dict, Optional - -from langchain_core.documents import Document - -from langflow import CustomComponent - - -class SRTLoaderComponent(CustomComponent): - display_name = "SRTLoader" - description = "Load `.srt` (subtitle) files." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/subtitle" - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "fileTypes": ["srt"], - }, - "metadata": { - "display_name": "Metadata", - "required": False, - }, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - return Document(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py b/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py deleted file mode 100644 index 782f1cea9..000000000 --- a/src/backend/langflow/components/documentloaders/SlackDirectoryLoader.py +++ /dev/null @@ -1,32 +0,0 @@ -from langflow import CustomComponent -from typing import Optional, Dict, List -from langchain_core.documents import Document -from langchain_community.document_loaders.slack_directory import SlackDirectoryLoader - - -class SlackDirectoryLoaderComponent(CustomComponent): - display_name = "SlackDirectoryLoader" - description = "Load from a `Slack` directory dump." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/slack" - - def build_config(self): - return { - "zip_path": {"display_name": "Path to zip file", "field_type": "file", "file_types": [".zip"]}, - "metadata": {"display_name": "Metadata", "field_type": "dict"}, - "workspace_url": {"display_name": "Workspace URL"}, - } - - def build( - self, - zip_path: str, - metadata: Optional[Dict] = None, - workspace_url: Optional[str] = None, - ) -> List[Document]: - documents = SlackDirectoryLoader(zip_path=zip_path, workspace_url=workspace_url).load() - if metadata: - for document in documents: - if not document.metadata: - document.metadata = metadata - else: - document.metadata.update(metadata) - return documents diff --git a/src/backend/langflow/components/documentloaders/TextLoader.py b/src/backend/langflow/components/documentloaders/TextLoader.py deleted file mode 100644 index d3fa8e7ad..000000000 --- a/src/backend/langflow/components/documentloaders/TextLoader.py +++ /dev/null @@ -1,28 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from typing import Optional, Dict - - -class TextLoaderComponent(CustomComponent): - display_name = "TextLoader" - description = "Load text file." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/" - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "type": "file", - "suffixes": [".txt"], - }, - "metadata": { - "display_name": "Metadata", - "required": False, - "type": "dict", - "default": {}, - }, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - return Document(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py deleted file mode 100644 index 41e5a468e..000000000 --- a/src/backend/langflow/components/documentloaders/UnstructuredHTMLLoader.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Dict, List, Optional - -from langchain import CustomComponent -from langchain_community.document_loaders import UnstructuredHTMLLoader -from langchain_core.documents import Document - - -class UnstructuredHTMLLoaderComponent(CustomComponent): - display_name = "UnstructuredHTMLLoader" - description = "Load `HTML` files using `Unstructured`." - documentation = "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html" - - def build_config(self): - return { - "file_path": {"display_name": "File Path", "type": "file", "fileTypes": ["html"]}, - "metadata": {"display_name": "Metadata"}, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> List[Document]: - # Assuming the existence of a function or class named UnstructuredHTMLLoader that - # loads HTML and creates a Document object; Replace with actual implementation. 
- docs = UnstructuredHTMLLoader(file_path=file_path).load() - - if metadata: - for doc in docs: - if doc.metadata is None: - doc.metadata = {} - doc.metadata.update(metadata) - return docs diff --git a/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py deleted file mode 100644 index 797ffbc12..000000000 --- a/src/backend/langflow/components/documentloaders/UnstructuredPowerPointLoader.py +++ /dev/null @@ -1,31 +0,0 @@ -from langflow import CustomComponent -from langchain.document_loaders import Document -from typing import Optional, Dict - - -class UnstructuredPowerPointLoaderComponent(CustomComponent): - display_name = "UnstructuredPowerPointLoader" - description = "Load `Microsoft PowerPoint` files using `Unstructured`." - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "type": "file", - "fileTypes": ["pptx", "ppt"], - }, - "metadata": { - "display_name": "Metadata", - "type": "dict", - }, - } - - def build( - self, - file_path: str, - metadata: Optional[Dict] = None, - ) -> Document: - # Assuming there is a loader class `UnstructuredPowerPointLoader` that takes these parameters - # Since the actual loader class is not provided, this is a placeholder for the actual implementation - loader_class = self.get_loader_class() # Placeholder method to obtain the correct loader class - return loader_class(file_path=file_path, metadata=metadata) diff --git a/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py b/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py deleted file mode 100644 index 48ff3a24e..000000000 --- a/src/backend/langflow/components/documentloaders/UnstructuredWordDocumentLoader.py +++ /dev/null @@ -1,25 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Document -from typing import Optional, Dict - - -class 
UnstructuredWordDocumentLoaderComponent(CustomComponent): - display_name = "UnstructuredWordDocumentLoader" - description = "Load `Microsoft Word` file using `Unstructured`." - documentation = ( - "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word" - ) - - def build_config(self): - return { - "file_path": { - "display_name": "File Path", - "required": True, - "type": "file", - "suffixes": [".docx", ".doc"], - }, - "metadata": {"display_name": "Metadata", "required": False, "type": "dict"}, - } - - def build(self, file_path: str, metadata: Optional[Dict] = None) -> Document: - return Document(file_path=file_path, metadata=metadata) From e111e01efd73fa21c5fdbcc17eec5a86a146187e Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 15:57:31 -0300 Subject: [PATCH 148/153] Update version number in pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3ef489623..407fa1f06 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.6.5a12" +version = "0.6.5a13" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ From d8e34c1c6b155f5faff2012457c7ce10c8dd8df4 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 16:59:18 -0300 Subject: [PATCH 149/153] Update dependencies in pyproject.toml --- poetry.lock | 870 ++++++++++++++----------------------------------- pyproject.toml | 4 +- 2 files changed, 243 insertions(+), 631 deletions(-) diff --git a/poetry.lock b/poetry.lock index 22e7c431c..64be3b841 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -246,23 +246,6 @@ files = [ {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] -[[package]] -name = "asgiref" -version = "3.7.2" -description = "ASGI specs, helper code, and adapters" -optional = false -python-versions = ">=3.7" -files = [ - {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, - {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - [[package]] name = "asttokens" version = "2.4.1" @@ -431,17 +414,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.30" +version = "1.34.31" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.30-py3-none-any.whl", hash = "sha256:cd6173380768faaecf6236dbdcec15d8d032cbb162ce354fdb111056a74fc298"}, - {file = "boto3-1.34.30.tar.gz", hash = "sha256:9e1476ce2b26437881a0381bf2daa54de619ac74ab4bd74278668acda6004a64"}, + {file = "boto3-1.34.31-py3-none-any.whl", hash = "sha256:0d800130e43a5d4e71300cc6f91aabcef6fe6f26bc206bc61374bf695049587a"}, + {file = "boto3-1.34.31.tar.gz", hash = "sha256:c4dec7ea9bc9210ec783d39b56d332f5a266b0d1e31a96c5092f6bd5252361ba"}, ] [package.dependencies] -botocore = ">=1.34.30,<1.35.0" +botocore = ">=1.34.31,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -450,13 +433,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.30" +version = "1.34.31" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.30-py3-none-any.whl", hash = "sha256:caf82d91c2ff61235284a07ffdfba006873e0752e00896052f901a37720cefa4"}, - {file = "botocore-1.34.30.tar.gz", hash = "sha256:e071a9766e7fc2221ca42ec01dfc54368a7518610787342ea622f6edc57f7891"}, + {file = "botocore-1.34.31-py3-none-any.whl", hash = "sha256:6ee1ba451ce3d640dccd485906f68a55d9e7f3534553876e4adc75d6060a05ac"}, + {file = "botocore-1.34.31.tar.gz", hash = "sha256:d5a2153dbe9687f510f179e03913bc9b4e266c865cabebe440c4d05ab923faa7"}, ] [package.dependencies] @@ -562,30 +545,6 @@ files = [ {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, ] -[[package]] -name = "build" -version = "1.0.3" -description = "A simple, correct Python build frontend" -optional = false -python-versions = ">= 3.7" -files = [ - {file = "build-1.0.3-py3-none-any.whl", hash = "sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f"}, - {file = "build-1.0.3.tar.gz", hash = "sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} -packaging = ">=19.0" -pyproject_hooks = "*" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] -typing = ["importlib-metadata (>=5.1)", "mypy (>=1.5.0,<1.6.0)", "tomli", "typing-extensions (>=3.7.4.3)"] -virtualenv = ["virtualenv (>=20.0.35)"] 
- [[package]] name = "cachetools" version = "5.3.2" @@ -877,38 +836,28 @@ numpy = "*" [[package]] name = "chromadb" -version = "0.4.22" +version = "0.4.13" description = "Chroma." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "chromadb-0.4.22-py3-none-any.whl", hash = "sha256:ad210b27b4cda2f09d15adc9c83c81bfa66b69f39648a27b637306e40de0680d"}, - {file = "chromadb-0.4.22.tar.gz", hash = "sha256:c793149e1c2bbbb52d77602c6c0594c5752f04cd9be12619250ddad2082af27a"}, + {file = "chromadb-0.4.13-py3-none-any.whl", hash = "sha256:6959dc4aaa6278c7491dd1911724981a0e46816b19e9f86945b9bd875e6a252a"}, + {file = "chromadb-0.4.13.tar.gz", hash = "sha256:99d330b9ac8f2ec81f4b34798d34f2ea9f4656bef1da951efa7e93957ef7e706"}, ] [package.dependencies] bcrypt = ">=4.0.1" -build = ">=1.0.3" chroma-hnswlib = "0.7.3" fastapi = ">=0.95.2" -grpcio = ">=1.58.0" importlib-resources = "*" -kubernetes = ">=28.1.0" -mmh3 = ">=4.0.1" -numpy = ">=1.22.5" +numpy = {version = ">=1.22.5", markers = "python_version >= \"3.8\""} onnxruntime = ">=1.14.1" -opentelemetry-api = ">=1.2.0" -opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" -opentelemetry-instrumentation-fastapi = ">=0.41b0" -opentelemetry-sdk = ">=1.2.0" overrides = ">=7.3.1" posthog = ">=2.4.0" pulsar-client = ">=3.1.0" pydantic = ">=1.9" pypika = ">=0.48.9" -PyYAML = ">=6.0.0" requests = ">=2.28" -tenacity = ">=8.2.3" tokenizers = ">=0.13.2" tqdm = ">=4.65.0" typer = ">=0.9.0" @@ -1318,6 +1267,17 @@ files = [ graph = ["objgraph (>=1.7.2)"] profile = ["gprof2dot (>=2022.7.29)"] +[[package]] +name = "dirtyjson" +version = "1.0.8" +description = "JSON decoder for Python that can extract data from the muck" +optional = false +python-versions = "*" +files = [ + {file = "dirtyjson-1.0.8-py3-none-any.whl", hash = "sha256:125e27248435a58acace26d5c2c4c11a1c0de0a9c5124c5a94ba78e517d74f53"}, + {file = "dirtyjson-1.0.8.tar.gz", hash = "sha256:90ca4a18f3ff30ce849d100dcf4a003953c79d3a2348ef056f1d9c22231a25fd"}, 
+] + [[package]] name = "diskcache" version = "5.6.3" @@ -1509,13 +1469,13 @@ requests = ["requests (>=2.4.0,<3.0.0)"] [[package]] name = "emoji" -version = "2.10.0" +version = "2.10.1" description = "Emoji for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "emoji-2.10.0-py2.py3-none-any.whl", hash = "sha256:aed4332caa23553a7218f032c08b0a325ae53b010f7fb98ad272c0f7841bc1d3"}, - {file = "emoji-2.10.0.tar.gz", hash = "sha256:7e68435eecd2c428c3b4aaa5f72d61a5b1a36c81a5138681cba13d19d94aa3a0"}, + {file = "emoji-2.10.1-py2.py3-none-any.whl", hash = "sha256:11fb369ea79d20c14efa4362c732d67126df294a7959a2c98bfd7447c12a218e"}, + {file = "emoji-2.10.1.tar.gz", hash = "sha256:16287283518fb7141bde00198f9ffff4e1c1cb570efb68b2f1ec50975c3a581d"}, ] [package.extras] @@ -2181,13 +2141,13 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-api-core" -version = "2.16.0" +version = "2.16.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.16.0.tar.gz", hash = "sha256:d1fc42e52aa4042ad812827b7aad858394e2bf73da8329af95ad8efa30bc886b"}, - {file = "google_api_core-2.16.0-py3-none-any.whl", hash = "sha256:c424f9f271c7f55366254708e0d0383963a72376286018af0a04f322be843400"}, + {file = "google-api-core-2.16.1.tar.gz", hash = "sha256:7f668ffa3d5b9f3c6930407e5f5d691c05a376050a5a5fd772b9dc32e70a0c30"}, + {file = "google_api_core-2.16.1-py3-none-any.whl", hash = "sha256:257e9e152cd18da0c6701113c122ade04dca04731e179fc5c7dca48e1396ec4c"}, ] [package.dependencies] @@ -2639,182 +2599,169 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 [[package]] name = "grpcio" -version = "1.60.0" +version = "1.47.5" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = 
"grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, - {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, - {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, - {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, - {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, - {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, - {file = 
"grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, - {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, - {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, - {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, - {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, - {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, - {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, - {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, - {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, - {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, - {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, - {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, - {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, - {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, - {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, - {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, - {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, - {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, - {file = "grpcio-1.60.0.tar.gz", hash = 
"sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, + {file = "grpcio-1.47.5-cp310-cp310-linux_armv7l.whl", hash = "sha256:acc73289d0c44650aa1f21eccfa967f5623b01c3b5e2b4596fe5f9c5bf10956d"}, + {file = "grpcio-1.47.5-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f3174c798959998876d546944523a558f78a9b9feb22a2cbaaa3822f2e158653"}, + {file = "grpcio-1.47.5-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:64401ee6d54b4d5869bcba4be3cae9f2e335c44a39ba1e29991ad22cfe2abacb"}, + {file = "grpcio-1.47.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39a07eb5e7ec9277e5d124fb0e2d4f51ddbaadc2abdd27e8bbf1716dcf45e581"}, + {file = "grpcio-1.47.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:874b138ca95a6375ae6f6a12c10a348827c9aa8fbd05d025b87b5e050ab55b46"}, + {file = "grpcio-1.47.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90539369afba42fc921cdda9d5f697a421f05a2e82ba58342ffbe88aa586019e"}, + {file = "grpcio-1.47.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b18f970514bbc76547928e26d0cec06996ce3f947a3634b3adbe79d0e48e980"}, + {file = "grpcio-1.47.5-cp310-cp310-win32.whl", hash = "sha256:44c52923be0c4a0f662de43644679c6356960c38c4edf44864c23b998693c7cc"}, + {file = "grpcio-1.47.5-cp310-cp310-win_amd64.whl", hash = "sha256:07761f427551fced386db8c78701d6a167b2a682aa8df808303dd0a0d44bf6c9"}, + {file = "grpcio-1.47.5-cp36-cp36m-linux_armv7l.whl", hash = "sha256:10eb026bf75568de06933366f0340d2b4b207425c74a5640aa1812b8b69e7d9d"}, + {file = "grpcio-1.47.5-cp36-cp36m-macosx_10_10_universal2.whl", hash = "sha256:4f8e7fba6b1150a63aebd04d03be779de4ea4c4a8b28869e7a3c8f0b3ec59edc"}, + {file = "grpcio-1.47.5-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:36d93b19c214bc654fc50ae65cce84b8f7698159191b9d3f21f9ad92ae7bc325"}, + {file = "grpcio-1.47.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4e59f916bf58528e55893743151c6bd9f0a393fddfe411a6fffd29a300e6acf2"}, + {file = "grpcio-1.47.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f8b2d316a3be464eb2a20afa7026a235a07a0094be879876611206d8026679"}, + {file = "grpcio-1.47.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:0c3076957cd2aea34fe69384453315fd765948eb6cb73a12f332277308d04b76"}, + {file = "grpcio-1.47.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:007f5ad07d2f3a4a422c1df589a0d25e918b96d8f6069cb6f0254386a5f09262"}, + {file = "grpcio-1.47.5-cp36-cp36m-win32.whl", hash = "sha256:01ac149a5ca9512277b1d2fe85687099f3e442c6f9f924eae003a6700735e23e"}, + {file = "grpcio-1.47.5-cp36-cp36m-win_amd64.whl", hash = "sha256:a32ccc88950f2be619157201161e70a5e5ed9e2427662bb2e60f1a8cea7d0db6"}, + {file = "grpcio-1.47.5-cp37-cp37m-linux_armv7l.whl", hash = "sha256:ec71f15258e086acadb13ec06e4e4c54eb0f5455cd4c618997f847874d5ff9ea"}, + {file = "grpcio-1.47.5-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:4bbf5a63497dbd5e44c4335cab153796a4274be17ca40ec971a7749c3f4fef6a"}, + {file = "grpcio-1.47.5-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:11e1bc97e88232201256b718c63a8a1fd86ec6fca3a501293be5c5e423de9d56"}, + {file = "grpcio-1.47.5-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e568d84fed80713d2fa3221552beee27ed8034f7eff52bb7871bf5ffe4d4ca78"}, + {file = "grpcio-1.47.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4c838de8e1e7194d3f9a679fd76cc44a1dbe81f18bd39ee233c72347d772bf"}, + {file = "grpcio-1.47.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a74c19baf2f8127b44b3f58e2a5801a17992dae9a20197b4a8fa26e2ea79742b"}, + {file = "grpcio-1.47.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e369ed5ecff11ef85666cabbb5736495604e052c8dc2c03a2104f99dfd0a59e3"}, + {file = "grpcio-1.47.5-cp37-cp37m-win32.whl", hash = "sha256:ccb741fab5117aea981d4ac341d2ce1e588f515f83091807d4e2bb388ed59edd"}, + {file = 
"grpcio-1.47.5-cp37-cp37m-win_amd64.whl", hash = "sha256:af9d3b075dfcbc343d44b0e98725ba6d56dc0669e61905a4e71e8f4409cfefbd"}, + {file = "grpcio-1.47.5-cp38-cp38-linux_armv7l.whl", hash = "sha256:cac6847a4b9a7e7a1f270a71fef1c17c2e8a6b411c0ca48080ce1e08d284aded"}, + {file = "grpcio-1.47.5-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:54a3e17d155b6fb141e1fbb7c47d30556bec4c940b66ff4d9513536e2e214d4a"}, + {file = "grpcio-1.47.5-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d1873c0b84a0ffb129f75e7c8be45d2cae427baf0b090d15b9ff46c1841c3f53"}, + {file = "grpcio-1.47.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e209df91cf8dfb335c2e26784702b0e12c20dc4de7b9b6d2cccd968146155f06"}, + {file = "grpcio-1.47.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:350e2627684f93f8b59af9c76a03eeb4aa145ecc589569137d4518486f4f1727"}, + {file = "grpcio-1.47.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:23754807314c5aa4c26eb1c50aaf506801a2f7825951100280d2c013b127436f"}, + {file = "grpcio-1.47.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:503c3fa0045f3ef80aa1ad082eac6a888081da2e1cd793f281ed499831e4c498"}, + {file = "grpcio-1.47.5-cp38-cp38-win32.whl", hash = "sha256:a4eecfbe994c88996461bd1459e43ea460952d4147f53e8c18e089764e6808f5"}, + {file = "grpcio-1.47.5-cp38-cp38-win_amd64.whl", hash = "sha256:941927ae4d589a2fef5c22b9c47df9e5e613c737bd750bafc3a9547cc506017c"}, + {file = "grpcio-1.47.5-cp39-cp39-linux_armv7l.whl", hash = "sha256:9891c77e69bd4109c25c1bea51d78fbc5ba2fcd9445bf99225bb8fb03d849913"}, + {file = "grpcio-1.47.5-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:61e83778d85dbbbd7446451ec28b7261e9ebba489cc8c262dfe8fedc119f769b"}, + {file = "grpcio-1.47.5-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:21ccfc0e989531cbdc93c54a7581ea5f7c46bf585016d9320b4be042f1e02374"}, + {file = "grpcio-1.47.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bea35a0114a39827ffe59f73950d242f95d59a9ac2009ae8da7b065c06f0a57f"}, + {file = "grpcio-1.47.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e75b9e52eeb9d1335aaeecf581cb3cea7fc4bafd7bd675c83f208a386a42a8"}, + {file = "grpcio-1.47.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1fb86f95228827b55e860278d142326af4489c0f4220975780daff325fc87172"}, + {file = "grpcio-1.47.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9b83183525afe58dd9e7bb249f9e55df326e3c3834d09ea476c7a6bb12f73ee"}, + {file = "grpcio-1.47.5-cp39-cp39-win32.whl", hash = "sha256:00bff7492875ab04ec5ed3d92550d8f8aa423151e187b79684c8a22c7a6f1670"}, + {file = "grpcio-1.47.5-cp39-cp39-win_amd64.whl", hash = "sha256:2b32adae820cc0347e5e44efe91b661b436dbca73f25c5763cadb1cafd1dca10"}, + {file = "grpcio-1.47.5.tar.gz", hash = "sha256:b62b8bea0c94b4603bb4c8332d8a814375120bea3c2dbeb71397213bde5ea832"}, ] +[package.dependencies] +six = ">=1.5.2" + [package.extras] -protobuf = ["grpcio-tools (>=1.60.0)"] +protobuf = ["grpcio-tools (>=1.47.5)"] [[package]] name = "grpcio-health-checking" -version = "1.60.0" +version = "1.47.5" description = "Standard Health Checking Service for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-health-checking-1.60.0.tar.gz", hash = "sha256:478b5300778120fed9f6d134d72b157a59f9c06689789218cbff47fafca2f119"}, - {file = "grpcio_health_checking-1.60.0-py3-none-any.whl", hash = "sha256:13caf28bc93795bd6bdb580b21832ebdd1aa3f5b648ea47ed17362d85bed96d3"}, + {file = "grpcio-health-checking-1.47.5.tar.gz", hash = "sha256:74f36ef2ff704c46965bd74cdea51afc0bbcde641134c9d09ecb5063391db516"}, + {file = "grpcio_health_checking-1.47.5-py3-none-any.whl", hash = "sha256:659b83138cb2b7db71777044d0caf58bab4f958fce972900f8577ebb4edca29d"}, ] [package.dependencies] -grpcio = ">=1.60.0" -protobuf = ">=4.21.6" +grpcio = ">=1.47.5" +protobuf = ">=3.12.0" [[package]] name = "grpcio-reflection" -version = "1.60.0" +version = "1.47.5" 
description = "Standard Protobuf Reflection Service for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-reflection-1.60.0.tar.gz", hash = "sha256:3f6c0c73ba8f20d1420c5e72fc4dd0389fac346ed8fb32a28e6e1967b44fff35"}, - {file = "grpcio_reflection-1.60.0-py3-none-any.whl", hash = "sha256:f7a347ebd6cecf347fc836fd520fd1f0b3411912981649c7fb34d62a3a15aa4e"}, + {file = "grpcio-reflection-1.47.5.tar.gz", hash = "sha256:ac391ec327861f16bc870638101fee80799eccf39c5b09e9ddd776d6854b9873"}, + {file = "grpcio_reflection-1.47.5-py3-none-any.whl", hash = "sha256:8cfd222f2116b7e1bcd55bd2a1fcb168c5a9cd20310151d6278563f516e8ae1e"}, ] [package.dependencies] -grpcio = ">=1.60.0" -protobuf = ">=4.21.6" +grpcio = ">=1.47.5" +protobuf = ">=3.12.0" [[package]] name = "grpcio-status" -version = "1.60.0" +version = "1.47.5" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.60.0.tar.gz", hash = "sha256:f10e0b6db3adc0fdc244b71962814ee982996ef06186446b5695b9fa635aa1ab"}, - {file = "grpcio_status-1.60.0-py3-none-any.whl", hash = "sha256:7d383fa36e59c1e61d380d91350badd4d12ac56e4de2c2b831b050362c3c572e"}, + {file = "grpcio-status-1.47.5.tar.gz", hash = "sha256:671bb4c0819697a699c12a8785a78d6847eafb6a83b2437bbae13989b04e5c25"}, + {file = "grpcio_status-1.47.5-py3-none-any.whl", hash = "sha256:24549a84fa37ca5de1e0f6be96b4c2c3623b1e2b7359aa16b3de5aa0563795f1"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.60.0" -protobuf = ">=4.21.6" +grpcio = ">=1.47.5" +protobuf = ">=3.12.0" [[package]] name = "grpcio-tools" -version = "1.60.0" +version = "1.47.5" description = "Protobuf code generator for gRPC" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "grpcio-tools-1.60.0.tar.gz", hash = "sha256:ed30499340228d733ff69fcf4a66590ed7921f94eb5a2bf692258b1280b9dac7"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:6807b7a3f3e6e594566100bd7fe04a2c42ce6d5792652677f1aaf5aa5adaef3d"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:857c5351e9dc33a019700e171163f94fcc7e3ae0f6d2b026b10fda1e3c008ef1"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:ec0e401e9a43d927d216d5169b03c61163fb52b665c5af2fed851357b15aef88"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e68dc4474f30cad11a965f0eb5d37720a032b4720afa0ec19dbcea2de73b5aae"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbf0ed772d2ae7e8e5d7281fcc00123923ab130b94f7a843eee9af405918f924"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c771b19dce2bfe06899247168c077d7ab4e273f6655d8174834f9a6034415096"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5614cf0960456d21d8a0f4902e3e5e3bcacc4e400bf22f196e5dd8aabb978b7"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-win32.whl", hash = "sha256:87cf439178f3eb45c1a889b2e4a17cbb4c450230d92c18d9c57e11271e239c55"}, - {file = "grpcio_tools-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:687f576d7ff6ce483bc9a196d1ceac45144e8733b953620a026daed8e450bc38"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2a8a758701f3ac07ed85f5a4284c6a9ddefcab7913a8e552497f919349e72438"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:7c1cde49631732356cb916ee1710507967f19913565ed5f9991e6c9cb37e3887"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:d941749bd8dc3f8be58fe37183143412a27bec3df8482d5abd6b4ec3f1ac2924"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ee35234f1da8fba7ddbc544856ff588243f1128ea778d7a1da3039be829a134"}, - {file = 
"grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f7a5094adb49e85db13ea3df5d99a976c2bdfd83b0ba26af20ebb742ac6786"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:24c4ead4a03037beaeb8ef2c90d13d70101e35c9fae057337ed1a9144ef10b53"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811abb9c4fb6679e0058dfa123fb065d97b158b71959c0e048e7972bbb82ba0f"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-win32.whl", hash = "sha256:bd2a17b0193fbe4793c215d63ce1e01ae00a8183d81d7c04e77e1dfafc4b2b8a"}, - {file = "grpcio_tools-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:b22b1299b666eebd5752ba7719da536075eae3053abcf2898b65f763c314d9da"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:74025fdd6d1cb7ba4b5d087995339e9a09f0c16cf15dfe56368b23e41ffeaf7a"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:5a907a4f1ffba86501b2cdb8682346249ea032b922fc69a92f082ba045cca548"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:1fbb9554466d560472f07d906bfc8dcaf52f365c2a407015185993e30372a886"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f10ef47460ce3c6fd400f05fe757b90df63486c9b84d1ecad42dcc5f80c8ac14"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:321b18f42a70813545e416ddcb8bf20defa407a8114906711c9710a69596ceda"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:081336d8258f1a56542aa8a7a5dec99a2b38d902e19fbdd744594783301b0210"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:addc9b23d6ff729d9f83d4a2846292d4c84f5eb2ec38f08489a6a0d66ac2b91e"}, - {file = "grpcio_tools-1.60.0-cp312-cp312-win32.whl", hash = "sha256:e87cabac7969bdde309575edc2456357667a1b28262b2c1f12580ef48315b19d"}, - 
{file = "grpcio_tools-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e70d867c120d9849093b0ac24d861e378bc88af2552e743d83b9f642d2caa7c2"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:559ce714fe212aaf4abbe1493c5bb8920def00cc77ce0d45266f4fd9d8b3166f"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:7a5263a0f2ddb7b1cfb2349e392cfc4f318722e0f48f886393e06946875d40f3"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:18976684a931ca4bcba65c78afa778683aefaae310f353e198b1823bf09775a0"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5c519a0d4ba1ab44a004fa144089738c59278233e2010b2cf4527dc667ff297"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6170873b1e5b6580ebb99e87fb6e4ea4c48785b910bd7af838cc6e44b2bccb04"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fb4df80868b3e397d5fbccc004c789d2668b622b51a9d2387b4c89c80d31e2c5"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dba6e32c87b4af29b5f475fb2f470f7ee3140bfc128644f17c6c59ddeb670680"}, - {file = "grpcio_tools-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f610384dee4b1ca705e8da66c5b5fe89a2de3d165c5282c3d1ddf40cb18924e4"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:4041538f55aad5b3ae7e25ab314d7995d689e968bfc8aa169d939a3160b1e4c6"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:2fb4cf74bfe1e707cf10bc9dd38a1ebaa145179453d150febb121c7e9cd749bf"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2fd1671c52f96e79a2302c8b1c1f78b8a561664b8b3d6946f20d8f1cc6b4225a"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd1e68c232fe01dd5312a8dbe52c50ecd2b5991d517d7f7446af4ba6334ba872"}, 
- {file = "grpcio_tools-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17a32b3da4fc0798cdcec0a9c974ac2a1e98298f151517bf9148294a3b1a5742"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9970d384fb0c084b00945ef57d98d57a8d32be106d8f0bd31387f7cbfe411b5b"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5ce6bbd4936977ec1114f2903eb4342781960d521b0d82f73afedb9335251f6f"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-win32.whl", hash = "sha256:2e00de389729ca8d8d1a63c2038703078a887ff738dc31be640b7da9c26d0d4f"}, - {file = "grpcio_tools-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:6192184b1f99372ff1d9594bd4b12264e3ff26440daba7eb043726785200ff77"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:eae27f9b16238e2aaee84c77b5923c6924d6dccb0bdd18435bf42acc8473ae1a"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b96981f3a31b85074b73d97c8234a5ed9053d65a36b18f4a9c45a2120a5b7a0a"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:1748893efd05cf4a59a175d7fa1e4fbb652f4d84ccaa2109f7869a2be48ed25e"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a6fe752205caae534f29fba907e2f59ff79aa42c6205ce9a467e9406cbac68c"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3456df087ea61a0972a5bc165aed132ed6ddcc63f5749e572f9fff84540bdbad"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f3d916606dcf5610d4367918245b3d9d8cd0d2ec0b7043d1bbb8c50fe9815c3a"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fc01bc1079279ec342f0f1b6a107b3f5dc3169c33369cf96ada6e2e171f74e86"}, - {file = "grpcio_tools-1.60.0-cp39-cp39-win32.whl", hash = "sha256:2dd01257e4feff986d256fa0bac9f56de59dc735eceeeb83de1c126e2e91f653"}, - {file = 
"grpcio_tools-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b93ae8ffd18e9af9a965ebca5fa521e89066267de7abdde20721edc04e42721"}, + {file = "grpcio-tools-1.47.5.tar.gz", hash = "sha256:62ced60566a4cbcf35c57e887e2e68b4f108b3474ef3ec0022d38cd579345f92"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-linux_armv7l.whl", hash = "sha256:9f92c561b245a562110bd84d3b64b016c8af5afde39febf1f71553ae56f6e8e4"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a0a991844a024705ad177cb858d36e3e6b329ea4a78b7f4c597b2817fc2692e7"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:935976d5436d4306de052d1e00848fa25abc667e185aaaffcd367915f33a67c7"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2481dba6a30d415a4756cd88cc380780e3f00bb41d56b8f6547bc3c09c6f4e7f"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e62176978faa96b21e4e821e7070b0feed919726ff730c0b3b7e8d106ddb45bf"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:728eb1f4ef6d380366a2de9940d1f910ece8bf4e44de5ca935cd16d4394e82ff"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d58982c747e107f65c7307ec1646cce105b0785088287bf209f545377aeedaf4"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-win32.whl", hash = "sha256:ea6d8f07b087bc2d579b7727daee2abf38fe5dc475c9e7c4f16b4a2c31895319"}, + {file = "grpcio_tools-1.47.5-cp310-cp310-win_amd64.whl", hash = "sha256:5e7a4e68072639fa767bde1011f5d83f4461a8e60651ea202af597777ee1ffd7"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-linux_armv7l.whl", hash = "sha256:bb1e066fc50ef7503b024924858658692d3e98582a9727b156f2f845da70e11e"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-macosx_10_10_universal2.whl", hash = "sha256:7d3e397a27e652ae6579f1f7dc3fc0c771db977ccaaded1fe113e882df425c15"}, + {file = 
"grpcio_tools-1.47.5-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:b19d8f1e8422826d49fc428acc66b69aa450c70f7090681df32d535188edf524"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0e017bd1022bc981fa1629e757e0d3d4a1991f999fb90ec714c2683fe05b8fa"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb56ea33c4a33ee3b707f62339fd579e1a8dbbfeb7665d7ff85ee837cf64794"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:02882ff2f703b75d343991608b39104f1621508cf407e427a75c1794ed0fac95"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:84395aacae4f8a3358ad648a8bacf6b15bbb8946d8cf73f47dc77cfe1a154d48"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-win32.whl", hash = "sha256:de8901c64a1091cc474318e7a013af8c30feba34c7954c29ca8f477baf07db28"}, + {file = "grpcio_tools-1.47.5-cp36-cp36m-win_amd64.whl", hash = "sha256:37cb5c3d94ba1efef0d17a66e5e69b177fc934389eda8b76b161a6623e45e714"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-linux_armv7l.whl", hash = "sha256:5c2d3a35e9341ea9c68afe289054bd8604eda4214e6d916f97b19a316537a296"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:89733edb89ec28e52dd9cc25e90b78248b6edd265f564726be2a9c4b4ee78479"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:489f41535d779287759942c6cced93c4219ea53dad46ebdc4faca6220e1dba88"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:072c84f561912400363b81af6bf5424c38fab80f0c9436c0fe19b2e7c2bcf15c"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c650233420279f943bd1dcf286742aaeb4db7cc5f6554a5e8c16c2e4fa19a28f"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:dab220aba6b5777b16df5c5b3a30f831cdbc4f493eabdaf9f6585691bad5496a"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:309ca8508f361895ef2d4f533611272228d2412c8cae754b695673c7c65a2f8b"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-win32.whl", hash = "sha256:f8ce5fb65e97866257943cbf6d504195ab55e01ef467988d86322a36041b6de8"}, + {file = "grpcio_tools-1.47.5-cp37-cp37m-win_amd64.whl", hash = "sha256:b9154a18b0ad2bc4b9ceadedd7b67bb65b500b3427495b4d224a1a835aa55ce6"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-linux_armv7l.whl", hash = "sha256:aaa4063bc05a18f32ae98e414e2472477468b966b9a1425c41eec160250beff2"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:093da28f8ce3a0eedd5370b9f09f815fb6c01fd663d60734eab5b300b9a305ec"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:0771f57585b9070086dec509b02fa2804a9d4c395e95cd7a6cb42d8f4b5683f7"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68d4cdc674c8596da8e25cf37741aab3f07bdf38731510a92019e5ec57f5fcea"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08fdce5549acca9fd7a45084c62e8ab0a1ca1c530bcbfa089625e9523f224023"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8431b9ee083bec444ca6d48705b89774f97ba0a75e8c33ef3b9a2dc6ed2aa584"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf37376da0062155d728fb9a1d522ea8f5039ebf774885d269f7772cbc3a2e6"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-win32.whl", hash = "sha256:b65a59698f938fa59fd756799cd641c3755fb09cb95de008e4d67a9e5b1af6d5"}, + {file = "grpcio_tools-1.47.5-cp38-cp38-win_amd64.whl", hash = "sha256:17c2b5ce8b3100c8da4ae5070d8d2c2466f174e66d8127fb85ef8a7937a03853"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-linux_armv7l.whl", hash = 
"sha256:9070301f079fef76fb0d51b84f393c6738587f3a16a2f0ced303362b0cc0ecf6"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:5bcf01116a4d3bed2faf832f8c5618d1c69473576f3925240e3c5042dfbc115e"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b555b954aa213eac8efe7df507a178c3ab7323df9f501846a1bbccdf81354831"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7604e08530b3edc688e41aa8af46051478d417b08afdf6fc2eafb5eb90528a26"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d3f80818a560abee8189c4f0b074f45c16309b4596e013cb6ce105a022c5965"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c801ebd7fa2304ff85aa15147f134aefe33132d85308c43e46f6a5be78b5a8a8"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:235adfc22e9c703533573344de1d2394ddd92b27c82eb259bb5fb46f885159b8"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-win32.whl", hash = "sha256:d659c257cbb48c843931b584d3c3da5473fa17275e0d04af79c9e9fdd6077179"}, + {file = "grpcio_tools-1.47.5-cp39-cp39-win_amd64.whl", hash = "sha256:9d121c63ff2fddeae2c65f6675eb944f47808a242b647d80b4661b2c5e1e6732"}, ] [package.dependencies] -grpcio = ">=1.60.0" -protobuf = ">=4.21.6,<5.0dev" +grpcio = ">=1.47.5" +protobuf = ">=3.12.0,<4.0dev" setuptools = "*" [[package]] @@ -3256,12 +3203,12 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jina" -version = "3.8.2" +version = "3.10.1" description = "Build cross-modal and multi-modal applications on the cloud · Neural Search · Creative AI · Cloud Native · MLOps" optional = false python-versions = "*" files = [ - {file = "jina-3.8.2.tar.gz", hash = "sha256:1645da74fdf47d3266f76fdd8cd857a3b5fc43fb17a45fe9d9c983a1e82b8137"}, + {file = "jina-3.10.1.tar.gz", hash = 
"sha256:876d06e9c6879aefdb337a7c96b82ec44bc40371f745be6ed307b3e22f989cb6"}, ] [package.dependencies] @@ -3270,22 +3217,22 @@ aiohttp = "*" aiostream = "*" cryptography = "*" docarray = [ - {version = ">=0.16.1", extras = ["common"]}, - {version = ">=0.16.1"}, + {version = ">=0.16.4"}, + {version = ">=0.16.3", extras = ["common"]}, ] docker = "*" fastapi = ">=0.76.0" filelock = "*" -grpcio = ">=1.46.0" -grpcio-health-checking = ">=1.46.0" -grpcio-reflection = ">=1.46.0" +grpcio = ">=1.46.0,<1.48.1" +grpcio-health-checking = ">=1.46.0,<1.48.1" +grpcio-reflection = ">=1.46.0,<1.48.1" jcloud = ">=0.0.35" -jina-hubble-sdk = ">=0.15.1" +jina-hubble-sdk = ">=0.19.0" numpy = "*" packaging = ">=20.0" pathspec = "*" prometheus_client = "*" -protobuf = ">=3.13.0" +protobuf = ">=3.20.0" pydantic = "*" python-multipart = "*" pyyaml = ">=5.3.1" @@ -3298,25 +3245,25 @@ websockets = "*" aiofiles = ["aiofiles"] aiohttp = ["aiohttp"] aiostream = ["aiostream"] -all = ["Pillow", "aiofiles", "aiohttp", "aiostream", "black (==22.3.0)", "bs4", "coverage (==6.2)", "cryptography", "docarray (>=0.16.1)", "docarray[common] (>=0.16.1)", "docker", "fastapi (>=0.76.0)", "filelock", "flaky", "grpcio (>=1.46.0)", "grpcio-health-checking (>=1.46.0)", "grpcio-reflection (>=1.46.0)", "jcloud (>=0.0.35)", "jina-hubble-sdk (>=0.15.1)", "jsonschema", "kubernetes (>=18.20.0)", "mock", "numpy", "packaging (>=20.0)", "pathspec", "portforward (>=0.2.4)", "prometheus_client", "protobuf (>=3.13.0)", "psutil", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-custom_exit_code", "pytest-kind (==21.1.3)", "pytest-lazy-fixture", "pytest-mock", "pytest-repeat", "pytest-reraise", "pytest-timeout", "python-multipart", "pyyaml (>=5.3.1)", "requests", "requests-mock", "scipy (>=1.6.1)", "sgqlc", "strawberry-graphql (>=0.96.0)", "tensorflow (>=2.0)", "torch", "uvicorn[standard]", "uvloop", "websockets"] +all = ["Pillow", "aiofiles", "aiohttp", "aiostream", "black (==22.3.0)", "bs4", "coverage (==6.2)", 
"cryptography", "docarray (>=0.16.4)", "docarray[common] (>=0.16.3)", "docker", "fastapi (>=0.76.0)", "filelock", "flaky", "grpcio (>=1.46.0,<1.48.1)", "grpcio-health-checking (>=1.46.0,<1.48.1)", "grpcio-reflection (>=1.46.0,<1.48.1)", "jcloud (>=0.0.35)", "jina-hubble-sdk (>=0.19.0)", "jsonschema", "kubernetes (>=18.20.0)", "mock", "numpy", "packaging (>=20.0)", "pathspec", "portforward (>=0.2.4)", "prometheus_client", "protobuf (>=3.20.0)", "psutil", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-custom_exit_code", "pytest-kind (==21.1.3)", "pytest-lazy-fixture", "pytest-mock", "pytest-repeat", "pytest-reraise", "pytest-timeout", "python-multipart", "pyyaml (>=5.3.1)", "requests", "requests-mock", "scipy (>=1.6.1)", "sgqlc", "strawberry-graphql (>=0.96.0)", "tensorflow (>=2.0)", "torch", "uvicorn[standard]", "uvloop", "websockets"] black = ["black (==22.3.0)"] bs4 = ["bs4"] cicd = ["bs4", "jsonschema", "portforward (>=0.2.4)", "sgqlc", "strawberry-graphql (>=0.96.0)", "tensorflow (>=2.0)", "torch"] -core = ["docarray (>=0.16.1)", "grpcio (>=1.46.0)", "grpcio-health-checking (>=1.46.0)", "grpcio-reflection (>=1.46.0)", "jcloud (>=0.0.35)", "jina-hubble-sdk (>=0.15.1)", "numpy", "packaging (>=20.0)", "protobuf (>=3.13.0)", "pyyaml (>=5.3.1)"] +core = ["docarray (>=0.16.4)", "grpcio (>=1.46.0,<1.48.1)", "grpcio-health-checking (>=1.46.0,<1.48.1)", "grpcio-reflection (>=1.46.0,<1.48.1)", "jcloud (>=0.0.35)", "jina-hubble-sdk (>=0.19.0)", "numpy", "packaging (>=20.0)", "protobuf (>=3.20.0)", "pyyaml (>=5.3.1)"] coverage = ["coverage (==6.2)"] cryptography = ["cryptography"] -devel = ["aiofiles", "aiohttp", "aiostream", "cryptography", "docarray[common] (>=0.16.1)", "docker", "fastapi (>=0.76.0)", "filelock", "pathspec", "prometheus_client", "pydantic", "python-multipart", "requests", "sgqlc", "strawberry-graphql (>=0.96.0)", "uvicorn[standard]", "uvloop", "websockets"] -docarray = ["docarray (>=0.16.1)"] -"docarray[common" = ["docarray[common] 
(>=0.16.1)"] +devel = ["aiofiles", "aiohttp", "aiostream", "cryptography", "docarray[common] (>=0.16.3)", "docker", "fastapi (>=0.76.0)", "filelock", "pathspec", "prometheus_client", "pydantic", "python-multipart", "requests", "sgqlc", "strawberry-graphql (>=0.96.0)", "uvicorn[standard]", "uvloop", "websockets"] +docarray = ["docarray (>=0.16.4)"] +"docarray[common" = ["docarray[common] (>=0.16.3)"] docker = ["docker"] fastapi = ["fastapi (>=0.76.0)"] filelock = ["filelock"] flaky = ["flaky"] -grpcio = ["grpcio (>=1.46.0)"] -grpcio-health-checking = ["grpcio-health-checking (>=1.46.0)"] -grpcio-reflection = ["grpcio-reflection (>=1.46.0)"] +grpcio = ["grpcio (>=1.46.0,<1.48.1)"] +grpcio-health-checking = ["grpcio-health-checking (>=1.46.0,<1.48.1)"] +grpcio-reflection = ["grpcio-reflection (>=1.46.0,<1.48.1)"] jcloud = ["jcloud (>=0.0.35)"] -jina-hubble-sdk = ["jina-hubble-sdk (>=0.15.1)"] +jina-hubble-sdk = ["jina-hubble-sdk (>=0.19.0)"] jsonschema = ["jsonschema"] kubernetes = ["kubernetes (>=18.20.0)"] mock = ["mock"] @@ -3327,7 +3274,7 @@ perf = ["prometheus_client", "uvloop"] pillow = ["Pillow"] portforward = ["portforward (>=0.2.4)"] prometheus-client = ["prometheus_client"] -protobuf = ["protobuf (>=3.13.0)"] +protobuf = ["protobuf (>=3.20.0)"] psutil = ["psutil"] pydantic = ["pydantic"] pytest = ["pytest"] @@ -3346,7 +3293,7 @@ requests = ["requests"] requests-mock = ["requests-mock"] scipy = ["scipy (>=1.6.1)"] sgqlc = ["sgqlc"] -standard = ["aiofiles", "aiohttp", "aiostream", "cryptography", "docarray[common] (>=0.16.1)", "docker", "fastapi (>=0.76.0)", "filelock", "pathspec", "prometheus_client", "pydantic", "python-multipart", "requests", "uvicorn[standard]", "uvloop", "websockets"] +standard = ["aiofiles", "aiohttp", "aiostream", "cryptography", "docarray[common] (>=0.16.3)", "docker", "fastapi (>=0.76.0)", "filelock", "pathspec", "prometheus_client", "pydantic", "python-multipart", "requests", "uvicorn[standard]", "uvloop", "websockets"] 
strawberry-graphql = ["strawberry-graphql (>=0.96.0)"] tensorflow = ["tensorflow (>=2.0)"] test = ["Pillow", "black (==22.3.0)", "coverage (==6.2)", "flaky", "kubernetes (>=18.20.0)", "mock", "psutil", "pytest", "pytest-asyncio", "pytest-cov", "pytest-custom_exit_code", "pytest-kind (==21.1.3)", "pytest-lazy-fixture", "pytest-mock", "pytest-repeat", "pytest-reraise", "pytest-timeout", "requests-mock", "scipy (>=1.6.1)"] @@ -3614,32 +3561,6 @@ sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=2.8.0)"] -[[package]] -name = "kubernetes" -version = "29.0.0" -description = "Kubernetes python client" -optional = false -python-versions = ">=3.6" -files = [ - {file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"}, - {file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"}, -] - -[package.dependencies] -certifi = ">=14.05.14" -google-auth = ">=1.0.1" -oauthlib = ">=3.2.2" -python-dateutil = ">=2.5.3" -pyyaml = ">=5.4.1" -requests = "*" -requests-oauthlib = "*" -six = ">=1.9.0" -urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" - -[package.extras] -adal = ["adal (>=1.0.2)"] - [[package]] name = "langchain" version = "0.1.4" @@ -3822,13 +3743,13 @@ langchain = ["langchain (>=0.0.309)"] [[package]] name = "langsmith" -version = "0.0.84" +version = "0.0.85" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.84-py3-none-any.whl", hash = "sha256:9ae1ab777018e2174f68e8f53c88e7a7feb8dbf1c458b473644a3d5e22dc1eb7"}, - {file = "langsmith-0.0.84.tar.gz", hash = "sha256:dd163f89bca14c86759c651a72917c6d45f7dd18435d7bc65dc205a23dd9ec8d"}, + {file = "langsmith-0.0.85-py3-none-any.whl", hash = "sha256:9d0ccbcda7b69c83828060603a51bb4319e43b8dc807fbd90b6355f8ec709500"}, + {file = "langsmith-0.0.85.tar.gz", hash = "sha256:fefc631fc30d836b54d4e3f99961c41aea497633898b8f09e305b6c7216c2c54"}, ] [package.dependencies] @@ -3876,19 +3797,20 @@ test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"] [[package]] name = "llama-index" -version = "0.9.39" +version = "0.9.40" description = "Interface between LLMs and your data" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "llama_index-0.9.39-py3-none-any.whl", hash = "sha256:73e19bf664b0643e3c1b88229d4bcaad841f4c6e882a63b27f637386c54d5353"}, - {file = "llama_index-0.9.39.tar.gz", hash = "sha256:c0d4093cd1c6d6056275f96d6acba56f383ef98925c9ce3fc8cde9fb4dee1f75"}, + {file = "llama_index-0.9.40-py3-none-any.whl", hash = "sha256:9fd192c574026b3e5eb95c8aed82506c48b46b5acb3401e98e0864d6f485f7a9"}, + {file = "llama_index-0.9.40.tar.gz", hash = "sha256:bbe8b9584393a90bfb5246333d63df1c34d0989d19737f76f26baed6080b25dc"}, ] [package.dependencies] aiohttp = ">=3.8.6,<4.0.0" dataclasses-json = "*" deprecated = ">=1.2.9.3" +dirtyjson = ">=1.0.8,<2.0.0" fsspec = ">=2023.5.0" httpx = "*" nest-asyncio = ">=1.5.8,<2.0.0" @@ -4249,98 +4171,6 @@ files = [ [package.dependencies] requests = "*" -[[package]] -name = "mmh3" -version = "4.1.0" -description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
-optional = false -python-versions = "*" -files = [ - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, - {file = 
"mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, - {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, - {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, - {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, - {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, - {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, - {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, - {file = 
"mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, - {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, - {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, - {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, - {file = 
"mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, - {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, - {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, - 
{file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, - {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = 
"sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, - {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, - {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, - {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, -] - -[package.extras] -test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] - [[package]] name = "monotonic" version = "1.6" @@ -4894,22 +4724,6 @@ files = [ {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, ] -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - [[package]] name = "olefile" version = "0.46" @@ -4944,35 +4758,36 @@ full = ["XLMMacroDeobfuscator"] [[package]] name = "onnxruntime" -version = "1.16.3" +version = "1.17.0" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false python-versions = "*" files = [ - {file = "onnxruntime-1.16.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:3bc41f323ac77acfed190be8ffdc47a6a75e4beeb3473fbf55eeb075ccca8df2"}, - {file = "onnxruntime-1.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:212741b519ee61a4822c79c47147d63a8b0ffde25cd33988d3d7be9fbd51005d"}, - {file = 
"onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f91f5497fe3df4ceee2f9e66c6148d9bfeb320cd6a71df361c66c5b8bac985a"}, - {file = "onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b1fc269cabd27f129fb9058917d6fdc89b188c49ed8700f300b945c81f889"}, - {file = "onnxruntime-1.16.3-cp310-cp310-win32.whl", hash = "sha256:f36b56a593b49a3c430be008c2aea6658d91a3030115729609ec1d5ffbaab1b6"}, - {file = "onnxruntime-1.16.3-cp310-cp310-win_amd64.whl", hash = "sha256:3c467eaa3d2429c026b10c3d17b78b7f311f718ef9d2a0d6938e5c3c2611b0cf"}, - {file = "onnxruntime-1.16.3-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a225bb683991001d111f75323d355b3590e75e16b5e0f07a0401e741a0143ea1"}, - {file = "onnxruntime-1.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9aded21fe3d898edd86be8aa2eb995aa375e800ad3dfe4be9f618a20b8ee3630"}, - {file = "onnxruntime-1.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00cccc37a5195c8fca5011b9690b349db435986bd508eb44c9fce432da9228a4"}, - {file = "onnxruntime-1.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e253e572021563226a86f1c024f8f70cdae28f2fb1cc8c3a9221e8b1ce37db5"}, - {file = "onnxruntime-1.16.3-cp311-cp311-win32.whl", hash = "sha256:a82a8f0b4c978d08f9f5c7a6019ae51151bced9fd91e5aaa0c20a9e4ac7a60b6"}, - {file = "onnxruntime-1.16.3-cp311-cp311-win_amd64.whl", hash = "sha256:78d81d9af457a1dc90db9a7da0d09f3ccb1288ea1236c6ab19f0ca61f3eee2d3"}, - {file = "onnxruntime-1.16.3-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:04ebcd29c20473596a1412e471524b2fb88d55e6301c40b98dd2407b5911595f"}, - {file = "onnxruntime-1.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9996bab0f202a6435ab867bc55598f15210d0b72794d5de83712b53d564084ae"}, - {file = "onnxruntime-1.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5b8f5083f903408238883821dd8c775f8120cb4a604166dbdabe97f4715256d5"}, - {file = "onnxruntime-1.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c2dcf1b70f8434abb1116fe0975c00e740722aaf321997195ea3618cc00558e"}, - {file = "onnxruntime-1.16.3-cp38-cp38-win32.whl", hash = "sha256:d4a0151e1accd04da6711f6fd89024509602f82c65a754498e960b032359b02d"}, - {file = "onnxruntime-1.16.3-cp38-cp38-win_amd64.whl", hash = "sha256:e8aa5bba78afbd4d8a2654b14ec7462ff3ce4a6aad312a3c2d2c2b65009f2541"}, - {file = "onnxruntime-1.16.3-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:6829dc2a79d48c911fedaf4c0f01e03c86297d32718a3fdee7a282766dfd282a"}, - {file = "onnxruntime-1.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:76f876c53bfa912c6c242fc38213a6f13f47612d4360bc9d599bd23753e53161"}, - {file = "onnxruntime-1.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4137e5d443e2dccebe5e156a47f1d6d66f8077b03587c35f11ee0c7eda98b533"}, - {file = "onnxruntime-1.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56695c1a343c7c008b647fff3df44da63741fbe7b6003ef576758640719be7b"}, - {file = "onnxruntime-1.16.3-cp39-cp39-win32.whl", hash = "sha256:985a029798744ce4743fcf8442240fed35c8e4d4d30ec7d0c2cdf1388cd44408"}, - {file = "onnxruntime-1.16.3-cp39-cp39-win_amd64.whl", hash = "sha256:28ff758b17ce3ca6bcad3d936ec53bd7f5482e7630a13f6dcae518eba8f71d85"}, + {file = "onnxruntime-1.17.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d2b22a25a94109cc983443116da8d9805ced0256eb215c5e6bc6dcbabefeab96"}, + {file = "onnxruntime-1.17.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4c87d83c6f58d1af2675fc99e3dc810f2dbdb844bcefd0c1b7573632661f6fc"}, + {file = "onnxruntime-1.17.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dba55723bf9b835e358f48c98a814b41692c393eb11f51e02ece0625c756b797"}, + {file = "onnxruntime-1.17.0-cp310-cp310-win32.whl", 
hash = "sha256:ee48422349cc500273beea7607e33c2237909f58468ae1d6cccfc4aecd158565"}, + {file = "onnxruntime-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f34cc46553359293854e38bdae2ab1be59543aad78a6317e7746d30e311110c3"}, + {file = "onnxruntime-1.17.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:16d26badd092c8c257fa57c458bb600d96dc15282c647ccad0ed7b2732e6c03b"}, + {file = "onnxruntime-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6f1273bebcdb47ed932d076c85eb9488bc4768fcea16d5f2747ca692fad4f9d3"}, + {file = "onnxruntime-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cb60fd3c2c1acd684752eb9680e89ae223e9801a9b0e0dc7b28adabe45a2e380"}, + {file = "onnxruntime-1.17.0-cp311-cp311-win32.whl", hash = "sha256:4b038324586bc905299e435f7c00007e6242389c856b82fe9357fdc3b1ef2bdc"}, + {file = "onnxruntime-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:93d39b3fa1ee01f034f098e1c7769a811a21365b4883f05f96c14a2b60c6028b"}, + {file = "onnxruntime-1.17.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:90c0890e36f880281c6c698d9bc3de2afbeee2f76512725ec043665c25c67d21"}, + {file = "onnxruntime-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7466724e809a40e986b1637cba156ad9fc0d1952468bc00f79ef340bc0199552"}, + {file = "onnxruntime-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d47bee7557a8b99c8681b6882657a515a4199778d6d5e24e924d2aafcef55b0a"}, + {file = "onnxruntime-1.17.0-cp312-cp312-win32.whl", hash = "sha256:bb1bf1ee575c665b8bbc3813ab906e091a645a24ccc210be7932154b8260eca1"}, + {file = "onnxruntime-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:ac2f286da3494b29b4186ca193c7d4e6a2c1f770c4184c7192c5da142c3dec28"}, + {file = "onnxruntime-1.17.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1ec485643b93e0a3896c655eb2426decd63e18a278bb7ccebc133b340723624f"}, + {file = 
"onnxruntime-1.17.0-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83c35809cda898c5a11911c69ceac8a2ac3925911854c526f73bad884582f911"}, + {file = "onnxruntime-1.17.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fa464aa4d81df818375239e481887b656e261377d5b6b9a4692466f5f3261edc"}, + {file = "onnxruntime-1.17.0-cp38-cp38-win32.whl", hash = "sha256:b7b337cd0586f7836601623cbd30a443df9528ef23965860d11c753ceeb009f2"}, + {file = "onnxruntime-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:fbb9faaf51d01aa2c147ef52524d9326744c852116d8005b9041809a71838878"}, + {file = "onnxruntime-1.17.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:5a06ab84eaa350bf64b1d747b33ccf10da64221ed1f38f7287f15eccbec81603"}, + {file = "onnxruntime-1.17.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d3d11db2c8242766212a68d0b139745157da7ce53bd96ba349a5c65e5a02357"}, + {file = "onnxruntime-1.17.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5632077c3ab8b0cd4f74b0af9c4e924be012b1a7bcd7daa845763c6c6bf14b7d"}, + {file = "onnxruntime-1.17.0-cp39-cp39-win32.whl", hash = "sha256:61a12732cba869b3ad2d4e29ab6cb62c7a96f61b8c213f7fcb961ba412b70b37"}, + {file = "onnxruntime-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:461fa0fc7d9c392c352b6cccdedf44d818430f3d6eacd924bb804fdea2dcfd02"}, ] [package.dependencies] @@ -5006,171 +4821,6 @@ typing-extensions = ">=4.7,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -[[package]] -name = "opentelemetry-api" -version = "1.22.0" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_api-1.22.0-py3-none-any.whl", hash = "sha256:43621514301a7e9f5d06dd8013a1b450f30c2e9372b8e30aaeb4562abf2ce034"}, - {file = "opentelemetry_api-1.22.0.tar.gz", hash = "sha256:15ae4ca925ecf9cfdfb7a709250846fbb08072260fca08ade78056c502b86bed"}, -] - 
-[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<7.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.22.0" -description = "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.22.0-py3-none-any.whl", hash = "sha256:3f2538bec5312587f8676c332b3747f54c89fe6364803a807e217af4603201fa"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.22.0.tar.gz", hash = "sha256:71ae2f81bc6d6fe408d06388826edc8933759b2ca3a97d24054507dc7cfce52d"}, -] - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -opentelemetry-proto = "1.22.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.22.0" -description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.22.0-py3-none-any.whl", hash = "sha256:b5bcadc129272004316a455e9081216d3380c1fc2231a928ea6a70aa90e173fb"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.22.0.tar.gz", hash = "sha256:1e0e5aa4bbabc74942f06f268deffd94851d12a8dc30b02527472ef1729fe5b1"}, -] - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -grpcio = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.22.0" -opentelemetry-proto = "1.22.0" -opentelemetry-sdk = ">=1.22.0,<1.23.0" - -[package.extras] -test = ["pytest-grpc"] - -[[package]] -name = "opentelemetry-instrumentation" -version = "0.43b0" -description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation-0.43b0-py3-none-any.whl", hash = 
"sha256:0ff1334d7e359e27640e9d420024efeb73eacae464309c2e14ede7ba6c93967e"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.4,<2.0" -setuptools = ">=16.0" -wrapt = ">=1.0.0,<2.0.0" - -[[package]] -name = "opentelemetry-instrumentation-asgi" -version = "0.43b0" -description = "ASGI instrumentation for OpenTelemetry" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_asgi-0.43b0-py3-none-any.whl", hash = "sha256:1f593829fa039e9367820736fb063e92acd15c25b53d7bcb5d319971b8e93fd7"}, - {file = "opentelemetry_instrumentation_asgi-0.43b0.tar.gz", hash = "sha256:3f6f19333dca31ef696672e4e36cb1c2613c71dc7e847c11ff36a37e1130dadc"}, -] - -[package.dependencies] -asgiref = ">=3.0,<4.0" -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.43b0" -opentelemetry-semantic-conventions = "0.43b0" -opentelemetry-util-http = "0.43b0" - -[package.extras] -instruments = ["asgiref (>=3.0,<4.0)"] -test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.43b0)"] - -[[package]] -name = "opentelemetry-instrumentation-fastapi" -version = "0.43b0" -description = "OpenTelemetry FastAPI Instrumentation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_fastapi-0.43b0-py3-none-any.whl", hash = "sha256:b79c044df68a52e07b35fa12a424e7cc0dd27ff0a171c5fdcc41dea9de8fc938"}, - {file = "opentelemetry_instrumentation_fastapi-0.43b0.tar.gz", hash = "sha256:2afaaf470622e1a2732182c68f6d2431ffe5e026a7edacd0f83605632b66347f"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.43b0" -opentelemetry-instrumentation-asgi = "0.43b0" -opentelemetry-semantic-conventions = "0.43b0" -opentelemetry-util-http = "0.43b0" - -[package.extras] -instruments = ["fastapi (>=0.58,<1.0)"] -test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.43b0)", "requests (>=2.23,<3.0)"] 
- -[[package]] -name = "opentelemetry-proto" -version = "1.22.0" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_proto-1.22.0-py3-none-any.whl", hash = "sha256:ce7188d22c75b6d0fe53e7fb58501613d0feade5139538e79dedd9420610fa0c"}, - {file = "opentelemetry_proto-1.22.0.tar.gz", hash = "sha256:9ec29169286029f17ca34ec1f3455802ffb90131642d2f545ece9a63e8f69003"}, -] - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.22.0" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_sdk-1.22.0-py3-none-any.whl", hash = "sha256:a730555713d7c8931657612a88a141e3a4fe6eb5523d9e2d5a8b1e673d76efa6"}, - {file = "opentelemetry_sdk-1.22.0.tar.gz", hash = "sha256:45267ac1f38a431fc2eb5d6e0c0d83afc0b78de57ac345488aa58c28c17991d0"}, -] - -[package.dependencies] -opentelemetry-api = "1.22.0" -opentelemetry-semantic-conventions = "0.43b0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.43b0" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_semantic_conventions-0.43b0-py3-none-any.whl", hash = "sha256:291284d7c1bf15fdaddf309b3bd6d3b7ce12a253cec6d27144439819a15d8445"}, - {file = "opentelemetry_semantic_conventions-0.43b0.tar.gz", hash = "sha256:b9576fb890df479626fa624e88dde42d3d60b8b6c8ae1152ad157a8b97358635"}, -] - -[[package]] -name = "opentelemetry-util-http" -version = "0.43b0" -description = "Web util for OpenTelemetry" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_util_http-0.43b0-py3-none-any.whl", hash = "sha256:f25a820784b030f6cb86b3d76e5676c769b75ed3f55a210bcdae0a5e175ebadb"}, - {file = "opentelemetry_util_http-0.43b0.tar.gz", hash = "sha256:3ff6ab361dbe99fc81200d625603c0fb890c055c6e416a3e6d661ddf47a6c7f7"}, -] - 
[[package]] name = "orjson" version = "3.9.3" @@ -5536,18 +5186,18 @@ grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" @@ -5670,22 +5320,33 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.2" -description = "" +version = "3.20.3" +description = "Protocol Buffers" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = 
"protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, + {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, + {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, + {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, + {file = 
"protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, + {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, + {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, + {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, + {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, + {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, + {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, + {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = 
"sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, + {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, + {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, + {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, ] [[package]] @@ -6416,20 +6077,6 @@ files = [ {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, ] -[[package]] -name = "pyproject-hooks" -version = "1.0.0" -description = "Wrappers to call pyproject.toml-based build backend hooks." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyproject_hooks-1.0.0-py3-none-any.whl", hash = "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8"}, - {file = "pyproject_hooks-1.0.0.tar.gz", hash = "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"}, -] - -[package.dependencies] -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - [[package]] name = "pyreadline3" version = "3.4.1" @@ -6871,13 +6518,13 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qdrant-client" -version = "1.7.1" +version = "1.7.2" description = "Client library for the Qdrant vector search engine" optional = false python-versions = ">=3.8" files = [ - {file = "qdrant_client-1.7.1-py3-none-any.whl", hash = "sha256:b6b52007d7dce339007f6fa676a611fbc61609d374aa00d8596b62ea45d831d7"}, - {file = "qdrant_client-1.7.1.tar.gz", hash = "sha256:7e3660b540e5bddda555a1638dd7905df9963fefe2061712fc5f48bfd78d734d"}, + {file = "qdrant_client-1.7.2-py3-none-any.whl", hash = "sha256:d00cf0b7b7d6f06847c9bb0126b234151f78f75caf0b920ad976487808bb71ce"}, + {file = "qdrant_client-1.7.2.tar.gz", hash = 
"sha256:ec8286cd3a6556d3f033d3b687225af1e715d47767ee27f119aee3c8354c0e07"}, ] [package.dependencies] @@ -7188,24 +6835,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "requests-oauthlib" -version = "1.3.1" -description = "OAuthlib authentication support for Requests." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - [[package]] name = "rich" version = "13.7.0" @@ -7548,13 +7177,13 @@ test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", [[package]] name = "sentence-transformers" -version = "2.3.0" +version = "2.3.1" description = "Multilingual text embeddings" optional = true python-versions = ">=3.8.0" files = [ - {file = "sentence-transformers-2.3.0.tar.gz", hash = "sha256:8cd90bedc73f2cb6b8fa81f9ebf2bb24fb15a3eed4dd39bf7865d211c3bb6253"}, - {file = "sentence_transformers-2.3.0-py3-none-any.whl", hash = "sha256:d12607a2172c50e77ccd3cf160a5070c1d1e8f974d17f0dd7e05d015f76d6108"}, + {file = "sentence-transformers-2.3.1.tar.gz", hash = "sha256:d589d85a464f45338cdbdf99ea715f8068e1fb01c582e0bcdbf60bcf3eade6d0"}, + {file = "sentence_transformers-2.3.1-py3-none-any.whl", hash = "sha256:285d6637726c3b002186aa4b8bcace1101364b32671fb605297c4c2636b8190e"}, ] [package.dependencies] @@ -9003,39 +8632,22 @@ files = [ [[package]] name = "weaviate-client" -version = "4.4b6" +version = "3.26.2" description = "A python native Weaviate client" optional = false python-versions = ">=3.8" files = [ - {file = 
"weaviate-client-4.4b6.tar.gz", hash = "sha256:40b4ba7f3300ba537566697ed806b34cefb559285acbd4f0c2602dc71e5794e3"}, - {file = "weaviate_client-4.4b6-py3-none-any.whl", hash = "sha256:abb6140071ae83c4987176ae928328ad367eedccf7f2d06cb733d12a50ab445e"}, + {file = "weaviate-client-3.26.2.tar.gz", hash = "sha256:63ec70839b64909810a64aa7b3e5b85088462e93c7e2ed3c32ebefb702f36723"}, + {file = "weaviate_client-3.26.2-py3-none-any.whl", hash = "sha256:ca43bfb9c06b8ae3fd938dc9158acd93d4cbf4622192e173333e1ff63cf97164"}, ] [package.dependencies] authlib = ">=1.2.1,<2.0.0" -grpcio = ">=1.57.0,<2.0.0" -grpcio-health-checking = ">=1.57.0,<2.0.0" -grpcio-tools = ">=1.57.0,<2.0.0" -pydantic = ">=2.1.1,<3.0.0" requests = ">=2.30.0,<3.0.0" validators = ">=0.21.2,<1.0.0" -[[package]] -name = "websocket-client" -version = "1.7.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, -] - [package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] +grpc = ["grpcio (>=1.57.0,<2.0.0)", "grpcio-tools (>=1.57.0,<2.0.0)"] [[package]] name = "websockets" @@ -9456,4 +9068,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "8d7773b92331fa8603c4d92401eadb7f07954b972ad8354f690c63761d459397" +content-hash = "1809b0d53078f3da65b939b33d655473a81d3bfcd92b229bd1364613bf65cf58" diff --git a/pyproject.toml b/pyproject.toml index 407fa1f06..388af2675 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,7 +55,7 @@ tiktoken = "~0.5.0" wikipedia = "^1.4.0" qdrant-client = "^1.7.0" websockets = 
"^10.3" -weaviate-client = { version = "^4.4b6", allow-prereleases = true } +weaviate-client = "*" jina = "*" sentence-transformers = { version = "^2.2.2", optional = true } ctransformers = { version = "^0.2.10", optional = true } @@ -98,7 +98,7 @@ markupsafe = "^2.1.3" extract-msg = "^0.45.0" # jq is not available for windows jq = { version = "^1.6.0", markers = "sys_platform != 'win32'" } -boto3 = "^1.28.63" +boto3 = "^1.34.0" numexpr = "^2.8.6" qianfan = "0.2.0" pgvector = "^0.2.3" From da9b1398015861d3a41bb17763fa2d67fba30b95 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 17:00:22 -0300 Subject: [PATCH 150/153] Remove empty lines in pyproject.toml --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 388af2675..455e62b8d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,8 +25,6 @@ documentation = "https://docs.langflow.org" langflow = "langflow.__main__:main" [tool.poetry.dependencies] - - python = ">=3.9,<3.11" fastapi = "^0.108.0" uvicorn = "^0.25.0" From 57c09c3409c7531027ce5ca12759822d6cc8999f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 17:09:16 -0300 Subject: [PATCH 151/153] Add unique constraints and foreign key for user_id in flow table --- .../alembic/versions/b2fa308044b5_add_unique_constraints.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py b/src/backend/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py index e24d5a72f..8a2e90abc 100644 --- a/src/backend/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py +++ b/src/backend/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py @@ -28,7 +28,7 @@ def upgrade() -> None: batch_op.add_column(sa.Column('folder', sqlmodel.sql.sqltypes.AutoString(), nullable=True)) batch_op.add_column(sa.Column('user_id', sqlmodel.sql.sqltypes.GUID(), 
nullable=True)) batch_op.create_index(batch_op.f('ix_flow_user_id'), ['user_id'], unique=False) - batch_op.create_foreign_key(None, 'user', ['user_id'], ['id']) + batch_op.create_foreign_key('fk_flow_user_id_user', 'user', ['user_id'], ['id']) except Exception: pass # ### end Alembic commands ### @@ -38,7 +38,7 @@ def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### try: with op.batch_alter_table('flow', schema=None) as batch_op: - batch_op.drop_constraint(None, type_='foreignkey') + batch_op.drop_constraint('fk_flow_user_id_user', type_='foreignkey') batch_op.drop_index(batch_op.f('ix_flow_user_id')) batch_op.drop_column('user_id') batch_op.drop_column('folder') From 11473961e0fbf155649f11cde5276d37ba0592a4 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 17:09:38 -0300 Subject: [PATCH 152/153] Update dependencies to latest versions --- poetry.lock | 8 ++++---- pyproject.toml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 64be3b841..b6ccc55e7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -192,13 +192,13 @@ files = [ [[package]] name = "anthropic" -version = "0.12.0" +version = "0.13.0" description = "The official Python library for the anthropic API" optional = false python-versions = ">=3.7" files = [ - {file = "anthropic-0.12.0-py3-none-any.whl", hash = "sha256:d2f706c8dc95c7974e71c901c55245eeda5f129e78c3e3e6752411d4aecfe416"}, - {file = "anthropic-0.12.0.tar.gz", hash = "sha256:f61bc5f83d195a0b1b5d92d4e5d9a131cab196bfdf9c8f69b73a815a40665e12"}, + {file = "anthropic-0.13.0-py3-none-any.whl", hash = "sha256:2d4b6a69bf5b31a596669d68820f40f5ed9a9a3333ddaa727166a11ed29275e8"}, + {file = "anthropic-0.13.0.tar.gz", hash = "sha256:b935d2fee12f7dbfcc80398b3da5f20103ece42aecb97d8ce24459e3c4f8ec8a"}, ] [package.dependencies] @@ -9068,4 +9068,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" 
python-versions = ">=3.9,<3.11" -content-hash = "1809b0d53078f3da65b939b33d655473a81d3bfcd92b229bd1364613bf65cf58" +content-hash = "4a8a0961e571a06db892d45bf1ccc76a348a419878b7a68ca4014707075cf220" diff --git a/pyproject.toml b/pyproject.toml index 455e62b8d..d7853a25d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ google-api-python-client = "^2.79.0" typer = "^0.9.0" gunicorn = "^21.2.0" langchain = "~0.1.0" -openai = "^1.6.1" +openai = "^1.10.0" pandas = "2.0.3" chromadb = "^0.4.0" huggingface-hub = { version = "^0.19.0", extras = ["inference"] } @@ -61,7 +61,7 @@ cohere = "^4.39.0" python-multipart = "^0.0.6" sqlmodel = "^0.0.14" faiss-cpu = "^1.7.4" -anthropic = "^0.12.0" +anthropic = "^0.13.0" orjson = "3.9.3" multiprocess = "^0.70.14" cachetools = "^5.3.1" From 339029cd5bc2c96ce424427a291860e56f541cca Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 31 Jan 2024 17:33:32 -0300 Subject: [PATCH 153/153] Update import statement for retrievers module --- src/backend/langflow/interface/retrievers/base.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/interface/retrievers/base.py b/src/backend/langflow/interface/retrievers/base.py index 63d0ef915..a6813e76c 100644 --- a/src/backend/langflow/interface/retrievers/base.py +++ b/src/backend/langflow/interface/retrievers/base.py @@ -1,13 +1,12 @@ from typing import Any, ClassVar, Dict, List, Optional, Type -from langchain import retrievers -from loguru import logger - +from langchain_community import retrievers from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class from langflow.services.deps import get_settings_service from langflow.template.frontend_node.retrievers import RetrieverFrontendNode from langflow.utils.util import build_template_from_class, build_template_from_method +from loguru import logger class RetrieverCreator(LangChainTypeCreator):