From 866398111f424ff4f55b965ccf19254e19c01ad3 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Mon, 19 Jun 2023 23:02:06 +0100 Subject: [PATCH 01/90] WIP: Login auth and JWT encode/decode --- poetry.lock | 316 +++++++--------------------------- pyproject.toml | 3 + src/backend/langflow/login.py | 143 +++++++++++++++ src/backend/langflow/main.py | 142 ++++++++++++++- 4 files changed, 350 insertions(+), 254 deletions(-) create mode 100644 src/backend/langflow/login.py diff --git a/poetry.lock b/poetry.lock index 015f3caaf..7567c137e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aiofiles" version = "23.1.0" description = "File support for asyncio." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -16,7 +15,6 @@ files = [ name = "aiohttp" version = "3.8.4" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -125,7 +123,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -140,7 +137,6 @@ frozenlist = ">=1.1.0" name = "aiostream" version = "0.4.5" description = "Generator-based operators for asynchronous iteration" -category = "main" optional = false python-versions = "*" files = [ @@ -152,7 +148,6 @@ files = [ name = "anthropic" version = "0.2.10" description = "Library for accessing the anthropic API" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -173,7 +168,6 @@ dev = ["black (>=22.3.0)", "pytest"] name = "anyio" version = "3.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -195,7 +189,6 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -207,7 +200,6 @@ files = [ name = "argilla" version = "0.0.1" description = "" -category = "main" optional = false python-versions = "*" files = [ @@ -219,7 +211,6 @@ files = [ name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -237,7 +228,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -255,7 +245,6 @@ test = ["astroid", "pytest"] name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -267,7 +256,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -286,7 +274,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "authlib" version = "1.2.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
-category = "main" optional = false python-versions = "*" files = [ @@ -301,7 +288,6 @@ cryptography = ">=3.2" name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -313,7 +299,6 @@ files = [ name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -321,11 +306,44 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] +[[package]] +name = "bcrypt" +version = "4.0.1" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = 
"sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, + {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, + {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, + {file = "bcrypt-4.0.1.tar.gz", hash = 
"sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + [[package]] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -344,7 +362,6 @@ lxml = ["lxml"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -394,7 +411,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -406,7 +422,6 @@ files = [ name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -418,7 +433,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -495,7 +509,6 @@ pycparser = "*" name = "chardet" version = "5.1.0" description = "Universal encoding detector for Python 3" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -507,7 +520,6 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -592,7 +604,6 @@ files = [ name = "chromadb" version = "0.3.26" description = "Chroma." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -622,7 +633,6 @@ uvicorn = {version = ">=0.18.3", extras = ["standard"]} name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -637,7 +647,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "clickhouse-connect" version = "0.6.3" description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" -category = "main" optional = false python-versions = "~=3.7" files = [ @@ -726,7 +735,6 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] name = "cohere" version = "4.9.0" description = "" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -743,7 +751,6 @@ requests = ">=2.0,<3.0" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -755,7 +762,6 @@ files = [ name = "coloredlogs" version = "15.0.1" description = "Colored terminal output for Python's logging module" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -773,7 +779,6 @@ cron = ["capturer (>=2.4)"] name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -793,7 +798,6 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -869,7 +873,6 @@ toml = ["tomli"] name = "cryptography" version = "41.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -911,7 +914,6 @@ test-randomorder = ["pytest-randomly"] name = "ctransformers" version = "0.2.8" description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." -category = "main" optional = false python-versions = "*" files = [ @@ -929,7 +931,6 @@ tests = ["pytest"] name = "dataclasses-json" version = "0.5.8" description = "Easily serialize dataclasses to and from JSON" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -949,7 +950,6 @@ dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest ( name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -977,7 +977,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -989,7 +988,6 @@ files = [ name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1007,7 +1005,6 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "dill" version = "0.3.6" description = "serialize all of python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1022,7 +1019,6 @@ graph = ["objgraph (>=1.7.2)"] name = "docarray" version = "0.21.0" description = "The data structure for unstructured data" -category = "main" optional = false python-versions = "*" files = [ @@ -1051,7 +1047,6 @@ weaviate = ["weaviate-client (>=3.9.0,<3.10.0)"] name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1073,7 +1068,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.15" description = "Parse Python docstrings in reST, Google and Numpydoc format" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -1085,7 +1079,6 @@ files = [ name = "duckdb" version = "0.8.1" description = "DuckDB embedded database" -category = "main" optional = false python-versions = "*" files = [ @@ -1147,7 +1140,6 @@ files = [ name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1166,7 +1158,6 @@ gmpy2 = ["gmpy2"] name = "et-xmlfile" version = "1.1.0" description = "An implementation of lxml.xmlfile for the standard library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1178,7 +1169,6 @@ files = [ name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1193,7 +1183,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -1208,7 +1197,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "faiss-cpu" version = "1.7.4" description = "A library for efficient similarity search and clustering of dense vectors." 
-category = "main" optional = false python-versions = "*" files = [ @@ -1243,7 +1231,6 @@ files = [ name = "fake-useragent" version = "1.1.3" description = "Up-to-date simple useragent faker with real world database" -category = "main" optional = false python-versions = "*" files = [ @@ -1258,7 +1245,6 @@ importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""} name = "fastapi" version = "0.97.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1277,7 +1263,6 @@ all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" name = "filelock" version = "3.12.2" description = "A platform independent file lock." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1293,7 +1278,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "flatbuffers" version = "23.5.26" description = "The FlatBuffers serialization format for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1305,7 +1289,6 @@ files = [ name = "frozenlist" version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1389,7 +1372,6 @@ files = [ name = "google-api-core" version = "2.11.1" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1412,7 +1394,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-api-python-client" version = "2.89.0" description = "Google API Client Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1421,7 +1402,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" google-auth 
= ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" httplib2 = ">=0.15.0,<1.dev0" @@ -1431,7 +1412,6 @@ uritemplate = ">=3.0.1,<5" name = "google-auth" version = "2.20.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1457,7 +1437,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-httplib2" version = "0.1.0" description = "Google Authentication Library: httplib2 transport" -category = "main" optional = false python-versions = "*" files = [ @@ -1474,7 +1453,6 @@ six = "*" name = "google-search-results" version = "2.4.2" description = "Scrape and search localized results from Google, Bing, Baidu, Yahoo, Yandex, Ebay, Homedepot, youtube at scale using SerpApi.com" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1488,7 +1466,6 @@ requests = "*" name = "googleapis-common-protos" version = "1.59.1" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1506,7 +1483,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1580,7 +1556,6 @@ test = ["objgraph", "psutil"] name = "grpcio" version = "1.47.5" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1642,7 +1617,6 @@ protobuf = ["grpcio-tools (>=1.47.5)"] name = "grpcio-health-checking" version = "1.47.5" description = "Standard Health Checking Service for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1658,7 +1632,6 @@ protobuf = ">=3.12.0" name = "grpcio-reflection" version = "1.47.5" description = "Standard Protobuf Reflection Service for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ 
@@ -1674,7 +1647,6 @@ protobuf = ">=3.12.0" name = "grpcio-tools" version = "1.47.5" description = "Protobuf code generator for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1735,7 +1707,6 @@ setuptools = "*" name = "gunicorn" version = "20.1.0" description = "WSGI HTTP Server for UNIX" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1756,7 +1727,6 @@ tornado = ["tornado (>=0.2)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1768,7 +1738,6 @@ files = [ name = "h2" version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -1784,7 +1753,6 @@ hyperframe = ">=6.0,<7" name = "hnswlib" version = "0.7.0" description = "hnswlib" -category = "main" optional = false python-versions = "*" files = [ @@ -1798,7 +1766,6 @@ numpy = "*" name = "hpack" version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -1810,7 +1777,6 @@ files = [ name = "httpcore" version = "0.16.3" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1822,17 +1788,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httplib2" version = "0.22.0" description = "A comprehensive HTTP client library." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1847,7 +1812,6 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 name = "httptools" version = "0.5.0" description = "A collection of framework independent HTTP protocol utils." -category = "main" optional = false python-versions = ">=3.5.0" files = [ @@ -1901,7 +1865,6 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "httpx" version = "0.23.3" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1918,15 +1881,14 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" version = "0.13.4" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -1957,7 +1919,6 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1972,7 +1933,6 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve name = "hyperframe" version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -1984,7 +1944,6 @@ files = [ name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1996,7 +1955,6 
@@ files = [ name = "importlib-metadata" version = "4.13.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2016,7 +1974,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2035,7 +1992,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2047,7 +2003,6 @@ files = [ name = "ipykernel" version = "6.23.2" description = "IPython Kernel for Jupyter" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2061,7 +2016,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -2081,7 +2036,6 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.14.0" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -2121,7 +2075,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "jcloud" version = "0.2.12" description = "Simplify deploying and managing Jina projects on Jina Cloud" -category = "main" optional = false python-versions = "*" files = [ @@ -2144,7 +2097,6 @@ test = ["black (==22.3.0)", "jina (>=3.7.0)", "mock", "pytest", "pytest-asyncio" name = "jedi" version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2164,7 +2116,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jina" version = "3.15.2" description = "Build multimodal AI services via cloud native technologies · Neural Search · Generative AI · MLOps" -category = "main" optional = false python-versions = "*" files = [ @@ -2282,7 +2233,6 @@ websockets = ["websockets"] name = "jina-hubble-sdk" version = "0.38.0" description = "SDK for Hubble API at Jina AI." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2308,7 +2258,6 @@ full = ["aiohttp", "black (==22.3.0)", "docker", "filelock", "flake8 (==4.0.1)", name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2326,7 +2275,6 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.2.0" description = "Lightweight pipelining with Python functions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2338,7 +2286,6 @@ files = [ name = "jupyter-client" version = "8.2.0" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2348,7 +2295,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -2362,7 +2309,6 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2383,7 +2329,6 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "langchain" version = "0.0.202" description = "Building applications with LLMs through composability" -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -2421,7 +2366,6 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] name = "langchain-serve" version = "0.0.45" description = "Langchain Serve - serve your langchain apps on Jina AI Cloud." -category = "main" optional = true python-versions = "*" files = [ @@ -2446,7 +2390,6 @@ test = ["psutil", "pytest", "pytest-asyncio"] name = "langchainplus-sdk" version = "0.0.10" description = "Client library to connect to the LangChainPlus LLM Tracing and Evaluation Platform." -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -2463,7 +2406,6 @@ tenacity = ">=8.1.0,<9.0.0" name = "llama-cpp-python" version = "0.1.55" description = "A Python wrapper for llama.cpp" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2477,7 +2419,6 @@ typing-extensions = ">=4.5.0,<5.0.0" name = "lxml" version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -2570,7 +2511,6 @@ source = ["Cython (>=0.29.7)"] name = "lz4" version = "4.3.2" description = "LZ4 Bindings for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2620,7 +2560,6 @@ tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] name = "markdown" version = "3.4.3" description = "Python implementation of John Gruber's Markdown." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2638,7 +2577,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2663,7 +2601,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2723,7 +2660,6 @@ files = [ name = "marshmallow" version = "3.19.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2744,7 +2680,6 @@ tests = ["pytest", "pytz", "simplejson"] name = "marshmallow-enum" version = "1.5.1" description = "Enum field for Marshmallow" -category = "main" optional = false python-versions = "*" files = [ @@ -2759,7 +2694,6 @@ marshmallow = ">=2.0.0" name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2774,7 +2708,6 @@ traitlets = "*" name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2786,7 +2719,6 @@ files = [ name = "monotonic" version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" -category = "main" optional = false python-versions = "*" files = [ @@ -2798,7 +2730,6 @@ files = [ name = "mpmath" version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" -category = "main" optional = false python-versions = "*" files = [ @@ -2816,7 +2747,6 @@ tests = ["pytest (>=4.6)"] name = "msg-parser" version = "1.2.0" description = "This module enables 
reading, parsing and converting Microsoft Outlook MSG E-Mail files." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -2834,7 +2764,6 @@ rtf = ["compressed-rtf (>=1.0.5)"] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2918,7 +2847,6 @@ files = [ name = "multiprocess" version = "0.70.14" description = "better multiprocessing and multithreading in python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2945,7 +2873,6 @@ dill = ">=0.3.6" name = "mypy" version = "1.3.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2992,7 +2919,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3004,7 +2930,6 @@ files = [ name = "nanoid" version = "2.0.0" description = "A tiny, secure, URL-friendly, unique string ID generator for Python" -category = "main" optional = true python-versions = "*" files = [ @@ -3016,7 +2941,6 @@ files = [ name = "nest-asyncio" version = "1.5.6" description = "Patch asyncio to allow nested event loops" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3028,7 +2952,6 @@ files = [ name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3047,7 +2970,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nltk" version = "3.8.1" description = "Natural Language Toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3073,7 +2995,6 @@ twitter = ["twython"] name = "numexpr" version = "2.8.4" description = "Fast numerical expression evaluator 
for NumPy" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3116,7 +3037,6 @@ numpy = ">=1.13.3" name = "numpy" version = "1.25.0" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3151,7 +3071,6 @@ files = [ name = "olefile" version = "0.46" description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3162,7 +3081,6 @@ files = [ name = "onnxruntime" version = "1.15.1" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -category = "main" optional = false python-versions = "*" files = [ @@ -3204,7 +3122,6 @@ sympy = "*" name = "openai" version = "0.27.8" description = "Python client library for the OpenAI API" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -3219,7 +3136,7 @@ tqdm = "*" [package.extras] datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] @@ -3227,7 +3144,6 @@ wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1 name = "openapi-schema-pydantic" version = "1.2.4" description = "OpenAPI (v3) specification schema as pydantic class" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -3242,7 +3158,6 @@ pydantic = ">=1.8.2" name = "openpyxl" version = "3.1.2" 
description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3257,7 +3172,6 @@ et-xmlfile = "*" name = "opentelemetry-api" version = "1.16.0" description = "OpenTelemetry Python API" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3273,7 +3187,6 @@ setuptools = ">=16.0" name = "opentelemetry-exporter-otlp" version = "1.16.0" description = "OpenTelemetry Collector Exporters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3289,7 +3202,6 @@ opentelemetry-exporter-otlp-proto-http = "1.16.0" name = "opentelemetry-exporter-otlp-proto-grpc" version = "1.16.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3312,7 +3224,6 @@ test = ["pytest-grpc"] name = "opentelemetry-exporter-otlp-proto-http" version = "1.16.0" description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3335,7 +3246,6 @@ test = ["responses (==0.22.0)"] name = "opentelemetry-exporter-prometheus" version = "1.12.0rc1" description = "Prometheus Metric Exporter for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3352,7 +3262,6 @@ prometheus-client = ">=0.5.0,<1.0.0" name = "opentelemetry-instrumentation" version = "0.37b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3369,7 +3278,6 @@ wrapt = ">=1.0.0,<2.0.0" name = "opentelemetry-instrumentation-aiohttp-client" version = "0.37b0" description = "OpenTelemetry aiohttp client instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3392,7 +3300,6 @@ test = ["opentelemetry-instrumentation-aiohttp-client[instruments]"] name = 
"opentelemetry-instrumentation-asgi" version = "0.37b0" description = "ASGI instrumentation for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3415,7 +3322,6 @@ test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-u name = "opentelemetry-instrumentation-fastapi" version = "0.37b0" description = "OpenTelemetry FastAPI Instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3438,7 +3344,6 @@ test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instrument name = "opentelemetry-instrumentation-grpc" version = "0.37b0" description = "OpenTelemetry gRPC instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3461,7 +3366,6 @@ test = ["opentelemetry-instrumentation-grpc[instruments]", "opentelemetry-sdk (> name = "opentelemetry-proto" version = "1.16.0" description = "OpenTelemetry Python Proto" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3476,7 +3380,6 @@ protobuf = ">=3.19,<5.0" name = "opentelemetry-sdk" version = "1.16.0" description = "OpenTelemetry Python SDK" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3494,7 +3397,6 @@ typing-extensions = ">=3.7.4" name = "opentelemetry-semantic-conventions" version = "0.37b0" description = "OpenTelemetry Semantic Conventions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3506,7 +3408,6 @@ files = [ name = "opentelemetry-util-http" version = "0.37b0" description = "Web util for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3518,7 +3419,6 @@ files = [ name = "orjson" version = "3.9.1" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3574,7 +3474,6 @@ files = [ name = "overrides" version = "7.3.1" description = "A 
decorator to automatically detect mismatch when overriding a method." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3586,7 +3485,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3598,7 +3496,6 @@ files = [ name = "pandas" version = "1.5.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3647,7 +3544,6 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] name = "pandas-stubs" version = "2.0.2.230605" description = "Type annotations for pandas" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3663,7 +3559,6 @@ types-pytz = ">=2022.1.1" name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3675,11 +3570,27 @@ files = [ qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["docopt", "pytest (<6.0.0)"] +[[package]] +name = "passlib" +version = "1.7.4" +description = "comprehensive password hashing framework supporting over 30 schemes" +optional = false +python-versions = "*" +files = [ + {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, + {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, +] + +[package.extras] +argon2 = ["argon2-cffi (>=18.2.0)"] +bcrypt = ["bcrypt (>=3.1.0)"] +build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] +totp = ["cryptography"] + [[package]] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3691,7 +3602,6 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" optional = false python-versions = "*" files = [ @@ -3706,7 +3616,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -3718,7 +3627,6 @@ files = [ name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3798,7 +3706,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "platformdirs" version = "3.6.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3814,7 +3721,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3830,7 +3736,6 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.7.0" description = "Wraps the portalocker recipe for easy usage" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3850,7 +3755,6 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "posthog" version = "3.0.1" description = "Integrate PostHog into any python application." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3874,7 +3778,6 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint" name = "prometheus-client" version = "0.17.0" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3889,7 +3792,6 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.38" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3904,7 +3806,6 @@ wcwidth = "*" name = "protobuf" version = "3.20.3" description = "Protocol Buffers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3936,7 +3837,6 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3963,7 +3863,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2-binary" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4035,7 +3934,6 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -4047,7 +3945,6 @@ files = [ name = "pulsar-client" version = "3.2.0" description = "Apache Pulsar Python client library" -category = "main" optional = false python-versions = "*" files = [ @@ -4095,7 +3992,6 @@ functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.8.2)", "prometh name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -4110,7 +4006,6 @@ tests = ["pytest"] name = "pyarrow" 
version = "12.0.1" description = "Python library for Apache Arrow" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4148,7 +4043,6 @@ numpy = ">=1.16.6" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -4160,7 +4054,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -4175,7 +4068,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4187,7 +4079,6 @@ files = [ name = "pydantic" version = "1.10.9" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4240,7 +4131,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4255,7 +4145,6 @@ plugins = ["importlib-metadata"] name = "pypandoc" version = "1.11" description = "Thin wrapper for pandoc." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4267,7 +4156,6 @@ files = [ name = "pyparsing" version = "3.0.9" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -4282,7 +4170,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pypdf" version = "3.10.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4304,7 +4191,6 @@ image = ["Pillow"] name = "pyreadline3" version = "3.4.1" description = "A python implementation of GNU readline." -category = "main" optional = false python-versions = "*" files = [ @@ -4316,7 +4202,6 @@ files = [ name = "pysrt" version = "1.1.2" description = "SubRip (.srt) subtitle parser and writer" -category = "main" optional = false python-versions = "*" files = [ @@ -4330,7 +4215,6 @@ chardet = "*" name = "pytest" version = "7.3.2" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4353,7 +4237,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4372,7 +4255,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -4387,7 +4269,6 @@ six = ">=1.5" name = "python-docx" version = "0.8.11" description = "Create and update Microsoft Word .docx files." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4401,7 +4282,6 @@ lxml = ">=2.3.2" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4416,7 +4296,6 @@ cli = ["click (>=5.0)"] name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" -category = "main" optional = false python-versions = "*" files = [ @@ -4438,7 +4317,6 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "python-magic" version = "0.4.27" description = "File type identification using libmagic" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -4450,7 +4328,6 @@ files = [ name = "python-multipart" version = "0.0.6" description = "A streaming multipart parser for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4465,7 +4342,6 @@ dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatc name = "python-pptx" version = "0.6.21" description = "Generate and manipulate Open XML PowerPoint (.pptx) files" -category = "main" optional = false python-versions = "*" files = [ @@ -4481,7 +4357,6 @@ XlsxWriter = ">=0.5.7" name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -4493,7 +4368,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -4517,7 +4391,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4567,7 +4440,6 @@ files = [ name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" -category = "dev" 
optional = false python-versions = ">=3.6" files = [ @@ -4657,7 +4529,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qdrant-client" version = "1.2.0" description = "Client library for the Qdrant vector search engine" -category = "main" optional = false python-versions = ">=3.7,<3.12" files = [ @@ -4679,7 +4550,6 @@ urllib3 = ">=1.26.14,<2.0.0" name = "regex" version = "2023.6.3" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4777,7 +4647,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4799,7 +4668,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3986" version = "1.5.0" description = "Validating URI References per RFC 3986" -category = "main" optional = false python-versions = "*" files = [ @@ -4817,7 +4685,6 @@ idna2008 = ["idna"] name = "rich" version = "13.4.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -4836,7 +4703,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -4851,7 +4717,6 @@ pyasn1 = ">=0.1.3" name = "ruff" version = "0.0.254" description = "An extremely fast Python linter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4878,7 +4743,6 @@ files = [ name = "scikit-learn" version = "1.2.2" description = "A set of python modules for machine learning and data mining" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4921,7 +4785,6 @@ tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy ( name = "scipy" version = "1.10.1" description = "Fundamental algorithms for scientific computing in Python" -category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -4960,7 +4823,6 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "sentence-transformers" version = "2.2.2" description = "Multilingual text embeddings" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -4983,7 +4845,6 @@ transformers = ">=4.6.0,<5.0.0" name = "sentencepiece" version = "0.1.99" description = "SentencePiece python wrapper" -category = "main" optional = false python-versions = "*" files = [ @@ -5038,7 +4899,6 @@ files = [ name = "setuptools" version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5055,7 +4915,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -5067,7 +4926,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5079,7 +4937,6 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5091,7 +4948,6 @@ files = [ name = "sqlalchemy" version = "1.4.41" description = "Database Abstraction Library" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -5139,7 +4995,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -5166,7 +5022,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "sqlalchemy2-stubs" version = "0.0.2a34" description = "Typing Stubs for SQLAlchemy 1.4" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5181,7 +5036,6 @@ typing-extensions = ">=3.7.4" name = "sqlmodel" version = "0.0.8" description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." 
-category = "main" optional = false python-versions = ">=3.6.1,<4.0.0" files = [ @@ -5198,7 +5052,6 @@ sqlalchemy2-stubs = "*" name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -5218,7 +5071,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "starlette" version = "0.27.0" description = "The little ASGI library that shines." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5237,7 +5089,6 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam name = "sympy" version = "1.12" description = "Computer algebra system (CAS) in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5252,7 +5103,6 @@ mpmath = ">=0.19" name = "tenacity" version = "8.2.2" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5267,7 +5117,6 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] name = "textual" version = "0.10.1" description = "Modern Text User Interface framework" -category = "main" optional = true python-versions = ">=3.7,<4.0" files = [ @@ -5288,7 +5137,6 @@ dev = ["aiohttp (>=3.8.1)", "click (>=8.1.2)", "msgpack (>=1.0.3)"] name = "threadpoolctl" version = "3.1.0" description = "threadpoolctl" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5300,7 +5148,6 @@ files = [ name = "tiktoken" version = "0.4.0" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5346,7 +5193,6 @@ blobfile = ["blobfile (>=2)"] name = "tokenizers" version = "0.13.3" description = "Fast and Customizable Tokenizers" -category = "main" optional = false python-versions = "*" files = [ @@ -5401,7 +5247,6 @@ testing = ["black (==22.3)", "datasets", "numpy", 
"pytest", "requests"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" optional = true python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -5413,7 +5258,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5425,7 +5269,6 @@ files = [ name = "torch" version = "2.0.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -5465,7 +5308,6 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "torchvision" version = "0.15.2" description = "image and video datasets and models for torch deep learning" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5493,7 +5335,7 @@ files = [ [package.dependencies] numpy = "*" -pillow = ">=5.3.0,<8.3.0 || >=8.4.0" +pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" requests = "*" torch = "2.0.1" @@ -5504,7 +5346,6 @@ scipy = ["scipy"] name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -5525,7 +5366,6 @@ files = [ name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5546,7 +5386,6 @@ telegram = ["requests"] name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -5562,7 +5401,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "transformers" version = "4.29.0" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -5630,7 +5468,6 @@ vision = ["Pillow"] name = "typer" version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5652,7 +5489,6 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
name = "types-cachetools" version = "5.3.0.5" description = "Typing stubs for cachetools" -category = "main" optional = false python-versions = "*" files = [ @@ -5664,7 +5500,6 @@ files = [ name = "types-pillow" version = "9.5.0.4" description = "Typing stubs for Pillow" -category = "dev" optional = false python-versions = "*" files = [ @@ -5676,7 +5511,6 @@ files = [ name = "types-pytz" version = "2023.3.0.0" description = "Typing stubs for pytz" -category = "dev" optional = false python-versions = "*" files = [ @@ -5688,7 +5522,6 @@ files = [ name = "types-pyyaml" version = "6.0.12.10" description = "Typing stubs for PyYAML" -category = "main" optional = false python-versions = "*" files = [ @@ -5700,7 +5533,6 @@ files = [ name = "types-requests" version = "2.31.0.1" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -5715,7 +5547,6 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.13" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -5727,7 +5558,6 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5739,7 +5569,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -5755,7 +5584,6 @@ typing-extensions = ">=3.7.4" name = "unstructured" version = "0.5.13" description = "A library that prepares raw documents for downstream ML tasks." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -5795,7 +5623,6 @@ wikipedia = ["wikipedia"] name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5807,7 +5634,6 @@ files = [ name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -5824,7 +5650,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uvicorn" version = "0.22.0" description = "The lightning-fast ASGI server." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5839,7 +5664,7 @@ h11 = ">=0.8" httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -5850,7 +5675,6 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "uvloop" version = "0.17.0" description = "Fast implementation of asyncio event loop on top of libuv" -category = "main" optional = false python-versions = ">=3.7" 
files = [ @@ -5895,7 +5719,6 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -5912,7 +5735,6 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "watchfiles" version = "0.19.0" description = "Simple, modern and high performance file watching and code reload in python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5947,7 +5769,6 @@ anyio = ">=3.0.0" name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -5959,7 +5780,6 @@ files = [ name = "weaviate-client" version = "3.21.0" description = "A python native Weaviate client" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5980,7 +5800,6 @@ grpc = ["grpcio", "grpcio-tools"] name = "websocket-client" version = "1.6.0" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5997,7 +5816,6 @@ test = ["websockets"] name = "websockets" version = "11.0.3" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6077,7 +5895,6 @@ files = [ name = "wikipedia" version = "1.4.0" description = "Wikipedia API for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -6092,7 +5909,6 @@ requests = ">=2.0.0,<3.0.0" name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -6177,7 +5993,6 @@ files = [ name = "xlsxwriter" version = "3.1.2" description = "A Python module for creating Excel XLSX files." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6189,7 +6004,6 @@ files = [ name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6277,7 +6091,6 @@ multidict = ">=4.0" name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6293,7 +6106,6 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zstandard" version = "0.21.0" description = "Zstandard bindings for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6354,4 +6166,4 @@ deploy = ["langchain-serve"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "6b382b428d0c1d43bd76917dccc88e524c5ae6ecbfea59d07ff977aa030fd7f4" +content-hash = "0693d2e0c5820f1729c551328f82ae5f02ccf3a6da15a806dccc63df8d7c7c8a" diff --git a/pyproject.toml b/pyproject.toml index 4a383f6fc..62d6d2687 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,9 @@ orjson = "^3.9.1" multiprocess = "^0.70.14" cachetools = "^5.3.1" types-cachetools = "^5.3.0.5" +python-jose = "^3.3.0" +passlib = "^1.7.4" +bcrypt = "^4.0.1" [tool.poetry.group.dev.dependencies] diff --git a/src/backend/langflow/login.py b/src/backend/langflow/login.py new file mode 100644 index 000000000..c2782be56 --- /dev/null +++ b/src/backend/langflow/login.py @@ -0,0 +1,143 @@ +from datetime import datetime, timedelta +from typing import Annotated + +from fastapi import Depends, FastAPI, HTTPException, status +from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm 
+from jose import JWTError, jwt +from passlib.context import CryptContext +from pydantic import BaseModel + +# to get a string like this run: +# openssl rand -hex 32 +SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7" +ALGORITHM = "HS256" +ACCESS_TOKEN_EXPIRE_MINUTES = 30 + + +fake_users_db = { + "johndoe": { + "username": "johndoe", + "full_name": "John Doe", + "email": "johndoe@example.com", + "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW", + "disabled": False, + } +} + + +class Token(BaseModel): + access_token: str + token_type: str + + +class TokenData(BaseModel): + username: str | None = None + + +class User(BaseModel): + username: str + email: str | None = None + full_name: str | None = None + disabled: bool | None = None + + +class UserInDB(User): + hashed_password: str + + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") + + +def verify_password(plain_password, hashed_password): + return pwd_context.verify(plain_password, hashed_password) + + +def get_password_hash(password): + return pwd_context.hash(password) + + +def get_user(db, username: str): + if username in db: + user_dict = db[username] + return UserInDB(**user_dict) + + +def authenticate_user(fake_db, username: str, password: str): + user = get_user(fake_db, username) + if not user: + return False + if not verify_password(password, user.hashed_password): + return False + return user + + +def create_access_token(data: dict, expires_delta: timedelta | None = None): + to_encode = data.copy() + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=15) + to_encode.update({"exp": expire}) + encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) + return encoded_jwt + + +async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): + credentials_exception = HTTPException( + 
status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + username: str = payload.get("sub") + if username is None: + raise credentials_exception + token_data = TokenData(username=username) + except JWTError: + raise credentials_exception + user = get_user(fake_users_db, username=token_data.username) + if user is None: + raise credentials_exception + return user + + +async def get_current_active_user( + current_user: Annotated[User, Depends(get_current_user)] +): + if current_user.disabled: + raise HTTPException(status_code=400, detail="Inactive user") + return current_user + + +@app.post("/token", response_model=Token) +async def login_for_access_token( + form_data: Annotated[OAuth2PasswordRequestForm, Depends()] +): + user = authenticate_user(fake_users_db, form_data.username, form_data.password) + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Incorrect username or password", + headers={"WWW-Authenticate": "Bearer"}, + ) + access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) + access_token = create_access_token( + data={"sub": user.username}, expires_delta=access_token_expires + ) + return {"access_token": access_token, "token_type": "bearer"} + + +@app.get("/users/me/", response_model=User) +async def read_users_me( + current_user: Annotated[User, Depends(get_current_active_user)] +): + return current_user + + +@app.get("/users/me/items/") +async def read_own_items( + current_user: Annotated[User, Depends(get_current_active_user)] +): + return [{"item_id": "Foo", "owner": current_user.username}] \ No newline at end of file diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index ad3217eb5..78ac1e75f 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -4,15 +4,154 @@ from fastapi.middleware.cors import 
CORSMiddleware from langflow.api import router from langflow.database.base import create_db_and_tables +from datetime import datetime, timedelta +from typing import Annotated + +from fastapi import Depends, HTTPException, status +from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm +from jose import JWTError, jwt +from passlib.context import CryptContext +from pydantic import BaseModel + +# to get a string like this run: +# openssl rand -hex 32 +SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7" +ALGORITHM = "HS256" +ACCESS_TOKEN_EXPIRE_MINUTES = 30 + + +fake_users_db = { + "johndoe": { + "username": "johndoe", + "full_name": "John Doe", + "email": "johndoe@example.com", + "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW", + "disabled": False, + } +} + + +class Token(BaseModel): + access_token: str + token_type: str + + +class TokenData(BaseModel): + username: str | None = None + + +class User(BaseModel): + username: str + email: str | None = None + full_name: str | None = None + disabled: bool | None = None + + +class UserInDB(User): + hashed_password: str + + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") + + +def verify_password(plain_password, hashed_password): + return pwd_context.verify(plain_password, hashed_password) + + +def get_password_hash(password): + return pwd_context.hash(password) + + +def get_user(db, username: str): + if username in db: + user_dict = db[username] + return UserInDB(**user_dict) + + +def authenticate_user(fake_db, username: str, password: str): + user = get_user(fake_db, username) + if not user: + return False + if not verify_password(password, user.hashed_password): + return False + return user + + +def create_access_token(data: dict, expires_delta: timedelta | None = None): + to_encode = data.copy() + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: 
+ expire = datetime.utcnow() + timedelta(minutes=15) + to_encode.update({"exp": expire}) + encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) + return encoded_jwt + + +async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): + credentials_exception = HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + username: str = payload.get("sub") + if username is None: + raise credentials_exception + token_data = TokenData(username=username) + except JWTError: + raise credentials_exception + user = get_user(fake_users_db, username=token_data.username) + if user is None: + raise credentials_exception + return user + + +async def get_current_active_user(current_user: Annotated[User, Depends(get_current_user)]): + if current_user.disabled: + raise HTTPException(status_code=400, detail="Inactive user") + return current_user + def create_app(): """Create the FastAPI app and include the router.""" - app = FastAPI() origins = [ "*", ] + + @app.post("/token", response_model=Token) + async def login_for_access_token(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]): + user = authenticate_user(fake_users_db, form_data.username, form_data.password) + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Incorrect username or password", + headers={"WWW-Authenticate": "Bearer"}, + ) + access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) + access_token = create_access_token( + data={"sub": user.username}, + expires_delta=access_token_expires + ) + return {"access_token": access_token, "token_type": "bearer"} + + + @app.get("/users/me/", response_model=User) + async def read_users_me( + current_user: Annotated[User, Depends(get_current_active_user)] + ): + return current_user + + + @app.get("/users/me/items/") + async def 
read_own_items( + current_user: Annotated[User, Depends(get_current_active_user)] + ): + return [{"item_id": "Foo", "owner": current_user.username}] @app.get("/health") def get_health(): @@ -36,5 +175,4 @@ app = create_app() if __name__ == "__main__": import uvicorn - uvicorn.run(app, host="127.0.0.1", port=7860) From ddd795b2f4667b1cc06b3b8499752252018322d7 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Mon, 19 Jun 2023 23:50:19 +0100 Subject: [PATCH 02/90] Add authentication and authorization functionality The commit adds the auth module and updates dependencies. It includes authentication and authorization functionality and models to create access tokens. The endpoints for login and users/me have been removed and moved to the appropriate modules. These changes have improved security and code organization. --- src/backend/langflow/auth/__init__.py | 0 src/backend/langflow/auth/auth.py | 72 +++++++++++ src/backend/langflow/login.py | 143 --------------------- src/backend/langflow/main.py | 154 ++--------------------- src/backend/langflow/models/__init__.py | 0 src/backend/langflow/models/token.py | 10 ++ src/backend/langflow/models/user.py | 29 +++++ src/backend/langflow/routers/__init__.py | 0 src/backend/langflow/routers/health.py | 8 ++ src/backend/langflow/routers/items.py | 12 ++ src/backend/langflow/routers/login.py | 35 ++++++ src/backend/langflow/routers/users.py | 10 ++ 12 files changed, 183 insertions(+), 290 deletions(-) create mode 100644 src/backend/langflow/auth/__init__.py create mode 100644 src/backend/langflow/auth/auth.py delete mode 100644 src/backend/langflow/login.py create mode 100644 src/backend/langflow/models/__init__.py create mode 100644 src/backend/langflow/models/token.py create mode 100644 src/backend/langflow/models/user.py create mode 100644 src/backend/langflow/routers/__init__.py create mode 100644 src/backend/langflow/routers/health.py create mode 100644 src/backend/langflow/routers/items.py create mode 100644 
src/backend/langflow/routers/login.py create mode 100644 src/backend/langflow/routers/users.py diff --git a/src/backend/langflow/auth/__init__.py b/src/backend/langflow/auth/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/auth/auth.py b/src/backend/langflow/auth/auth.py new file mode 100644 index 000000000..503b2bd5b --- /dev/null +++ b/src/backend/langflow/auth/auth.py @@ -0,0 +1,72 @@ +from typing import Annotated + +from fastapi import Depends, HTTPException, status +from passlib.context import CryptContext +from jose import JWTError, jwt +from datetime import datetime, timedelta, timezone +from fastapi.security import OAuth2PasswordBearer +from ..models.token import TokenData +from ..models.user import get_user, fake_users_db, User + + +SECRET_KEY = "your_secret_key" +ALGORITHM = "HS256" +ACCESS_TOKEN_EXPIRE_MINUTES = 30 + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") + + +def verify_password(plain_password, hashed_password): + return pwd_context.verify(plain_password, hashed_password) + + +def get_password_hash(password): + return pwd_context.hash(password) + + +def create_access_token(data: dict, expires_delta: timedelta = None): + to_encode = data.copy() + if expires_delta: + expire = datetime.now(timezone.utc) + expires_delta + else: + expire = datetime.now(timezone.utc) + timedelta(minutes=15) + to_encode["exp"] = expire + return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) + + +def authenticate_user(fake_db, username: str, password: str): + user = get_user(fake_db, username) + if not user: + return False + if not verify_password(password, user.hashed_password): + return False + return user + + +async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): + credentials_exception = HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": 
"Bearer"}, + ) + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + username: str = payload.get("sub") + if username is None: + raise credentials_exception + token_data = TokenData(username=username) + except JWTError: + raise credentials_exception + user = get_user(fake_users_db, username=token_data.username) + if user is None: + raise credentials_exception + return user + + +async def get_current_active_user( + current_user: Annotated[User, Depends(get_current_user)] +): + if current_user.disabled: + raise HTTPException(status_code=400, detail="Inactive user") + return current_user diff --git a/src/backend/langflow/login.py b/src/backend/langflow/login.py deleted file mode 100644 index c2782be56..000000000 --- a/src/backend/langflow/login.py +++ /dev/null @@ -1,143 +0,0 @@ -from datetime import datetime, timedelta -from typing import Annotated - -from fastapi import Depends, FastAPI, HTTPException, status -from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm -from jose import JWTError, jwt -from passlib.context import CryptContext -from pydantic import BaseModel - -# to get a string like this run: -# openssl rand -hex 32 -SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7" -ALGORITHM = "HS256" -ACCESS_TOKEN_EXPIRE_MINUTES = 30 - - -fake_users_db = { - "johndoe": { - "username": "johndoe", - "full_name": "John Doe", - "email": "johndoe@example.com", - "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW", - "disabled": False, - } -} - - -class Token(BaseModel): - access_token: str - token_type: str - - -class TokenData(BaseModel): - username: str | None = None - - -class User(BaseModel): - username: str - email: str | None = None - full_name: str | None = None - disabled: bool | None = None - - -class UserInDB(User): - hashed_password: str - - -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") -oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") 
- - -def verify_password(plain_password, hashed_password): - return pwd_context.verify(plain_password, hashed_password) - - -def get_password_hash(password): - return pwd_context.hash(password) - - -def get_user(db, username: str): - if username in db: - user_dict = db[username] - return UserInDB(**user_dict) - - -def authenticate_user(fake_db, username: str, password: str): - user = get_user(fake_db, username) - if not user: - return False - if not verify_password(password, user.hashed_password): - return False - return user - - -def create_access_token(data: dict, expires_delta: timedelta | None = None): - to_encode = data.copy() - if expires_delta: - expire = datetime.utcnow() + expires_delta - else: - expire = datetime.utcnow() + timedelta(minutes=15) - to_encode.update({"exp": expire}) - encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) - return encoded_jwt - - -async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): - credentials_exception = HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Could not validate credentials", - headers={"WWW-Authenticate": "Bearer"}, - ) - try: - payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) - username: str = payload.get("sub") - if username is None: - raise credentials_exception - token_data = TokenData(username=username) - except JWTError: - raise credentials_exception - user = get_user(fake_users_db, username=token_data.username) - if user is None: - raise credentials_exception - return user - - -async def get_current_active_user( - current_user: Annotated[User, Depends(get_current_user)] -): - if current_user.disabled: - raise HTTPException(status_code=400, detail="Inactive user") - return current_user - - -@app.post("/token", response_model=Token) -async def login_for_access_token( - form_data: Annotated[OAuth2PasswordRequestForm, Depends()] -): - user = authenticate_user(fake_users_db, form_data.username, form_data.password) - if not user: - raise 
HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Incorrect username or password", - headers={"WWW-Authenticate": "Bearer"}, - ) - access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) - access_token = create_access_token( - data={"sub": user.username}, expires_delta=access_token_expires - ) - return {"access_token": access_token, "token_type": "bearer"} - - -@app.get("/users/me/", response_model=User) -async def read_users_me( - current_user: Annotated[User, Depends(get_current_active_user)] -): - return current_user - - -@app.get("/users/me/items/") -async def read_own_items( - current_user: Annotated[User, Depends(get_current_active_user)] -): - return [{"item_id": "Foo", "owner": current_user.username}] \ No newline at end of file diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 78ac1e75f..d546fd58e 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -2,160 +2,15 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from langflow.api import router +from langflow.routers import login, users, items, health from langflow.database.base import create_db_and_tables -from datetime import datetime, timedelta -from typing import Annotated - -from fastapi import Depends, HTTPException, status -from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm -from jose import JWTError, jwt -from passlib.context import CryptContext -from pydantic import BaseModel - -# to get a string like this run: -# openssl rand -hex 32 -SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7" -ALGORITHM = "HS256" -ACCESS_TOKEN_EXPIRE_MINUTES = 30 - - -fake_users_db = { - "johndoe": { - "username": "johndoe", - "full_name": "John Doe", - "email": "johndoe@example.com", - "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW", - "disabled": False, - } -} - - -class Token(BaseModel): - access_token: str - 
token_type: str - - -class TokenData(BaseModel): - username: str | None = None - - -class User(BaseModel): - username: str - email: str | None = None - full_name: str | None = None - disabled: bool | None = None - - -class UserInDB(User): - hashed_password: str - - -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") -oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") - - -def verify_password(plain_password, hashed_password): - return pwd_context.verify(plain_password, hashed_password) - - -def get_password_hash(password): - return pwd_context.hash(password) - - -def get_user(db, username: str): - if username in db: - user_dict = db[username] - return UserInDB(**user_dict) - - -def authenticate_user(fake_db, username: str, password: str): - user = get_user(fake_db, username) - if not user: - return False - if not verify_password(password, user.hashed_password): - return False - return user - - -def create_access_token(data: dict, expires_delta: timedelta | None = None): - to_encode = data.copy() - if expires_delta: - expire = datetime.utcnow() + expires_delta - else: - expire = datetime.utcnow() + timedelta(minutes=15) - to_encode.update({"exp": expire}) - encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) - return encoded_jwt - - -async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): - credentials_exception = HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Could not validate credentials", - headers={"WWW-Authenticate": "Bearer"}, - ) - try: - payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) - username: str = payload.get("sub") - if username is None: - raise credentials_exception - token_data = TokenData(username=username) - except JWTError: - raise credentials_exception - user = get_user(fake_users_db, username=token_data.username) - if user is None: - raise credentials_exception - return user - - -async def get_current_active_user(current_user: Annotated[User, 
Depends(get_current_user)]): - if current_user.disabled: - raise HTTPException(status_code=400, detail="Inactive user") - return current_user - def create_app(): """Create the FastAPI app and include the router.""" app = FastAPI() - origins = [ - "*", - ] - - @app.post("/token", response_model=Token) - async def login_for_access_token(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]): - user = authenticate_user(fake_users_db, form_data.username, form_data.password) - if not user: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Incorrect username or password", - headers={"WWW-Authenticate": "Bearer"}, - ) - access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) - access_token = create_access_token( - data={"sub": user.username}, - expires_delta=access_token_expires - ) - return {"access_token": access_token, "token_type": "bearer"} - - - @app.get("/users/me/", response_model=User) - async def read_users_me( - current_user: Annotated[User, Depends(get_current_active_user)] - ): - return current_user - - - @app.get("/users/me/items/") - async def read_own_items( - current_user: Annotated[User, Depends(get_current_active_user)] - ): - return [{"item_id": "Foo", "owner": current_user.username}] - - @app.get("/health") - def get_health(): - return {"status": "OK"} + origins = ["*"] app.add_middleware( CORSMiddleware, @@ -165,7 +20,12 @@ def create_app(): allow_headers=["*"], ) + app.include_router(login.router) + app.include_router(users.router) + app.include_router(items.router) + app.include_router(health.router) app.include_router(router) + app.on_event("startup")(create_db_and_tables) return app diff --git a/src/backend/langflow/models/__init__.py b/src/backend/langflow/models/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/models/token.py b/src/backend/langflow/models/token.py new file mode 100644 index 000000000..080286787 --- /dev/null +++ 
b/src/backend/langflow/models/token.py @@ -0,0 +1,10 @@ +from pydantic import BaseModel + + +class Token(BaseModel): + access_token: str + token_type: str + + +class TokenData(BaseModel): + username: str | None = None diff --git a/src/backend/langflow/models/user.py b/src/backend/langflow/models/user.py new file mode 100644 index 000000000..1023a6a65 --- /dev/null +++ b/src/backend/langflow/models/user.py @@ -0,0 +1,29 @@ +from pydantic import BaseModel + + +class User(BaseModel): + username: str + email: str | None = None + full_name: str | None = None + disabled: bool | None = None + + +class UserInDB(User): + hashed_password: str + + +fake_users_db = { + "johndoe": { + "username": "johndoe", + "full_name": "John Doe", + "email": "johndoe@example.com", + "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW", + "disabled": False, + } +} + + +def get_user(db, username: str): + if username in db: + user_dict = db[username] + return UserInDB(**user_dict) diff --git a/src/backend/langflow/routers/__init__.py b/src/backend/langflow/routers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/routers/health.py b/src/backend/langflow/routers/health.py new file mode 100644 index 000000000..244ef001d --- /dev/null +++ b/src/backend/langflow/routers/health.py @@ -0,0 +1,8 @@ +from fastapi import APIRouter + +router = APIRouter() + + +@router.get("/health") +def get_health(): + return {"status": "OK"} diff --git a/src/backend/langflow/routers/items.py b/src/backend/langflow/routers/items.py new file mode 100644 index 000000000..e6d21340e --- /dev/null +++ b/src/backend/langflow/routers/items.py @@ -0,0 +1,12 @@ +from fastapi import APIRouter, Depends +from ..models.user import User +from ..auth.auth import get_current_active_user + +router = APIRouter() + + +@router.get("/users/me/items/") +async def read_own_items( + current_user: User = Depends(get_current_active_user) +): + return [{"item_id": "Foo", 
"owner": current_user.username}] diff --git a/src/backend/langflow/routers/login.py b/src/backend/langflow/routers/login.py new file mode 100644 index 000000000..eac2d57bb --- /dev/null +++ b/src/backend/langflow/routers/login.py @@ -0,0 +1,35 @@ +from fastapi import APIRouter, Depends, HTTPException, status +from fastapi.security import OAuth2PasswordRequestForm +from langflow.models.token import Token +from langflow.models.user import fake_users_db +from datetime import timedelta +from langflow.auth.auth import ( + ACCESS_TOKEN_EXPIRE_MINUTES, + authenticate_user, + create_access_token +) + +router = APIRouter() + + +@router.post("/token", response_model=Token) +async def login_for_access_token( + form_data: OAuth2PasswordRequestForm = Depends() +): + user = authenticate_user( + fake_users_db, + form_data.username, + form_data.password + ) + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Incorrect username or password", + headers={"WWW-Authenticate": "Bearer"}, + ) + access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) + access_token = create_access_token( + data={"sub": user.username}, + expires_delta=access_token_expires + ) + return {"access_token": access_token, "token_type": "bearer"} diff --git a/src/backend/langflow/routers/users.py b/src/backend/langflow/routers/users.py new file mode 100644 index 000000000..1a9184ec8 --- /dev/null +++ b/src/backend/langflow/routers/users.py @@ -0,0 +1,10 @@ +from fastapi import APIRouter, Depends +from langflow.models.user import User +from langflow.auth.auth import get_current_active_user + +router = APIRouter() + + +@router.get("/users/me/", response_model=User) +async def read_users_me(current_user: User = Depends(get_current_active_user)): + return current_user From 6d78aefa623c1e2789056e6d08b1d70222b22c97 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 20 Jun 2023 00:19:30 +0100 Subject: [PATCH 03/90] Add random SECRET_KEY to test Changed the 
SECRET_KEY to a randomly generated one using the command `openssl rand -hex 32`. Additionally, added code to raise an exception when facing JWTError in `get_current_user`. Added a new user in `fake_users_db` who is currently disabled. Finally, changed the endpoint to show all users instead of `me`. --- src/backend/langflow/auth/auth.py | 11 ++++++++--- src/backend/langflow/models/user.py | 17 ++++++++++++----- src/backend/langflow/routers/items.py | 9 +++++++-- 3 files changed, 27 insertions(+), 10 deletions(-) diff --git a/src/backend/langflow/auth/auth.py b/src/backend/langflow/auth/auth.py index 503b2bd5b..ec45d48f4 100644 --- a/src/backend/langflow/auth/auth.py +++ b/src/backend/langflow/auth/auth.py @@ -9,7 +9,9 @@ from ..models.token import TokenData from ..models.user import get_user, fake_users_db, User -SECRET_KEY = "your_secret_key" +# to get a string like this run: +# openssl rand -hex 32 +SECRET_KEY = "698619adad2d916f1f32d264540976964b3c0d3828e0870a65add5800a8cc6b9" ALGORITHM = "HS256" ACCESS_TOKEN_EXPIRE_MINUTES = 30 @@ -37,6 +39,7 @@ def create_access_token(data: dict, expires_delta: timedelta = None): def authenticate_user(fake_db, username: str, password: str): user = get_user(fake_db, username) + if not user: return False if not verify_password(password, user.hashed_password): @@ -50,14 +53,16 @@ async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): detail="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"}, ) + try: payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) username: str = payload.get("sub") if username is None: raise credentials_exception token_data = TokenData(username=username) - except JWTError: - raise credentials_exception + except JWTError as e: + raise credentials_exception from e + user = get_user(fake_users_db, username=token_data.username) if user is None: raise credentials_exception diff --git a/src/backend/langflow/models/user.py b/src/backend/langflow/models/user.py 
index 1023a6a65..c47c85464 100644 --- a/src/backend/langflow/models/user.py +++ b/src/backend/langflow/models/user.py @@ -13,12 +13,19 @@ class UserInDB(User): fake_users_db = { - "johndoe": { - "username": "johndoe", - "full_name": "John Doe", - "email": "johndoe@example.com", - "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW", + "gustavo": { + "username": "gustavo", + "full_name": "Gustavo Schaedler", + "email": "gustavopoa@gmail.com", + "hashed_password": "$2b$12$f4R8IHUaVxVchhpWrwhckeJXnPalW1vUbJzcvb1KeovJcuMwE861K", #secret "disabled": False, + }, + "gustavo_disabled": { + "username": "gustavo_disabled", + "full_name": "Gustavo Disabled", + "email": "gustavo_disabled@gmail.com", + "hashed_password": "$2b$12$f4R8IHUaVxVchhpWrwhckeJXnPalW1vUbJzcvb1KeovJcuMwE861K", #secret + "disabled": True, } } diff --git a/src/backend/langflow/routers/items.py b/src/backend/langflow/routers/items.py index e6d21340e..7ca1ff320 100644 --- a/src/backend/langflow/routers/items.py +++ b/src/backend/langflow/routers/items.py @@ -5,8 +5,13 @@ from ..auth.auth import get_current_active_user router = APIRouter() -@router.get("/users/me/items/") +@router.get("/users/all/") async def read_own_items( current_user: User = Depends(get_current_active_user) ): - return [{"item_id": "Foo", "owner": current_user.username}] + return [ + { + "item_id": "my_id", + "owner": current_user.username + } + ] From f855433652af63639547e45d914448f826b51223 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 21 Jun 2023 15:54:56 +0100 Subject: [PATCH 04/90] feat(auth-models): Add is_admin field to User model This commit adds an is_admin field to the User model definition in user.py. It is set to False by default and has been updated in fake_users_db for both users. Also, there were some code formatting changes made in auth.py. 
--- src/backend/langflow/auth/auth.py | 4 ++-- src/backend/langflow/models/user.py | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/auth/auth.py b/src/backend/langflow/auth/auth.py index ec45d48f4..92f44d63d 100644 --- a/src/backend/langflow/auth/auth.py +++ b/src/backend/langflow/auth/auth.py @@ -39,7 +39,7 @@ def create_access_token(data: dict, expires_delta: timedelta = None): def authenticate_user(fake_db, username: str, password: str): user = get_user(fake_db, username) - + if not user: return False if not verify_password(password, user.hashed_password): @@ -53,7 +53,7 @@ async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): detail="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"}, ) - + try: payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) username: str = payload.get("sub") diff --git a/src/backend/langflow/models/user.py b/src/backend/langflow/models/user.py index c47c85464..2a9c233a4 100644 --- a/src/backend/langflow/models/user.py +++ b/src/backend/langflow/models/user.py @@ -6,6 +6,7 @@ class User(BaseModel): email: str | None = None full_name: str | None = None disabled: bool | None = None + is_admin: bool | None = False class UserInDB(User): @@ -19,6 +20,7 @@ fake_users_db = { "email": "gustavopoa@gmail.com", "hashed_password": "$2b$12$f4R8IHUaVxVchhpWrwhckeJXnPalW1vUbJzcvb1KeovJcuMwE861K", #secret "disabled": False, + "is_admin": True, }, "gustavo_disabled": { "username": "gustavo_disabled", @@ -26,6 +28,7 @@ fake_users_db = { "email": "gustavo_disabled@gmail.com", "hashed_password": "$2b$12$f4R8IHUaVxVchhpWrwhckeJXnPalW1vUbJzcvb1KeovJcuMwE861K", #secret "disabled": True, + "is_admin": False, } } From 0fa15d2d91cdc5a361f146ccbb8e798b0b3b4e7c Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 1 Aug 2023 22:18:54 +0100 Subject: [PATCH 05/90] =?UTF-8?q?=E2=9C=A8=20feat(custom.py):=20add=20new?= 
=?UTF-8?q?=20custom=20component=20'YourComponent'=20to=20the=20project?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds a new file 'custom.py' under the 'src/backend/langflow/components/custom_components' directory. The file contains the implementation of a custom component called 'YourComponent'. The 'YourComponent' class extends the 'CustomComponent' class from the 'langflow' library. It has a display name of "Custom Component" and a description of "My description". The component has a 'build_config' method that returns a configuration object with a single property 'url'. The 'url' property is multiline and required. The 'build' method of the component takes in a 'url' string, a 'llm' object of type 'BaseLLM', and a 'prompt' object of type 'PromptTemplate'. It makes a GET request to the provided 'url', runs the response text through an 'LLMChain' with the given 'llm' and 'prompt', and returns a 'Document' object with the resulting page content. 
--- .../components/custom_components/custom.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 src/backend/langflow/components/custom_components/custom.py diff --git a/src/backend/langflow/components/custom_components/custom.py b/src/backend/langflow/components/custom_components/custom.py new file mode 100644 index 000000000..3ebc5bda8 --- /dev/null +++ b/src/backend/langflow/components/custom_components/custom.py @@ -0,0 +1,21 @@ +from langflow import CustomComponent + +from langchain.llms.base import BaseLLM +from langchain.chains import LLMChain +from langchain import PromptTemplate +from langchain.schema import Document + +import requests + +class YourComponent(CustomComponent): + display_name: str = "Custom Component" + description: str = "My description" + + def build_config(self): + return { "url": { "multiline": True, "required": True } } + + def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document: + response = requests.get(url) + chain = LLMChain(llm=llm, prompt=prompt) + result = chain.run(response.text[:300]) + return Document(page_content=str(result)) From a09e57b9ef800b17cde806d4fcb9fb8b8f09dccb Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 1 Aug 2023 18:24:19 -0300 Subject: [PATCH 06/90] =?UTF-8?q?=F0=9F=94=A7=20chore(=5F=5Fmain=5F=5F.py)?= =?UTF-8?q?:=20update=20components=5Fpath=20default=20value=20to=20include?= =?UTF-8?q?=20"components"=20directory=20for=20better=20organization=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(custom.py):=20update=20description=20of=20?= =?UTF-8?q?YourComponent=20to=20be=20more=20descriptive=20and=20accurate?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/__main__.py | 2 +- src/backend/langflow/components/custom_components/custom.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/__main__.py b/src/backend/langflow/__main__.py index 
b20a2a902..58789908a 100644 --- a/src/backend/langflow/__main__.py +++ b/src/backend/langflow/__main__.py @@ -127,7 +127,7 @@ def serve( timeout: int = typer.Option(300, help="Worker timeout in seconds."), port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"), components_path: Optional[Path] = typer.Option( - Path(__file__).parent, + Path(__file__).parent / "components", help="Path to the directory containing custom components.", envvar="LANGFLOW_COMPONENTS_PATH", ), diff --git a/src/backend/langflow/components/custom_components/custom.py b/src/backend/langflow/components/custom_components/custom.py index 3ebc5bda8..cec2c7acd 100644 --- a/src/backend/langflow/components/custom_components/custom.py +++ b/src/backend/langflow/components/custom_components/custom.py @@ -7,12 +7,13 @@ from langchain.schema import Document import requests + class YourComponent(CustomComponent): display_name: str = "Custom Component" - description: str = "My description" + description: str = "Create any custom component you want!" def build_config(self): - return { "url": { "multiline": True, "required": True } } + return {"url": {"multiline": True, "required": True}} def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document: response = requests.get(url) From 5b9bed07b5d04b489a95e14933f38ad8c95df738 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 1 Aug 2023 23:17:23 +0100 Subject: [PATCH 07/90] =?UTF-8?q?=F0=9F=94=A5=20chore(custom.py):=20remove?= =?UTF-8?q?=20custom=20component=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The custom component file `custom.py` has been removed from the project as it is no longer needed. 
--- .../components/custom_components/custom.py | 22 ------------------- 1 file changed, 22 deletions(-) delete mode 100644 src/backend/langflow/components/custom_components/custom.py diff --git a/src/backend/langflow/components/custom_components/custom.py b/src/backend/langflow/components/custom_components/custom.py deleted file mode 100644 index cec2c7acd..000000000 --- a/src/backend/langflow/components/custom_components/custom.py +++ /dev/null @@ -1,22 +0,0 @@ -from langflow import CustomComponent - -from langchain.llms.base import BaseLLM -from langchain.chains import LLMChain -from langchain import PromptTemplate -from langchain.schema import Document - -import requests - - -class YourComponent(CustomComponent): - display_name: str = "Custom Component" - description: str = "Create any custom component you want!" - - def build_config(self): - return {"url": {"multiline": True, "required": True}} - - def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document: - response = requests.get(url) - chain = LLMChain(llm=llm, prompt=prompt) - result = chain.run(response.text[:300]) - return Document(page_content=str(result)) From f3174033ed1409a09d0bfbf3b792cea34cfc0c64 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Thu, 3 Aug 2023 21:49:31 +0100 Subject: [PATCH 08/90] =?UTF-8?q?=F0=9F=94=92=20chore(auth.py):=20refactor?= =?UTF-8?q?=20authenticate=5Fuser=20function=20to=20use=20database=20sessi?= =?UTF-8?q?on=20instead=20of=20fake=5Fdb=20for=20authentication?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔒 chore(auth.py): refactor get_current_user function to use database session instead of fake_users_db for retrieving user information 🔒 chore(auth.py): refactor get_current_active_user function to use database session instead of fake_users_db for retrieving user information 🔒 chore(user.py): refactor get_user function to use database session instead of fake_users_db for retrieving user information 🔒 
chore(login.py): refactor login_for_access_token function to use database session instead of fake_users_db for authentication and token creation 🔒 feat(models.py): add User model to represent user data in the database 🔒 feat(base_control.py): add BaseControl model to represent common control fields in database models --- src/backend/langflow/auth/auth.py | 28 ++++++------- src/backend/langflow/models/base_control.py | 7 ++++ src/backend/langflow/models/models.py | 21 ++++++++++ src/backend/langflow/models/user.py | 46 ++++++--------------- src/backend/langflow/routers/login.py | 38 +++++++++-------- 5 files changed, 72 insertions(+), 68 deletions(-) create mode 100644 src/backend/langflow/models/base_control.py create mode 100644 src/backend/langflow/models/models.py diff --git a/src/backend/langflow/auth/auth.py b/src/backend/langflow/auth/auth.py index 92f44d63d..c4b8ad5b4 100644 --- a/src/backend/langflow/auth/auth.py +++ b/src/backend/langflow/auth/auth.py @@ -1,16 +1,14 @@ from typing import Annotated - from fastapi import Depends, HTTPException, status from passlib.context import CryptContext from jose import JWTError, jwt from datetime import datetime, timedelta, timezone from fastapi.security import OAuth2PasswordBearer -from ..models.token import TokenData -from ..models.user import get_user, fake_users_db, User +from langflow.models.token import TokenData +from langflow.models.user import get_user, User +from sqlalchemy.orm import Session +from langflow.database.base import get_session - -# to get a string like this run: -# openssl rand -hex 32 SECRET_KEY = "698619adad2d916f1f32d264540976964b3c0d3828e0870a65add5800a8cc6b9" ALGORITHM = "HS256" ACCESS_TOKEN_EXPIRE_MINUTES = 30 @@ -37,23 +35,21 @@ def create_access_token(data: dict, expires_delta: timedelta = None): return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) -def authenticate_user(fake_db, username: str, password: str): - user = get_user(fake_db, username) - - if not user: +def 
authenticate_user(db: Session, username: str, password: str): + if user := get_user(db, username): + return user if verify_password(password, user.hashed_password) else False + else: return False - if not verify_password(password, user.hashed_password): - return False - return user -async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): +async def get_current_user( + token: Annotated[str, Depends(oauth2_scheme)], db: Session = Depends(get_session) +): credentials_exception = HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"}, ) - try: payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) username: str = payload.get("sub") @@ -63,7 +59,7 @@ async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): except JWTError as e: raise credentials_exception from e - user = get_user(fake_users_db, username=token_data.username) + user = get_user(db, username=token_data.username) if user is None: raise credentials_exception return user diff --git a/src/backend/langflow/models/base_control.py b/src/backend/langflow/models/base_control.py new file mode 100644 index 000000000..9eea9d9f0 --- /dev/null +++ b/src/backend/langflow/models/base_control.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel +from datetime import datetime + + +class BaseControl(BaseModel): + created_at: datetime + updated_at: datetime diff --git a/src/backend/langflow/models/models.py b/src/backend/langflow/models/models.py new file mode 100644 index 000000000..d86d5f7f0 --- /dev/null +++ b/src/backend/langflow/models/models.py @@ -0,0 +1,21 @@ +from sqlalchemy import Column, String, Boolean, DateTime +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.sql import func +from sqlalchemy.dialects.postgresql import UUID +from uuid import uuid4 + +Base = declarative_base() + + +class User(Base): + __tablename__ = "users" + + id = Column( + UUID(as_uuid=True), 
primary_key=True, default=uuid4, unique=True, nullable=False + ) + username = Column(String, unique=True, index=True) + email = Column(String, unique=True, index=True) + disabled = Column(Boolean, default=False) + is_superuser = Column(Boolean, default=False) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) diff --git a/src/backend/langflow/models/user.py b/src/backend/langflow/models/user.py index 2a9c233a4..b8f6a3fc4 100644 --- a/src/backend/langflow/models/user.py +++ b/src/backend/langflow/models/user.py @@ -1,39 +1,17 @@ -from pydantic import BaseModel +from sqlalchemy.orm import Session +from langflow.models.user import User as DBUser +from langflow.models.base_control import BaseControl +from uuid import UUID -class User(BaseModel): +class User(BaseControl): + id: UUID username: str - email: str | None = None - full_name: str | None = None - disabled: bool | None = None - is_admin: bool | None = False + email: str + disabled: bool = False + is_superuser: bool = False -class UserInDB(User): - hashed_password: str - - -fake_users_db = { - "gustavo": { - "username": "gustavo", - "full_name": "Gustavo Schaedler", - "email": "gustavopoa@gmail.com", - "hashed_password": "$2b$12$f4R8IHUaVxVchhpWrwhckeJXnPalW1vUbJzcvb1KeovJcuMwE861K", #secret - "disabled": False, - "is_admin": True, - }, - "gustavo_disabled": { - "username": "gustavo_disabled", - "full_name": "Gustavo Disabled", - "email": "gustavo_disabled@gmail.com", - "hashed_password": "$2b$12$f4R8IHUaVxVchhpWrwhckeJXnPalW1vUbJzcvb1KeovJcuMwE861K", #secret - "disabled": True, - "is_admin": False, - } -} - - -def get_user(db, username: str): - if username in db: - user_dict = db[username] - return UserInDB(**user_dict) +def get_user(db: Session, user_id: UUID) -> User: + db_user = db.query(DBUser).filter(DBUser.id == user_id).first() + return User.from_orm(db_user) if db_user else None # type: ignore diff --git 
a/src/backend/langflow/routers/login.py b/src/backend/langflow/routers/login.py index eac2d57bb..dba69758f 100644 --- a/src/backend/langflow/routers/login.py +++ b/src/backend/langflow/routers/login.py @@ -1,35 +1,37 @@ +from datetime import timedelta from fastapi import APIRouter, Depends, HTTPException, status from fastapi.security import OAuth2PasswordRequestForm from langflow.models.token import Token -from langflow.models.user import fake_users_db -from datetime import timedelta from langflow.auth.auth import ( - ACCESS_TOKEN_EXPIRE_MINUTES, - authenticate_user, - create_access_token + ACCESS_TOKEN_EXPIRE_MINUTES, + authenticate_user, + create_access_token, ) +from sqlalchemy.orm import Session +from langflow.database.base import get_session + +TOKEN_TYPE = "bearer" router = APIRouter() +def create_user_token(user: str) -> dict: + access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) + access_token = create_access_token( + data={"sub": user.username}, expires_delta=access_token_expires + ) + return {"access_token": access_token, "token_type": TOKEN_TYPE} + + @router.post("/token", response_model=Token) async def login_for_access_token( - form_data: OAuth2PasswordRequestForm = Depends() + form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_session) ): - user = authenticate_user( - fake_users_db, - form_data.username, - form_data.password - ) - if not user: + if user := authenticate_user(db, form_data.username, form_data.password): + return create_user_token(user) + else: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Incorrect username or password", headers={"WWW-Authenticate": "Bearer"}, ) - access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) - access_token = create_access_token( - data={"sub": user.username}, - expires_delta=access_token_expires - ) - return {"access_token": access_token, "token_type": "bearer"} From 35ec2e086709b5ebf4bb338ddb1a5d6aee88520e Mon Sep 17 00:00:00 
2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 4 Aug 2023 16:56:36 -0300 Subject: [PATCH 09/90] =?UTF-8?q?=F0=9F=90=9B=20fix(vector=5Fstore.py):=20?= =?UTF-8?q?build=20Chroma=20settings=20if=20any=20of=20the=20chroma=5Fserv?= =?UTF-8?q?er=5F=20params=20are=20present=20in=20params=20=E2=9C=A8=20feat?= =?UTF-8?q?(vectorstores.py):=20add=20new=20fields=20for=20Chroma=20vector?= =?UTF-8?q?=20store=20configuration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../interface/initialize/vector_store.py | 23 +++ .../template/frontend_node/vectorstores.py | 155 ++++++++++++------ 2 files changed, 127 insertions(+), 51 deletions(-) diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index d4bdb0155..c616d9b87 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -170,6 +170,29 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict): def initialize_chroma(class_object: Type[Chroma], params: dict): """Initialize a ChromaDB object from the params""" + # chroma_server_host: str | None = None, + # chroma_server_http_port: str | None = None, + # chroma_server_ssl_enabled: bool | None = False, + # chroma_server_grpc_port: str | None = None, + # chroma_server_cors_allow_origins: List[str] = [], + # If any of the above params are in params, specially host and port, + # we need to build the Chroma settings + if ( # type: ignore + "chroma_server_host" in params + or "chroma_server_http_port" in params + or "chroma_server_ssl_enabled" in params + or "chroma_server_grpc_port" in params + or "chroma_server_cors_allow_origins" in params + ): + import chromadb + + settings_params = { + key: params[key] + for key, value_ in params.items() + if key.startswith("chroma_server_") and value_ + } + chroma_settings = chromadb.config.Settings(**settings_params) + 
params["client_settings"] = chroma_settings persist = params.pop("persist", False) if not docs_in_params(params): params.pop("documents", None) diff --git a/src/backend/langflow/template/frontend_node/vectorstores.py b/src/backend/langflow/template/frontend_node/vectorstores.py index 53a840b80..23c293437 100644 --- a/src/backend/langflow/template/frontend_node/vectorstores.py +++ b/src/backend/langflow/template/frontend_node/vectorstores.py @@ -4,6 +4,52 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode +BASIC_FIELDS = [ + "work_dir", + "collection_name", + "api_key", + "location", + "persist_directory", + "persist", + "weaviate_url", + "index_name", + "namespace", + "folder_path", + "table_name", + "query_name", + "supabase_url", + "supabase_service_key", + "mongodb_atlas_cluster_uri", + "collection_name", + "db_name", +] +ADVANCED_FIELDS = [ + "n_dim", + "key", + "prefix", + "distance_func", + "content_payload_key", + "metadata_payload_key", + "timeout", + "host", + "path", + "url", + "port", + "https", + "prefer_grpc", + "grpc_port", + "pinecone_api_key", + "pinecone_env", + "client_kwargs", + "search_kwargs", + "chroma_server_host", + "chroma_server_http_port", + "chroma_server_ssl_enabled", + "chroma_server_grpc_port", + "chroma_server_cors_allow_origins", +] + + class VectorStoreFrontendNode(FrontendNode): def add_extra_fields(self) -> None: extra_fields: List[TemplateField] = [] @@ -45,16 +91,62 @@ class VectorStoreFrontendNode(FrontendNode): elif self.template.type_name == "Chroma": # New bool field for persist parameter - extra_field = TemplateField( - name="persist", - field_type="bool", - required=False, - show=True, - advanced=False, - value=False, - display_name="Persist", - ) - extra_fields.append(extra_field) + chroma_fields = [ + TemplateField( + name="persist", + field_type="bool", + required=False, + show=True, + advanced=False, + value=False, + display_name="Persist", + ), + # 
chroma_server_grpc_port: str | None = None, + TemplateField( + name="chroma_server_host", + field_type="str", + required=False, + show=True, + advanced=True, + display_name="Chroma Server Host", + ), + TemplateField( + name="chroma_server_http_port", + field_type="str", + required=False, + show=True, + advanced=True, + display_name="Chroma Server HTTP Port", + ), + TemplateField( + name="chroma_server_ssl_enabled", + field_type="bool", + required=False, + show=True, + advanced=True, + value=False, + display_name="Chroma Server SSL Enabled", + ), + TemplateField( + name="chroma_server_grpc_port", + field_type="str", + required=False, + show=True, + advanced=True, + display_name="Chroma Server GRPC Port", + ), + TemplateField( + name="chroma_server_cors_allow_origins", + field_type="str", + required=False, + is_list=True, + show=True, + advanced=True, + display_name="Chroma Server CORS Allow Origins", + ), + ] + + extra_fields.extend(chroma_fields) elif self.template.type_name == "Pinecone": # add pinecone_api_key and pinecone_env extra_field = TemplateField( @@ -208,45 +300,6 @@ class VectorStoreFrontendNode(FrontendNode): def format_field(field: TemplateField, name: Optional[str] = None) -> None: FrontendNode.format_field(field, name) # Define common field attributes - basic_fields = [ - "work_dir", - "collection_name", - "api_key", - "location", - "persist_directory", - "persist", - "weaviate_url", - "index_name", - "namespace", - "folder_path", - "table_name", - "query_name", - "supabase_url", - "supabase_service_key", - "mongodb_atlas_cluster_uri", - "collection_name", - "db_name", - ] - advanced_fields = [ - "n_dim", - "key", - "prefix", - "distance_func", - "content_payload_key", - "metadata_payload_key", - "timeout", - "host", - "path", - "url", - "port", - "https", - "prefer_grpc", - "grpc_port", - "pinecone_api_key", - "pinecone_env", - "client_kwargs", - "search_kwargs", - ] # Check and set field attributes if field.name == "texts": @@ -269,7 +322,7 @@ 
class VectorStoreFrontendNode(FrontendNode): field.display_name = "Embedding" field.field_type = "Embeddings" - elif field.name in basic_fields: + elif field.name in BASIC_FIELDS: field.show = True field.advanced = False if field.name == "api_key": @@ -279,7 +332,7 @@ class VectorStoreFrontendNode(FrontendNode): field.value = ":memory:" field.placeholder = ":memory:" - elif field.name in advanced_fields: + elif field.name in ADVANCED_FIELDS: field.show = True field.advanced = True if "key" in field.name: From 2bc4420f628b7352f362305049853a7606ab41ce Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 4 Aug 2023 17:22:05 -0300 Subject: [PATCH 10/90] =?UTF-8?q?=F0=9F=94=A7=20chore(vector=5Fstore.py):?= =?UTF-8?q?=20remove=20unused=20code=20related=20to=20initializing=20Chrom?= =?UTF-8?q?aDB=20object=20=F0=9F=94=A7=20chore(vector=5Fstore.py):=20remov?= =?UTF-8?q?e=20unused=20import=20statement=20for=20chromadb=20module?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/initialize/vector_store.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index c616d9b87..12cf054a5 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -170,13 +170,6 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict): def initialize_chroma(class_object: Type[Chroma], params: dict): """Initialize a ChromaDB object from the params""" - # chroma_server_host: str | None = None, - # chroma_server_http_port: str | None = None, - # chroma_server_ssl_enabled: bool | None = False, - # chroma_server_grpc_port: str | None = None, - # chroma_server_cors_allow_origins: List[str] = [], - # If any of the above params are in params, specially host and port, - # we need to build the 
Chroma settings if ( # type: ignore "chroma_server_host" in params or "chroma_server_http_port" in params @@ -184,7 +177,7 @@ def initialize_chroma(class_object: Type[Chroma], params: dict): or "chroma_server_grpc_port" in params or "chroma_server_cors_allow_origins" in params ): - import chromadb + import chromadb # type: ignore settings_params = { key: params[key] From c8a5e0724d2a8d87a2ab87f427146c6e19fc2e7d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Fri, 4 Aug 2023 18:12:28 -0300 Subject: [PATCH 11/90] =?UTF-8?q?=F0=9F=90=9B=20fix(utilities.py):=20handl?= =?UTF-8?q?e=20field=5Ftype=20with=20"typing=5Fextensions"=20prefix=20in?= =?UTF-8?q?=20UtilitiesFrontendNode=20class=20=E2=9C=A8=20feat(utilities.p?= =?UTF-8?q?y):=20improve=20handling=20of=20field=5Ftype=20in=20UtilitiesFr?= =?UTF-8?q?ontendNode=20class=20to=20support=20different=20formats?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #707 --- src/backend/langflow/template/frontend_node/utilities.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/template/frontend_node/utilities.py b/src/backend/langflow/template/frontend_node/utilities.py index 615d7d12f..df993e377 100644 --- a/src/backend/langflow/template/frontend_node/utilities.py +++ b/src/backend/langflow/template/frontend_node/utilities.py @@ -12,8 +12,11 @@ class UtilitiesFrontendNode(FrontendNode): FrontendNode.format_field(field, name) # field.field_type could be "Literal['news', 'search', 'places', 'images'] # we need to convert it to a list + # It seems it could also be like "typing_extensions.['news', 'search', 'places', 'images']" if "Literal" in field.field_type: - field.options = ast.literal_eval(field.field_type.replace("Literal", "")) + field_type = field.field_type.replace("typing_extensions.", "") + field_type = field_type.replace("Literal", "") + field.options = ast.literal_eval(field_type) field.is_list = True 
field.field_type = "str" From 1aed2b60a2e6a71a6cec5038f151b41b795bf400 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 12:09:46 -0300 Subject: [PATCH 12/90] =?UTF-8?q?=F0=9F=94=A7=20fix(endpoints.py):=20chang?= =?UTF-8?q?e=20log=20message=20to=20improve=20clarity=20and=20accuracy=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(endpoints.py):=20change=20log=20message=20to?= =?UTF-8?q?=20provide=20more=20detailed=20information=20about=20loaded=20c?= =?UTF-8?q?ustom=20components?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/endpoints.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index f4817d12a..1ca2a2437 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -46,13 +46,18 @@ def get_all(): build_langchain_custom_component_list_from_path(str(path)) for path in settings.COMPONENTS_PATH ] - logger.info(f"Loading {len(custom_component_dicts)} custom components") - + logger.info(f"Loading {len(custom_component_dicts)} category(ies)") for custom_component_dict in custom_component_dicts: + # custom_component_dict is a dict of dicts + category = list(custom_component_dict.keys())[0] + logger.info( + f"Loading {len(custom_component_dict[category])} component(s) from category {category}" + ) + logger.debug(custom_component_dict) custom_components_from_file = merge_nested_dicts( custom_components_from_file, custom_component_dict ) - logger.info(f"Loaded {custom_component_dict}") + return merge_nested_dicts(native_components, custom_components_from_file) From 819eb703d113836c4b152377acd2a5a3a0256fce Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 12:11:28 -0300 Subject: [PATCH 13/90] =?UTF-8?q?=F0=9F=90=9B=20fix(utils.py):=20add=20fun?= 
=?UTF-8?q?ction=20merge=5Fnested=5Fdicts=5Fwith=5Frenaming=20to=20handle?= =?UTF-8?q?=20merging=20nested=20dictionaries=20with=20renaming=20of=20key?= =?UTF-8?q?s=20=F0=9F=90=9B=20fix(endpoints.py):=20update=20import=20state?= =?UTF-8?q?ment=20to=20import=20merge=5Fnested=5Fdicts=5Fwith=5Frenaming?= =?UTF-8?q?=20from=20utils.py=20=F0=9F=90=9B=20fix(endpoints.py):=20update?= =?UTF-8?q?=20function=20call=20to=20merge=5Fnested=5Fdicts=5Fwith=5Frenam?= =?UTF-8?q?ing=20to=20handle=20merging=20of=20native=20and=20custom=20comp?= =?UTF-8?q?onents=20=F0=9F=90=9B=20fix(types.py):=20remove=20unused=20impo?= =?UTF-8?q?rt=20statement=20for=20merge=5Fnested=5Fdicts=20from=20utils.py?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/utils.py | 27 ++++++++++++++++++++++++ src/backend/langflow/api/v1/endpoints.py | 8 ++++--- src/backend/langflow/interface/types.py | 1 - 3 files changed, 32 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/api/utils.py b/src/backend/langflow/api/utils.py index 91fa93ea4..0fb53e541 100644 --- a/src/backend/langflow/api/utils.py +++ b/src/backend/langflow/api/utils.py @@ -66,3 +66,30 @@ def merge_nested_dicts(dict1, dict2): else: dict1[key] = value return dict1 + + +def merge_nested_dicts_with_renaming(dict1, dict2): + for key, value in dict2.items(): + if ( + key in dict1 + and isinstance(value, dict) + and isinstance(dict1.get(key), dict) + ): + for sub_key, sub_value in value.items(): + if sub_key in dict1[key]: + new_key = get_new_key(dict1[key], sub_key) + dict1[key][new_key] = sub_value + else: + dict1[key][sub_key] = sub_value + else: + dict1[key] = value + return dict1 + + +def get_new_key(dictionary, original_key): + counter = 1 + new_key = original_key + " (" + str(counter) + ")" + while new_key in dictionary: + counter += 1 + new_key = original_key + " (" + str(counter) + ")" + return new_key diff --git a/src/backend/langflow/api/v1/endpoints.py 
b/src/backend/langflow/api/v1/endpoints.py index 1ca2a2437..24af55588 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -18,7 +18,7 @@ from langflow.api.v1.schemas import ( CustomComponentCode, ) -from langflow.api.utils import merge_nested_dicts +from langflow.api.utils import merge_nested_dicts_with_renaming from langflow.interface.types import ( build_langchain_types_dict, @@ -54,11 +54,13 @@ def get_all(): f"Loading {len(custom_component_dict[category])} component(s) from category {category}" ) logger.debug(custom_component_dict) - custom_components_from_file = merge_nested_dicts( + custom_components_from_file = merge_nested_dicts_with_renaming( custom_components_from_file, custom_component_dict ) - return merge_nested_dicts(native_components, custom_components_from_file) + return merge_nested_dicts_with_renaming( + native_components, custom_components_from_file + ) # For backwards compatibility we will keep the old endpoint diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 3b4c09b32..668956e07 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -30,7 +30,6 @@ from langflow.interface.retrievers.base import retriever_creator from langflow.interface.custom.directory_reader import DirectoryReader from langflow.utils.logger import logger from langflow.utils.util import get_base_classes -from langflow.api.utils import merge_nested_dicts import re import warnings From 8d662ad9b4410e275404294dea38331d9d07cd34 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 18:18:26 -0300 Subject: [PATCH 14/90] =?UTF-8?q?=F0=9F=94=A7=20chore(pyproject.toml):=20a?= =?UTF-8?q?dd=20alembic=20as=20a=20development=20dependency=20to=20manage?= =?UTF-8?q?=20database=20migrations=20=E2=AC=86=EF=B8=8F=20feat(pyproject.?= =?UTF-8?q?toml):=20upgrade=20alembic=20to=20version=201.11.2=20to=20ensur?= 
=?UTF-8?q?e=20compatibility=20with=20other=20dependencies?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 42 ++++++++++++++++++++++++++++++++++++++++-- pyproject.toml | 1 + 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 809dab71e..fc7748707 100644 --- a/poetry.lock +++ b/poetry.lock @@ -144,6 +144,25 @@ files = [ {file = "aiostream-0.4.5.tar.gz", hash = "sha256:3ecbf87085230fbcd9605c32ca20c4fb41af02c71d076eab246ea22e35947d88"}, ] +[[package]] +name = "alembic" +version = "1.11.2" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.11.2-py3-none-any.whl", hash = "sha256:7981ab0c4fad4fe1be0cf183aae17689fe394ff874fd2464adb774396faf0796"}, + {file = "alembic-1.11.2.tar.gz", hash = "sha256:678f662130dc540dac12de0ea73de9f89caea9dbea138f60ef6263149bf84657"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["python-dateutil"] + [[package]] name = "anthropic" version = "0.3.8" @@ -3147,6 +3166,25 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] +[[package]] +name = "mako" +version = "1.2.4" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "markdown" version = "3.4.4" @@ -3195,7 +3233,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, @@ -7580,4 +7618,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "57abce2ebcdc3cd7e359c36805822b9398d3bfb500c175b173a6d784d1276df6" +content-hash = "51cb6dada892cc1b5d2800130a3bd95a475f471dc0538ebfb73d96f4dd1f1dc4" diff --git a/pyproject.toml b/pyproject.toml index a29ae46ca..81df39c7b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,6 +77,7 @@ psycopg = "^3.1.9" psycopg-binary = "^3.1.9" fastavro = "^1.8.0" langchain-experimental = "^0.0.8" +alembic = "^1.11.2" [tool.poetry.group.dev.dependencies] black = "^23.1.0" From 46f289b5d0dbe099349ed971b3f7fc47f8a4098c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 18:20:37 -0300 Subject: [PATCH 15/90] =?UTF-8?q?=F0=9F=93=9D=20chore(alembic.ini):=20add?= =?UTF-8?q?=20Alembic=20configuration=20file=20for=20database=20migrations?= =?UTF-8?q?=20=F0=9F=93=9D=20chore(alembic/README):=20add=20README=20file?= =?UTF-8?q?=20for=20Alembic=20migrations=20=F0=9F=93=9D=20chore(alembic/en?= 
=?UTF-8?q?v.py):=20add=20Alembic=20environment=20configuration=20file=20?= =?UTF-8?q?=F0=9F=93=9D=20chore(alembic/script.py.mako):=20add=20Alembic?= =?UTF-8?q?=20migration=20script=20template=20=F0=9F=93=9D=20chore(main.py?= =?UTF-8?q?):=20refactor=20database=20initialization=20and=20migration=20l?= =?UTF-8?q?ogic=20to=20use=20DatabaseManager=20class?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/alembic.ini | 113 ++++++++++++++++++++ src/backend/langflow/alembic/README | 1 + src/backend/langflow/alembic/env.py | 78 ++++++++++++++ src/backend/langflow/alembic/script.py.mako | 27 +++++ src/backend/langflow/main.py | 11 +- 5 files changed, 226 insertions(+), 4 deletions(-) create mode 100644 src/backend/langflow/alembic.ini create mode 100644 src/backend/langflow/alembic/README create mode 100644 src/backend/langflow/alembic/env.py create mode 100644 src/backend/langflow/alembic/script.py.mako diff --git a/src/backend/langflow/alembic.ini b/src/backend/langflow/alembic.ini new file mode 100644 index 000000000..0227ea4f2 --- /dev/null +++ b/src/backend/langflow/alembic.ini @@ -0,0 +1,113 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. 
+# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
+ +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# This is a placeholder to run the first migration +# When the user runs the Langflow the database url will +# be set dynamically +sqlalchemy.url = sqlite:///langflow.db + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/src/backend/langflow/alembic/README b/src/backend/langflow/alembic/README new file mode 100644 index 000000000..98e4f9c44 --- /dev/null +++ b/src/backend/langflow/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/src/backend/langflow/alembic/env.py b/src/backend/langflow/alembic/env.py new file mode 100644 index 000000000..ea4fe9c43 --- /dev/null +++ b/src/backend/langflow/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +from langflow.database.base import SQLModel + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = SQLModel.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/src/backend/langflow/alembic/script.py.mako b/src/backend/langflow/alembic/script.py.mako new file mode 100644 index 000000000..6ce335109 --- /dev/null +++ b/src/backend/langflow/alembic/script.py.mako @@ -0,0 +1,27 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 5b3341693..deef1c914 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -6,13 +6,15 @@ from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles from langflow.api import router -from langflow.database.base import create_db_and_tables, Engine +from langflow.database.base import DatabaseManager from langflow.interface.utils import setup_llm_caching from langflow.utils.logger import configure def create_app(): """Create the FastAPI app and include the router.""" + from langflow.settings import settings + configure() app = 
FastAPI() @@ -32,10 +34,11 @@ def create_app(): allow_methods=["*"], allow_headers=["*"], ) - + database_manager = DatabaseManager(settings.DATABASE_URL) app.include_router(router) - app.on_event("startup")(Engine.update) - app.on_event("startup")(create_db_and_tables) + # app.on_event("startup")(Engine.update) + app.on_event("startup")(database_manager.run_migrations) + app.on_event("startup")(database_manager.create_db_and_tables) app.on_event("startup")(setup_llm_caching) return app From cd67aa212cd635fe20c1f8c5f4bc207e4dd46579 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 18:33:17 -0300 Subject: [PATCH 16/90] =?UTF-8?q?=F0=9F=93=A6=20chore(alembic):=20add=20mi?= =?UTF-8?q?gration=20script=20to=20create=20Flow=20table=20and=20FlowStyle?= =?UTF-8?q?=20table=20=F0=9F=94=A7=20refactor(base.py):=20refactor=20Datab?= =?UTF-8?q?aseManager=20class=20to=20handle=20database=20operations=20and?= =?UTF-8?q?=20migrations=20=F0=9F=94=A7=20refactor(base.py):=20refactor=20?= =?UTF-8?q?session=5Fgetter=20function=20to=20use=20DatabaseManager=20inst?= =?UTF-8?q?ance=20=F0=9F=94=A7=20refactor(base.py):=20refactor=20get=5Fses?= =?UTF-8?q?sion=20function=20to=20use=20DatabaseManager=20instance=20?= =?UTF-8?q?=F0=9F=94=A7=20refactor(models/=5F=5Finit=5F=5F.py):=20add=20Fl?= =?UTF-8?q?ow=20model=20to=20=5F=5Fall=5F=5F=20list?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../versions/4814b6f4abfd_add_flow_table.py | 65 +++++++++++++++++ src/backend/langflow/database/base.py | 71 ++++++++++++++++--- .../langflow/database/models/__init__.py | 4 ++ 3 files changed, 132 insertions(+), 8 deletions(-) create mode 100644 src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py diff --git a/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py b/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py new file mode 100644 index 000000000..0b2f32657 --- /dev/null +++ 
b/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py @@ -0,0 +1,65 @@ +"""Add Flow table + +Revision ID: 4814b6f4abfd +Revises: +Create Date: 2023-08-05 17:47:42.879824 + +""" + +import contextlib +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = "4814b6f4abfd" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + + # This suppress is used to not break the migration if the table already exists. + with contextlib.suppress(sa.exc.OperationalError): + op.create_table( + "flow", + sa.Column("data", sa.JSON(), nullable=True), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + op.create_index( + op.f("ix_flow_description"), "flow", ["description"], unique=False + ) + op.create_index(op.f("ix_flow_name"), "flow", ["name"], unique=False) + with contextlib.suppress(sa.exc.OperationalError): + op.create_table( + "flowstyle", + sa.Column("color", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("emoji", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.ForeignKeyConstraint( + ["flow_id"], + ["flow.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table("flowstyle") + op.drop_index(op.f("ix_flow_name"), table_name="flow") + op.drop_index(op.f("ix_flow_description"), table_name="flow") + op.drop_table("flow") + # ### end Alembic commands ### diff --git a/src/backend/langflow/database/base.py b/src/backend/langflow/database/base.py index 546c341c1..518b95d15 100644 --- a/src/backend/langflow/database/base.py +++ b/src/backend/langflow/database/base.py @@ -1,8 +1,11 @@ from contextlib import contextmanager import os - +from pathlib import Path +from langflow.database import models # noqa from sqlmodel import SQLModel, Session, create_engine from langflow.utils.logger import logger +from alembic.config import Config +from alembic import command class Engine: @@ -60,10 +63,67 @@ def create_db_and_tables(): logger.debug("Database and tables created successfully") +class DatabaseManager: + def __init__(self, database_url: str): + self.database_url = database_url + # This file is in langflow.database.base.py + # the ini is in langflow + self.script_location = Path(__file__).parent.parent / "alembic" + self.alembic_cfg_path = Path(__file__).parent.parent / "alembic.ini" + self.engine = create_engine(database_url) + + def __enter__(self): + self._session = Session(self.engine) + return self._session + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is not None: # If an exception has been raised + logger.error( + f"Session rollback because of exception: {exc_type.__name__} {exc_value}" + ) + self._session.rollback() + else: + self._session.commit() + self._session.close() + + def get_session(self): + with Session(self.engine) as session: + yield session + + def run_migrations(self): + logger.info( + f"Running DB migrations in {self.script_location} on {self.database_url}" + ) + alembic_cfg = Config() + alembic_cfg.set_main_option("script_location", str(self.script_location)) + alembic_cfg.set_main_option("sqlalchemy.url", self.database_url) + command.upgrade(alembic_cfg, "head") + + 
def create_db_and_tables(self): + logger.debug("Creating database and tables") + try: + SQLModel.metadata.create_all(self.engine) + except Exception as exc: + logger.error(f"Error creating database and tables: {exc}") + raise RuntimeError("Error creating database and tables") from exc + + # Now check if the table "flow" exists, if not, something went wrong + # and we need to create the tables again. + from sqlalchemy import inspect + + inspector = inspect(self.engine) + if "flow" not in inspector.get_table_names(): + logger.error("Something went wrong creating the database and tables.") + logger.error("Please check your database settings.") + raise RuntimeError("Something went wrong creating the database and tables.") + else: + logger.debug("Database and tables created successfully") + + @contextmanager -def session_getter(): +def session_getter(db_manager: DatabaseManager): try: - session = Session(Engine.get()) + session = Session(DatabaseManager.engine) yield session except Exception as e: print("Session rollback because of exception:", e) @@ -71,8 +131,3 @@ def session_getter(): raise finally: session.close() - - -def get_session(): - with session_getter() as session: - yield session diff --git a/src/backend/langflow/database/models/__init__.py b/src/backend/langflow/database/models/__init__.py index e69de29bb..da47bc5fe 100644 --- a/src/backend/langflow/database/models/__init__.py +++ b/src/backend/langflow/database/models/__init__.py @@ -0,0 +1,4 @@ +from .flow import Flow + + +__all__ = ["Flow"] From d566a86ed07b8802a83d9b33c2378fc1b1821bed Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:03:56 -0300 Subject: [PATCH 17/90] =?UTF-8?q?=F0=9F=94=A5=20refactor(cache):=20remove?= =?UTF-8?q?=20unused=20cache=20files=20and=20classes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The following files and classes were removed: - `src/backend/langflow/cache/__init__.py`: Removed unused 
import statements and `__all__` variable. - `src/backend/langflow/cache/base.py`: Removed unused `BaseCache` class. - `src/backend/langflow/cache/flow.py`: Removed unused `InMemoryCache` class. - `src/backend/langflow/cache/manager.py`: Removed unused `Subject`, `AsyncSubject`, and `CacheManager` classes. These files and classes were removed to clean up the codebase and remove unused functionality. 🔥 refactor(utils.py): remove unused code and dependencies in utils.py module 🔥 refactor(chat): remove unused chat module and its configuration class 🔥 refactor(chat/manager.py): remove unused imports and classes from chat manager module 🔥 refactor(chat/utils.py): remove unused imports and function from chat utils module 🔥 refactor(database/__init__.py): remove empty file 🔥 refactor(database): remove unused database files and models 🔥 refactor(database): remove unused database files and models to improve code organization and reduce clutter --- src/backend/langflow/cache/__init__.py | 7 - src/backend/langflow/cache/base.py | 84 ------- src/backend/langflow/cache/flow.py | 146 ------------ src/backend/langflow/cache/manager.py | 150 ------------ src/backend/langflow/cache/utils.py | 179 --------------- src/backend/langflow/chat/__init__.py | 0 src/backend/langflow/chat/config.py | 2 - src/backend/langflow/chat/manager.py | 217 ------------------ src/backend/langflow/chat/utils.py | 37 --- src/backend/langflow/database/__init__.py | 0 src/backend/langflow/database/base.py | 133 ----------- .../langflow/database/models/__init__.py | 4 - src/backend/langflow/database/models/base.py | 14 -- .../langflow/database/models/component.py | 29 --- src/backend/langflow/database/models/flow.py | 60 ----- .../langflow/database/models/flow_style.py | 33 --- 16 files changed, 1095 deletions(-) delete mode 100644 src/backend/langflow/cache/__init__.py delete mode 100644 src/backend/langflow/cache/base.py delete mode 100644 src/backend/langflow/cache/flow.py delete mode 100644 
src/backend/langflow/cache/manager.py delete mode 100644 src/backend/langflow/cache/utils.py delete mode 100644 src/backend/langflow/chat/__init__.py delete mode 100644 src/backend/langflow/chat/config.py delete mode 100644 src/backend/langflow/chat/manager.py delete mode 100644 src/backend/langflow/chat/utils.py delete mode 100644 src/backend/langflow/database/__init__.py delete mode 100644 src/backend/langflow/database/base.py delete mode 100644 src/backend/langflow/database/models/__init__.py delete mode 100644 src/backend/langflow/database/models/base.py delete mode 100644 src/backend/langflow/database/models/component.py delete mode 100644 src/backend/langflow/database/models/flow.py delete mode 100644 src/backend/langflow/database/models/flow_style.py diff --git a/src/backend/langflow/cache/__init__.py b/src/backend/langflow/cache/__init__.py deleted file mode 100644 index 723aa9e18..000000000 --- a/src/backend/langflow/cache/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from langflow.cache.manager import cache_manager -from langflow.cache.flow import InMemoryCache - -__all__ = [ - "cache_manager", - "InMemoryCache", -] diff --git a/src/backend/langflow/cache/base.py b/src/backend/langflow/cache/base.py deleted file mode 100644 index 88cb3a1da..000000000 --- a/src/backend/langflow/cache/base.py +++ /dev/null @@ -1,84 +0,0 @@ -import abc - - -class BaseCache(abc.ABC): - """ - Abstract base class for a cache. - """ - - @abc.abstractmethod - def get(self, key): - """ - Retrieve an item from the cache. - - Args: - key: The key of the item to retrieve. - - Returns: - The value associated with the key, or None if the key is not found. - """ - - @abc.abstractmethod - def set(self, key, value): - """ - Add an item to the cache. - - Args: - key: The key of the item. - value: The value to cache. - """ - - @abc.abstractmethod - def delete(self, key): - """ - Remove an item from the cache. - - Args: - key: The key of the item to remove. 
- """ - - @abc.abstractmethod - def clear(self): - """ - Clear all items from the cache. - """ - - @abc.abstractmethod - def __contains__(self, key): - """ - Check if the key is in the cache. - - Args: - key: The key of the item to check. - - Returns: - True if the key is in the cache, False otherwise. - """ - - @abc.abstractmethod - def __getitem__(self, key): - """ - Retrieve an item from the cache using the square bracket notation. - - Args: - key: The key of the item to retrieve. - """ - - @abc.abstractmethod - def __setitem__(self, key, value): - """ - Add an item to the cache using the square bracket notation. - - Args: - key: The key of the item. - value: The value to cache. - """ - - @abc.abstractmethod - def __delitem__(self, key): - """ - Remove an item from the cache using the square bracket notation. - - Args: - key: The key of the item to remove. - """ diff --git a/src/backend/langflow/cache/flow.py b/src/backend/langflow/cache/flow.py deleted file mode 100644 index 6d8fee977..000000000 --- a/src/backend/langflow/cache/flow.py +++ /dev/null @@ -1,146 +0,0 @@ -import threading -import time -from collections import OrderedDict - -from langflow.cache.base import BaseCache - - -class InMemoryCache(BaseCache): - """ - A simple in-memory cache using an OrderedDict. - - This cache supports setting a maximum size and expiration time for cached items. - When the cache is full, it uses a Least Recently Used (LRU) eviction policy. - Thread-safe using a threading Lock. - - Attributes: - max_size (int, optional): Maximum number of items to store in the cache. - expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. 
- - Example: - - cache = InMemoryCache(max_size=3, expiration_time=5) - - # setting cache values - cache.set("a", 1) - cache.set("b", 2) - cache["c"] = 3 - - # getting cache values - a = cache.get("a") - b = cache["b"] - """ - - def __init__(self, max_size=None, expiration_time=60 * 60): - """ - Initialize a new InMemoryCache instance. - - Args: - max_size (int, optional): Maximum number of items to store in the cache. - expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. - """ - self._cache = OrderedDict() - self._lock = threading.Lock() - self.max_size = max_size - self.expiration_time = expiration_time - - def get(self, key): - """ - Retrieve an item from the cache. - - Args: - key: The key of the item to retrieve. - - Returns: - The value associated with the key, or None if the key is not found or the item has expired. - """ - with self._lock: - if key in self._cache: - item = self._cache.pop(key) - if ( - self.expiration_time is None - or time.time() - item["time"] < self.expiration_time - ): - # Move the key to the end to make it recently used - self._cache[key] = item - return item["value"] - else: - self.delete(key) - return None - - def set(self, key, value): - """ - Add an item to the cache. - - If the cache is full, the least recently used item is evicted. - - Args: - key: The key of the item. - value: The value to cache. - """ - with self._lock: - if key in self._cache: - # Remove existing key before re-inserting to update order - self.delete(key) - elif self.max_size and len(self._cache) >= self.max_size: - # Remove least recently used item - self._cache.popitem(last=False) - self._cache[key] = {"value": value, "time": time.time()} - - def get_or_set(self, key, value): - """ - Retrieve an item from the cache. If the item does not exist, set it with the provided value. - - Args: - key: The key of the item. - value: The value to cache if the item doesn't exist. 
- - Returns: - The cached value associated with the key. - """ - with self._lock: - if key in self._cache: - return self.get(key) - self.set(key, value) - return value - - def delete(self, key): - """ - Remove an item from the cache. - - Args: - key: The key of the item to remove. - """ - # with self._lock: - self._cache.pop(key, None) - - def clear(self): - """ - Clear all items from the cache. - """ - with self._lock: - self._cache.clear() - - def __contains__(self, key): - """Check if the key is in the cache.""" - return key in self._cache - - def __getitem__(self, key): - """Retrieve an item from the cache using the square bracket notation.""" - return self.get(key) - - def __setitem__(self, key, value): - """Add an item to the cache using the square bracket notation.""" - self.set(key, value) - - def __delitem__(self, key): - """Remove an item from the cache using the square bracket notation.""" - self.delete(key) - - def __len__(self): - """Return the number of items in the cache.""" - return len(self._cache) - - def __repr__(self): - """Return a string representation of the InMemoryCache instance.""" - return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})" diff --git a/src/backend/langflow/cache/manager.py b/src/backend/langflow/cache/manager.py deleted file mode 100644 index 13b281008..000000000 --- a/src/backend/langflow/cache/manager.py +++ /dev/null @@ -1,150 +0,0 @@ -from contextlib import contextmanager -from typing import Any, Awaitable, Callable, List, Optional - -import pandas as pd -from PIL import Image - - -class Subject: - """Base class for implementing the observer pattern.""" - - def __init__(self): - self.observers: List[Callable[[], None]] = [] - - def attach(self, observer: Callable[[], None]): - """Attach an observer to the subject.""" - self.observers.append(observer) - - def detach(self, observer: Callable[[], None]): - """Detach an observer from the subject.""" - self.observers.remove(observer) - - def 
notify(self): - """Notify all observers about an event.""" - for observer in self.observers: - if observer is None: - continue - observer() - - -class AsyncSubject: - """Base class for implementing the async observer pattern.""" - - def __init__(self): - self.observers: List[Callable[[], Awaitable]] = [] - - def attach(self, observer: Callable[[], Awaitable]): - """Attach an observer to the subject.""" - self.observers.append(observer) - - def detach(self, observer: Callable[[], Awaitable]): - """Detach an observer from the subject.""" - self.observers.remove(observer) - - async def notify(self): - """Notify all observers about an event.""" - for observer in self.observers: - if observer is None: - continue - await observer() - - -class CacheManager(Subject): - """Manages cache for different clients and notifies observers on changes.""" - - def __init__(self): - super().__init__() - self._cache = {} - self.current_client_id = None - self.current_cache = {} - - @contextmanager - def set_client_id(self, client_id: str): - """ - Context manager to set the current client_id and associated cache. - - Args: - client_id (str): The client identifier. - """ - previous_client_id = self.current_client_id - self.current_client_id = client_id - self.current_cache = self._cache.setdefault(client_id, {}) - try: - yield - finally: - self.current_client_id = previous_client_id - self.current_cache = self._cache.get(self.current_client_id, {}) - - def add(self, name: str, obj: Any, obj_type: str, extension: Optional[str] = None): - """ - Add an object to the current client's cache. - - Args: - name (str): The cache key. - obj (Any): The object to cache. - obj_type (str): The type of the object. 
- """ - object_extensions = { - "image": "png", - "pandas": "csv", - } - if obj_type in object_extensions: - _extension = object_extensions[obj_type] - else: - _extension = type(obj).__name__.lower() - self.current_cache[name] = { - "obj": obj, - "type": obj_type, - "extension": extension or _extension, - } - self.notify() - - def add_pandas(self, name: str, obj: Any): - """ - Add a pandas DataFrame or Series to the current client's cache. - - Args: - name (str): The cache key. - obj (Any): The pandas DataFrame or Series object. - """ - if isinstance(obj, (pd.DataFrame, pd.Series)): - self.add(name, obj.to_csv(), "pandas", extension="csv") - else: - raise ValueError("Object is not a pandas DataFrame or Series") - - def add_image(self, name: str, obj: Any, extension: str = "png"): - """ - Add a PIL Image to the current client's cache. - - Args: - name (str): The cache key. - obj (Any): The PIL Image object. - """ - if isinstance(obj, Image.Image): - self.add(name, obj, "image", extension=extension) - else: - raise ValueError("Object is not a PIL Image") - - def get(self, name: str): - """ - Get an object from the current client's cache. - - Args: - name (str): The cache key. - - Returns: - The cached object associated with the given cache key. - """ - return self.current_cache[name] - - def get_last(self): - """ - Get the last added item in the current client's cache. - - Returns: - The last added item in the cache. 
- """ - return list(self.current_cache.values())[-1] - - -cache_manager = CacheManager() diff --git a/src/backend/langflow/cache/utils.py b/src/backend/langflow/cache/utils.py deleted file mode 100644 index 3deabe9f4..000000000 --- a/src/backend/langflow/cache/utils.py +++ /dev/null @@ -1,179 +0,0 @@ -import base64 -import contextlib -import functools -import hashlib -import json -import os -import tempfile -from collections import OrderedDict -from pathlib import Path -from typing import Any, Dict -from appdirs import user_cache_dir - -CACHE: Dict[str, Any] = {} - -CACHE_DIR = user_cache_dir("langflow", "langflow") - - -def create_cache_folder(func): - def wrapper(*args, **kwargs): - # Get the destination folder - cache_path = Path(CACHE_DIR) / PREFIX - - # Create the destination folder if it doesn't exist - os.makedirs(cache_path, exist_ok=True) - - return func(*args, **kwargs) - - return wrapper - - -def memoize_dict(maxsize=128): - cache = OrderedDict() - - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - hashed = compute_dict_hash(args[0]) - key = (func.__name__, hashed, frozenset(kwargs.items())) - if key not in cache: - result = func(*args, **kwargs) - cache[key] = result - if len(cache) > maxsize: - cache.popitem(last=False) - else: - result = cache[key] - return result - - def clear_cache(): - cache.clear() - - wrapper.clear_cache = clear_cache # type: ignore - wrapper.cache = cache # type: ignore - return wrapper - - return decorator - - -PREFIX = "langflow_cache" - - -@create_cache_folder -def clear_old_cache_files(max_cache_size: int = 3): - cache_dir = Path(tempfile.gettempdir()) / PREFIX - cache_files = list(cache_dir.glob("*.dill")) - - if len(cache_files) > max_cache_size: - cache_files_sorted_by_mtime = sorted( - cache_files, key=lambda x: x.stat().st_mtime, reverse=True - ) - - for cache_file in cache_files_sorted_by_mtime[max_cache_size:]: - with contextlib.suppress(OSError): - os.remove(cache_file) - - -def 
compute_dict_hash(graph_data): - graph_data = filter_json(graph_data) - - cleaned_graph_json = json.dumps(graph_data, sort_keys=True) - return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest() - - -def filter_json(json_data): - filtered_data = json_data.copy() - - # Remove 'viewport' and 'chatHistory' keys - if "viewport" in filtered_data: - del filtered_data["viewport"] - if "chatHistory" in filtered_data: - del filtered_data["chatHistory"] - - # Filter nodes - if "nodes" in filtered_data: - for node in filtered_data["nodes"]: - if "position" in node: - del node["position"] - if "positionAbsolute" in node: - del node["positionAbsolute"] - if "selected" in node: - del node["selected"] - if "dragging" in node: - del node["dragging"] - - return filtered_data - - -@create_cache_folder -def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> str: - """ - Save a binary file to the specified folder. - - Args: - content: The content of the file as a bytes object. - file_name: The name of the file, including its extension. - - Returns: - The path to the saved file. - """ - if not any(file_name.endswith(suffix) for suffix in accepted_types): - raise ValueError(f"File {file_name} is not accepted") - - # Get the destination folder - cache_path = Path(CACHE_DIR) / PREFIX - if not content: - raise ValueError("Please, reload the file in the loader.") - data = content.split(",")[1] - decoded_bytes = base64.b64decode(data) - - # Create the full file path - file_path = os.path.join(cache_path, file_name) - - # Save the binary content to the file - with open(file_path, "wb") as file: - file.write(decoded_bytes) - - return file_path - - -@create_cache_folder -def save_uploaded_file(file, folder_name): - """ - Save an uploaded file to the specified folder with a hash of its content as the file name. - - Args: - file: The uploaded file object. - folder_name: The name of the folder to save the file in. - - Returns: - The path to the saved file. 
- """ - cache_path = Path(CACHE_DIR) - folder_path = cache_path / folder_name - - # Create the folder if it doesn't exist - if not folder_path.exists(): - folder_path.mkdir() - - # Create a hash of the file content - sha256_hash = hashlib.sha256() - # Reset the file cursor to the beginning of the file - file.seek(0) - # Iterate over the uploaded file in small chunks to conserve memory - while chunk := file.read(8192): # Read 8KB at a time (adjust as needed) - sha256_hash.update(chunk) - - # Use the hex digest of the hash as the file name - hex_dig = sha256_hash.hexdigest() - file_name = hex_dig - - # Reset the file cursor to the beginning of the file - file.seek(0) - - # Save the file with the hash as its name - file_path = folder_path / file_name - with open(file_path, "wb") as new_file: - while chunk := file.read(8192): - new_file.write(chunk) - - return file_path diff --git a/src/backend/langflow/chat/__init__.py b/src/backend/langflow/chat/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/langflow/chat/config.py b/src/backend/langflow/chat/config.py deleted file mode 100644 index 274f4d5bd..000000000 --- a/src/backend/langflow/chat/config.py +++ /dev/null @@ -1,2 +0,0 @@ -class ChatConfig: - streaming: bool = True diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py deleted file mode 100644 index 1e93174e2..000000000 --- a/src/backend/langflow/chat/manager.py +++ /dev/null @@ -1,217 +0,0 @@ -from collections import defaultdict -from fastapi import WebSocket, status -from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse -from langflow.cache import cache_manager -from langflow.cache.manager import Subject -from langflow.chat.utils import process_graph -from langflow.interface.utils import pil_to_base64 -from langflow.utils.logger import logger - - -import asyncio -import json -from typing import Any, Dict, List - -from langflow.cache.flow import InMemoryCache - - 
-class ChatHistory(Subject): - def __init__(self): - super().__init__() - self.history: Dict[str, List[ChatMessage]] = defaultdict(list) - - def add_message(self, client_id: str, message: ChatMessage): - """Add a message to the chat history.""" - - self.history[client_id].append(message) - - if not isinstance(message, FileResponse): - self.notify() - - def get_history(self, client_id: str, filter_messages=True) -> List[ChatMessage]: - """Get the chat history for a client.""" - if history := self.history.get(client_id, []): - if filter_messages: - return [msg for msg in history if msg.type not in ["start", "stream"]] - return history - else: - return [] - - def empty_history(self, client_id: str): - """Empty the chat history for a client.""" - self.history[client_id] = [] - - -class ChatManager: - def __init__(self): - self.active_connections: Dict[str, WebSocket] = {} - self.chat_history = ChatHistory() - self.cache_manager = cache_manager - self.cache_manager.attach(self.update) - self.in_memory_cache = InMemoryCache() - - def on_chat_history_update(self): - """Send the last chat message to the client.""" - client_id = self.cache_manager.current_client_id - if client_id in self.active_connections: - chat_response = self.chat_history.get_history( - client_id, filter_messages=False - )[-1] - if chat_response.is_bot: - # Process FileResponse - if isinstance(chat_response, FileResponse): - # If data_type is pandas, convert to csv - if chat_response.data_type == "pandas": - chat_response.data = chat_response.data.to_csv() - elif chat_response.data_type == "image": - # Base64 encode the image - chat_response.data = pil_to_base64(chat_response.data) - # get event loop - loop = asyncio.get_event_loop() - - coroutine = self.send_json(client_id, chat_response) - asyncio.run_coroutine_threadsafe(coroutine, loop) - - def update(self): - if self.cache_manager.current_client_id in self.active_connections: - self.last_cached_object_dict = self.cache_manager.get_last() - # Add a 
new ChatResponse with the data - chat_response = FileResponse( - message=None, - type="file", - data=self.last_cached_object_dict["obj"], - data_type=self.last_cached_object_dict["type"], - ) - - self.chat_history.add_message( - self.cache_manager.current_client_id, chat_response - ) - - async def connect(self, client_id: str, websocket: WebSocket): - await websocket.accept() - self.active_connections[client_id] = websocket - - def disconnect(self, client_id: str): - self.active_connections.pop(client_id, None) - - async def send_message(self, client_id: str, message: str): - websocket = self.active_connections[client_id] - await websocket.send_text(message) - - async def send_json(self, client_id: str, message: ChatMessage): - websocket = self.active_connections[client_id] - await websocket.send_json(message.dict()) - - async def close_connection(self, client_id: str, code: int, reason: str): - if websocket := self.active_connections[client_id]: - try: - await websocket.close(code=code, reason=reason) - self.disconnect(client_id) - except RuntimeError as exc: - # This is to catch the following error: - # Unexpected ASGI message 'websocket.close', after sending 'websocket.close' - if "after sending" in str(exc): - logger.error(f"Error closing connection: {exc}") - - async def process_message( - self, client_id: str, payload: Dict, langchain_object: Any - ): - # Process the graph data and chat message - chat_inputs = payload.pop("inputs", "") - chat_inputs = ChatMessage(message=chat_inputs) - self.chat_history.add_message(client_id, chat_inputs) - - # graph_data = payload - start_resp = ChatResponse(message=None, type="start", intermediate_steps="") - await self.send_json(client_id, start_resp) - - # is_first_message = len(self.chat_history.get_history(client_id=client_id)) <= 1 - # Generate result and thought - try: - logger.debug("Generating result and thought") - - result, intermediate_steps = await process_graph( - langchain_object=langchain_object, - 
chat_inputs=chat_inputs, - websocket=self.active_connections[client_id], - ) - except Exception as e: - # Log stack trace - logger.exception(e) - self.chat_history.empty_history(client_id) - raise e - # Send a response back to the frontend, if needed - intermediate_steps = intermediate_steps or "" - history = self.chat_history.get_history(client_id, filter_messages=False) - file_responses = [] - if history: - # Iterate backwards through the history - for msg in reversed(history): - if isinstance(msg, FileResponse): - if msg.data_type == "image": - # Base64 encode the image - if isinstance(msg.data, str): - continue - msg.data = pil_to_base64(msg.data) - file_responses.append(msg) - if msg.type == "start": - break - - response = ChatResponse( - message=result, - intermediate_steps=intermediate_steps.strip(), - type="end", - files=file_responses, - ) - await self.send_json(client_id, response) - self.chat_history.add_message(client_id, response) - - def set_cache(self, client_id: str, langchain_object: Any) -> bool: - """ - Set the cache for a client. 
- """ - - self.in_memory_cache.set(client_id, langchain_object) - return client_id in self.in_memory_cache - - async def handle_websocket(self, client_id: str, websocket: WebSocket): - await self.connect(client_id, websocket) - - try: - chat_history = self.chat_history.get_history(client_id) - # iterate and make BaseModel into dict - chat_history = [chat.dict() for chat in chat_history] - await websocket.send_json(chat_history) - - while True: - json_payload = await websocket.receive_json() - try: - payload = json.loads(json_payload) - except TypeError: - payload = json_payload - if "clear_history" in payload: - self.chat_history.history[client_id] = [] - continue - - with self.cache_manager.set_client_id(client_id): - langchain_object = self.in_memory_cache.get(client_id) - await self.process_message(client_id, payload, langchain_object) - - except Exception as exc: - # Handle any exceptions that might occur - logger.error(f"Error handling websocket: {exc}") - await self.close_connection( - client_id=client_id, - code=status.WS_1011_INTERNAL_ERROR, - reason=str(exc)[:120], - ) - finally: - try: - await self.close_connection( - client_id=client_id, - code=status.WS_1000_NORMAL_CLOSURE, - reason="Client disconnected", - ) - except Exception as exc: - logger.error(f"Error closing connection: {exc}") - self.disconnect(client_id) diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/chat/utils.py deleted file mode 100644 index 7db65b8e3..000000000 --- a/src/backend/langflow/chat/utils.py +++ /dev/null @@ -1,37 +0,0 @@ -from fastapi import WebSocket -from langflow.api.v1.schemas import ChatMessage -from langflow.processing.base import get_result_and_steps -from langflow.interface.utils import try_setting_streaming_options -from langflow.utils.logger import logger - - -async def process_graph( - langchain_object, - chat_inputs: ChatMessage, - websocket: WebSocket, -): - langchain_object = try_setting_streaming_options(langchain_object, websocket) - 
logger.debug("Loaded langchain object") - - if langchain_object is None: - # Raise user facing error - raise ValueError( - "There was an error loading the langchain_object. Please, check all the nodes and try again." - ) - - # Generate result and thought - try: - if not chat_inputs.message: - logger.debug("No message provided") - raise ValueError("No message provided") - - logger.debug("Generating result and thought") - result, intermediate_steps = await get_result_and_steps( - langchain_object, chat_inputs.message, websocket=websocket - ) - logger.debug("Generated result and intermediate_steps") - return result, intermediate_steps - except Exception as e: - # Log stack trace - logger.exception(e) - raise e diff --git a/src/backend/langflow/database/__init__.py b/src/backend/langflow/database/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/langflow/database/base.py b/src/backend/langflow/database/base.py deleted file mode 100644 index 518b95d15..000000000 --- a/src/backend/langflow/database/base.py +++ /dev/null @@ -1,133 +0,0 @@ -from contextlib import contextmanager -import os -from pathlib import Path -from langflow.database import models # noqa -from sqlmodel import SQLModel, Session, create_engine -from langflow.utils.logger import logger -from alembic.config import Config -from alembic import command - - -class Engine: - _instance = None - - @classmethod - def get(cls): - logger.debug("Getting database engine") - if cls._instance is None: - cls.create() - return cls._instance - - @classmethod - def create(cls): - logger.debug("Creating database engine") - from langflow.settings import settings - - if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): - settings.DATABASE_URL = langflow_database_url - logger.debug("Using LANGFLOW_DATABASE_URL") - - if settings.DATABASE_URL and settings.DATABASE_URL.startswith("sqlite"): - connect_args = {"check_same_thread": False} - else: - connect_args = {} - if not 
settings.DATABASE_URL: - raise RuntimeError("No database_url provided") - cls._instance = create_engine(settings.DATABASE_URL, connect_args=connect_args) - - @classmethod - def update(cls): - logger.debug("Updating database engine") - cls._instance = None - cls.create() - - -def create_db_and_tables(): - logger.debug("Creating database and tables") - try: - SQLModel.metadata.create_all(Engine.get()) - except Exception as exc: - logger.error(f"Error creating database and tables: {exc}") - raise RuntimeError("Error creating database and tables") from exc - # Now check if the table Flow exists, if not, something went wrong - # and we need to create the tables again. - from sqlalchemy import inspect - - inspector = inspect(Engine.get()) - if "flow" not in inspector.get_table_names(): - logger.error("Something went wrong creating the database and tables.") - logger.error("Please check your database settings.") - - raise RuntimeError("Something went wrong creating the database and tables.") - else: - logger.debug("Database and tables created successfully") - - -class DatabaseManager: - def __init__(self, database_url: str): - self.database_url = database_url - # This file is in langflow.database.base.py - # the ini is in langflow - self.script_location = Path(__file__).parent.parent / "alembic" - self.alembic_cfg_path = Path(__file__).parent.parent / "alembic.ini" - self.engine = create_engine(database_url) - - def __enter__(self): - self._session = Session(self.engine) - return self._session - - def __exit__(self, exc_type, exc_value, traceback): - if exc_type is not None: # If an exception has been raised - logger.error( - f"Session rollback because of exception: {exc_type.__name__} {exc_value}" - ) - self._session.rollback() - else: - self._session.commit() - self._session.close() - - def get_session(self): - with Session(self.engine) as session: - yield session - - def run_migrations(self): - logger.info( - f"Running DB migrations in {self.script_location} on 
{self.database_url}" - ) - alembic_cfg = Config() - alembic_cfg.set_main_option("script_location", str(self.script_location)) - alembic_cfg.set_main_option("sqlalchemy.url", self.database_url) - command.upgrade(alembic_cfg, "head") - - def create_db_and_tables(self): - logger.debug("Creating database and tables") - try: - SQLModel.metadata.create_all(self.engine) - except Exception as exc: - logger.error(f"Error creating database and tables: {exc}") - raise RuntimeError("Error creating database and tables") from exc - - # Now check if the table "flow" exists, if not, something went wrong - # and we need to create the tables again. - from sqlalchemy import inspect - - inspector = inspect(self.engine) - if "flow" not in inspector.get_table_names(): - logger.error("Something went wrong creating the database and tables.") - logger.error("Please check your database settings.") - raise RuntimeError("Something went wrong creating the database and tables.") - else: - logger.debug("Database and tables created successfully") - - -@contextmanager -def session_getter(db_manager: DatabaseManager): - try: - session = Session(DatabaseManager.engine) - yield session - except Exception as e: - print("Session rollback because of exception:", e) - session.rollback() - raise - finally: - session.close() diff --git a/src/backend/langflow/database/models/__init__.py b/src/backend/langflow/database/models/__init__.py deleted file mode 100644 index da47bc5fe..000000000 --- a/src/backend/langflow/database/models/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .flow import Flow - - -__all__ = ["Flow"] diff --git a/src/backend/langflow/database/models/base.py b/src/backend/langflow/database/models/base.py deleted file mode 100644 index e20895b93..000000000 --- a/src/backend/langflow/database/models/base.py +++ /dev/null @@ -1,14 +0,0 @@ -from sqlmodel import SQLModel -import orjson - - -def orjson_dumps(v, *, default): - # orjson.dumps returns bytes, to match standard json.dumps we need to 
decode - return orjson.dumps(v, default=default).decode() - - -class SQLModelSerializable(SQLModel): - class Config: - orm_mode = True - json_loads = orjson.loads - json_dumps = orjson_dumps diff --git a/src/backend/langflow/database/models/component.py b/src/backend/langflow/database/models/component.py deleted file mode 100644 index bb2408cdb..000000000 --- a/src/backend/langflow/database/models/component.py +++ /dev/null @@ -1,29 +0,0 @@ -from langflow.database.models.base import SQLModelSerializable, SQLModel -from sqlmodel import Field -from typing import Optional -from datetime import datetime -import uuid - - -class Component(SQLModelSerializable, table=True): - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - frontend_node_id: uuid.UUID = Field(index=True) - name: str = Field(index=True) - description: Optional[str] = Field(default=None) - python_code: Optional[str] = Field(default=None) - return_type: Optional[str] = Field(default=None) - is_disabled: bool = Field(default=False) - is_read_only: bool = Field(default=False) - create_at: datetime = Field(default_factory=datetime.utcnow) - update_at: datetime = Field(default_factory=datetime.utcnow) - - -class ComponentModel(SQLModel): - id: uuid.UUID = Field(default_factory=uuid.uuid4) - frontend_node_id: uuid.UUID = Field(default=uuid.uuid4()) - name: str = Field(default="") - description: Optional[str] = None - python_code: Optional[str] = None - return_type: Optional[str] = None - is_disabled: bool = False - is_read_only: bool = False diff --git a/src/backend/langflow/database/models/flow.py b/src/backend/langflow/database/models/flow.py deleted file mode 100644 index f9e3aa249..000000000 --- a/src/backend/langflow/database/models/flow.py +++ /dev/null @@ -1,60 +0,0 @@ -# Path: src/backend/langflow/database/models/flow.py - -from langflow.database.models.base import SQLModelSerializable -from pydantic import validator -from sqlmodel import Field, Relationship, JSON, Column -from uuid 
import UUID, uuid4 -from typing import Dict, Optional - -# if TYPE_CHECKING: -from langflow.database.models.flow_style import FlowStyle, FlowStyleRead - - -class FlowBase(SQLModelSerializable): - name: str = Field(index=True) - description: Optional[str] = Field(index=True) - data: Optional[Dict] = Field(default=None) - - @validator("data") - def validate_json(v): - # dict_keys(['description', 'name', 'id', 'data']) - if not v: - return v - if not isinstance(v, dict): - raise ValueError("Flow must be a valid JSON") - - # data must contain nodes and edges - if "nodes" not in v.keys(): - raise ValueError("Flow must have nodes") - if "edges" not in v.keys(): - raise ValueError("Flow must have edges") - - return v - - -class Flow(FlowBase, table=True): - id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) - style: Optional["FlowStyle"] = Relationship( - back_populates="flow", - # use "uselist=False" to make it a one-to-one relationship - sa_relationship_kwargs={"uselist": False}, - ) - - -class FlowCreate(FlowBase): - pass - - -class FlowRead(FlowBase): - id: UUID - - -class FlowReadWithStyle(FlowRead): - style: Optional["FlowStyleRead"] = None - - -class FlowUpdate(SQLModelSerializable): - name: Optional[str] = None - description: Optional[str] = None - data: Optional[Dict] = None diff --git a/src/backend/langflow/database/models/flow_style.py b/src/backend/langflow/database/models/flow_style.py deleted file mode 100644 index fe53799fe..000000000 --- a/src/backend/langflow/database/models/flow_style.py +++ /dev/null @@ -1,33 +0,0 @@ -# Path: src/backend/langflow/database/models/flowstyle.py - -from langflow.database.models.base import SQLModelSerializable -from sqlmodel import Field, Relationship -from uuid import UUID, uuid4 -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from langflow.database.models.flow import Flow - - -class 
FlowStyleBase(SQLModelSerializable): - color: str - emoji: str - flow_id: UUID = Field(default=None, foreign_key="flow.id") - - -class FlowStyle(FlowStyleBase, table=True): - id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - flow: "Flow" = Relationship(back_populates="style") - - -class FlowStyleUpdate(SQLModelSerializable): - color: Optional[str] = None - emoji: Optional[str] = None - - -class FlowStyleCreate(FlowStyleBase): - pass - - -class FlowStyleRead(FlowStyleBase): - id: UUID From 63a9b01bbc6aee5f957eaca92c4ff267aef15658 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:10:24 -0300 Subject: [PATCH 18/90] =?UTF-8?q?=E2=9C=A8=20feat(services):=20add=20suppo?= =?UTF-8?q?rt=20for=20service=20manager=20and=20service=20schema?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Added `__init__.py` file to the `services` directory to make it a package. - Created `Service` class in `base.py` to serve as a base class for different services. - Implemented `ServiceFactory` class in `factory.py` to create instances of services. - Implemented `ServiceManager` class in `manager.py` to manage the creation and retrieval of services. - Created `ServiceType` enum in `schema.py` to define the different types of services that can be registered with the service manager. - Added `initialize_services` function in `manager.py` to initialize all the services needed. The purpose of these changes is to provide a modular and extensible architecture for managing different services in the application. The `ServiceManager` allows for easy creation and retrieval of services, while the `ServiceType` enum provides a standardized way to refer to different types of services. The `Service` base class and `ServiceFactory` class provide a foundation for creating and managing specific services. 
--- src/backend/langflow/services/__init__.py | 4 ++ src/backend/langflow/services/base.py | 2 + src/backend/langflow/services/factory.py | 6 ++ src/backend/langflow/services/manager.py | 87 +++++++++++++++++++++++ src/backend/langflow/services/schema.py | 13 ++++ 5 files changed, 112 insertions(+) create mode 100644 src/backend/langflow/services/__init__.py create mode 100644 src/backend/langflow/services/base.py create mode 100644 src/backend/langflow/services/factory.py create mode 100644 src/backend/langflow/services/manager.py create mode 100644 src/backend/langflow/services/schema.py diff --git a/src/backend/langflow/services/__init__.py b/src/backend/langflow/services/__init__.py new file mode 100644 index 000000000..8ac74b5b9 --- /dev/null +++ b/src/backend/langflow/services/__init__.py @@ -0,0 +1,4 @@ +from .manager import service_manager +from .schema import ServiceType + +__all__ = ["service_manager", "ServiceType"] diff --git a/src/backend/langflow/services/base.py b/src/backend/langflow/services/base.py new file mode 100644 index 000000000..6bca6c4e2 --- /dev/null +++ b/src/backend/langflow/services/base.py @@ -0,0 +1,2 @@ +class Service: + name: str diff --git a/src/backend/langflow/services/factory.py b/src/backend/langflow/services/factory.py new file mode 100644 index 000000000..c37f4e9c2 --- /dev/null +++ b/src/backend/langflow/services/factory.py @@ -0,0 +1,6 @@ +class ServiceFactory: + def __init__(self, service_class): + self.service_class = service_class + + def create(self, *args, **kwargs): + raise NotImplementedError diff --git a/src/backend/langflow/services/manager.py b/src/backend/langflow/services/manager.py new file mode 100644 index 000000000..1606b3a82 --- /dev/null +++ b/src/backend/langflow/services/manager.py @@ -0,0 +1,87 @@ +from langflow.services.schema import ServiceType +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from langflow.services.factory import ServiceFactory + + +class ServiceManager: + """ + Manages the 
creation of different services. + """ + + def __init__(self): + self.services = {} + self.factories = {} + + def register_factory(self, service_factory: "ServiceFactory"): + """ + Registers a new factory. + """ + self.factories[service_factory.service_class.name] = service_factory + + def get(self, service_name: ServiceType): + """ + Get (or create) a service by its name. + """ + if service_name not in self.services: + self._create_service(service_name) + + return self.services[service_name] + + def _create_service(self, service_name: ServiceType): + """ + Create a new service given its name. + """ + self._validate_service_creation(service_name) + + if service_name == ServiceType.SETTINGS_MANAGER: + self.services[service_name] = self.factories[service_name].create() + else: + settings_service = self.get(ServiceType.SETTINGS_MANAGER) + self.services[service_name] = self.factories[service_name].create( + settings_service + ) + + def _validate_service_creation(self, service_name: ServiceType): + """ + Validate whether the service can be created. + """ + if service_name not in self.factories: + raise ValueError( + f"No factory registered for the service class '{service_name.name}'" + ) + + if ( + ServiceType.SETTINGS_MANAGER not in self.factories + and service_name != ServiceType.SETTINGS_MANAGER + ): + raise ValueError( + f"Cannot create service '{service_name.name}' before the settings service" + ) + + def update(self, service_name: ServiceType): + """ + Update a service by its name. + """ + if service_name in self.services: + self.services.pop(service_name, None) + self.get(service_name) + + +service_manager = ServiceManager() + + +def initialize_services(): + """ + Initialize all the services needed. 
+ """ + from langflow.services.database import factory as database_factory + from langflow.services.cache import factory as cache_factory + from langflow.services.chat import factory as chat_factory + from langflow.services.settings import factory as settings_factory + + service_manager.register_factory(settings_factory.SettingsManagerFactory()) + service_manager.register_factory(database_factory.DatabaseManagerFactory()) + service_manager.register_factory(cache_factory.CacheManagerFactory()) + service_manager.register_factory(chat_factory.ChatManagerFactory()) diff --git a/src/backend/langflow/services/schema.py b/src/backend/langflow/services/schema.py new file mode 100644 index 000000000..695763afc --- /dev/null +++ b/src/backend/langflow/services/schema.py @@ -0,0 +1,13 @@ +from enum import Enum + + +class ServiceType(str, Enum): + """ + Enum for the different types of services that can be + registered with the service manager. + """ + + CACHE_MANAGER = "cache_manager" + SETTINGS_MANAGER = "settings_manager" + DATABASE_MANAGER = "database_manager" + CHAT_MANAGER = "chat_manager" From d5ad1522500a2095d1647a88a43cfee9bf0c0e36 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:12:27 -0300 Subject: [PATCH 19/90] =?UTF-8?q?=F0=9F=93=A6=20chore(cache):=20add=20cach?= =?UTF-8?q?e=20module=20with=20cache=20manager,=20factory,=20base=20cache,?= =?UTF-8?q?=20and=20in-memory=20cache=20implementations?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✨ feat(utils.py): add cache utility functions for memoization and file saving 🐛 fix(utils.py): fix cache folder creation to use the correct cache directory 🔧 chore(utils.py): refactor code to improve readability and remove unused imports --- .../langflow/services/cache/__init__.py | 11 ++ src/backend/langflow/services/cache/base.py | 84 ++++++++ .../langflow/services/cache/factory.py | 11 ++ src/backend/langflow/services/cache/flow.py | 146 
++++++++++++++ .../langflow/services/cache/manager.py | 153 +++++++++++++++ src/backend/langflow/services/cache/utils.py | 179 ++++++++++++++++++ 6 files changed, 584 insertions(+) create mode 100644 src/backend/langflow/services/cache/__init__.py create mode 100644 src/backend/langflow/services/cache/base.py create mode 100644 src/backend/langflow/services/cache/factory.py create mode 100644 src/backend/langflow/services/cache/flow.py create mode 100644 src/backend/langflow/services/cache/manager.py create mode 100644 src/backend/langflow/services/cache/utils.py diff --git a/src/backend/langflow/services/cache/__init__.py b/src/backend/langflow/services/cache/__init__.py new file mode 100644 index 000000000..79e143807 --- /dev/null +++ b/src/backend/langflow/services/cache/__init__.py @@ -0,0 +1,11 @@ +from . import factory, manager +from langflow.services.cache.manager import cache_manager +from langflow.services.cache.flow import InMemoryCache + + +__all__ = [ + "cache_manager", + "factory", + "manager", + "InMemoryCache", +] diff --git a/src/backend/langflow/services/cache/base.py b/src/backend/langflow/services/cache/base.py new file mode 100644 index 000000000..88cb3a1da --- /dev/null +++ b/src/backend/langflow/services/cache/base.py @@ -0,0 +1,84 @@ +import abc + + +class BaseCache(abc.ABC): + """ + Abstract base class for a cache. + """ + + @abc.abstractmethod + def get(self, key): + """ + Retrieve an item from the cache. + + Args: + key: The key of the item to retrieve. + + Returns: + The value associated with the key, or None if the key is not found. + """ + + @abc.abstractmethod + def set(self, key, value): + """ + Add an item to the cache. + + Args: + key: The key of the item. + value: The value to cache. + """ + + @abc.abstractmethod + def delete(self, key): + """ + Remove an item from the cache. + + Args: + key: The key of the item to remove. + """ + + @abc.abstractmethod + def clear(self): + """ + Clear all items from the cache. 
+ """ + + @abc.abstractmethod + def __contains__(self, key): + """ + Check if the key is in the cache. + + Args: + key: The key of the item to check. + + Returns: + True if the key is in the cache, False otherwise. + """ + + @abc.abstractmethod + def __getitem__(self, key): + """ + Retrieve an item from the cache using the square bracket notation. + + Args: + key: The key of the item to retrieve. + """ + + @abc.abstractmethod + def __setitem__(self, key, value): + """ + Add an item to the cache using the square bracket notation. + + Args: + key: The key of the item. + value: The value to cache. + """ + + @abc.abstractmethod + def __delitem__(self, key): + """ + Remove an item from the cache using the square bracket notation. + + Args: + key: The key of the item to remove. + """ diff --git a/src/backend/langflow/services/cache/factory.py b/src/backend/langflow/services/cache/factory.py new file mode 100644 index 000000000..77f8d58d1 --- /dev/null +++ b/src/backend/langflow/services/cache/factory.py @@ -0,0 +1,11 @@ +from langflow.services.cache.manager import CacheManager +from langflow.services.factory import ServiceFactory + + +class CacheManagerFactory(ServiceFactory): + def __init__(self): + super().__init__(CacheManager) + + def create(self, settings_service): + # Here you would have logic to create and configure a CacheManager + return CacheManager() diff --git a/src/backend/langflow/services/cache/flow.py b/src/backend/langflow/services/cache/flow.py new file mode 100644 index 000000000..0c10c51e1 --- /dev/null +++ b/src/backend/langflow/services/cache/flow.py @@ -0,0 +1,146 @@ +import threading +import time +from collections import OrderedDict + +from langflow.services.cache.base import BaseCache + + +class InMemoryCache(BaseCache): + """ + A simple in-memory cache using an OrderedDict. + + This cache supports setting a maximum size and expiration time for cached items. + When the cache is full, it uses a Least Recently Used (LRU) eviction policy. 
+ Thread-safe using a threading Lock. + + Attributes: + max_size (int, optional): Maximum number of items to store in the cache. + expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. + + Example: + + cache = InMemoryCache(max_size=3, expiration_time=5) + + # setting cache values + cache.set("a", 1) + cache.set("b", 2) + cache["c"] = 3 + + # getting cache values + a = cache.get("a") + b = cache["b"] + """ + + def __init__(self, max_size=None, expiration_time=60 * 60): + """ + Initialize a new InMemoryCache instance. + + Args: + max_size (int, optional): Maximum number of items to store in the cache. + expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. + """ + self._cache = OrderedDict() + self._lock = threading.Lock() + self.max_size = max_size + self.expiration_time = expiration_time + + def get(self, key): + """ + Retrieve an item from the cache. + + Args: + key: The key of the item to retrieve. + + Returns: + The value associated with the key, or None if the key is not found or the item has expired. + """ + with self._lock: + if key in self._cache: + item = self._cache.pop(key) + if ( + self.expiration_time is None + or time.time() - item["time"] < self.expiration_time + ): + # Move the key to the end to make it recently used + self._cache[key] = item + return item["value"] + else: + self.delete(key) + return None + + def set(self, key, value): + """ + Add an item to the cache. + + If the cache is full, the least recently used item is evicted. + + Args: + key: The key of the item. + value: The value to cache. 
+ """ + with self._lock: + if key in self._cache: + # Remove existing key before re-inserting to update order + self.delete(key) + elif self.max_size and len(self._cache) >= self.max_size: + # Remove least recently used item + self._cache.popitem(last=False) + self._cache[key] = {"value": value, "time": time.time()} + + def get_or_set(self, key, value): + """ + Retrieve an item from the cache. If the item does not exist, set it with the provided value. + + Args: + key: The key of the item. + value: The value to cache if the item doesn't exist. + + Returns: + The cached value associated with the key. + """ + with self._lock: + if key in self._cache: + return self.get(key) + self.set(key, value) + return value + + def delete(self, key): + """ + Remove an item from the cache. + + Args: + key: The key of the item to remove. + """ + # with self._lock: + self._cache.pop(key, None) + + def clear(self): + """ + Clear all items from the cache. + """ + with self._lock: + self._cache.clear() + + def __contains__(self, key): + """Check if the key is in the cache.""" + return key in self._cache + + def __getitem__(self, key): + """Retrieve an item from the cache using the square bracket notation.""" + return self.get(key) + + def __setitem__(self, key, value): + """Add an item to the cache using the square bracket notation.""" + self.set(key, value) + + def __delitem__(self, key): + """Remove an item from the cache using the square bracket notation.""" + self.delete(key) + + def __len__(self): + """Return the number of items in the cache.""" + return len(self._cache) + + def __repr__(self): + """Return a string representation of the InMemoryCache instance.""" + return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})" diff --git a/src/backend/langflow/services/cache/manager.py b/src/backend/langflow/services/cache/manager.py new file mode 100644 index 000000000..ce9a338ef --- /dev/null +++ b/src/backend/langflow/services/cache/manager.py @@ -0,0 
+1,153 @@ +from contextlib import contextmanager +from typing import Any, Awaitable, Callable, List, Optional +from langflow.services.base import Service + +import pandas as pd +from PIL import Image + + +class Subject: + """Base class for implementing the observer pattern.""" + + def __init__(self): + self.observers: List[Callable[[], None]] = [] + + def attach(self, observer: Callable[[], None]): + """Attach an observer to the subject.""" + self.observers.append(observer) + + def detach(self, observer: Callable[[], None]): + """Detach an observer from the subject.""" + self.observers.remove(observer) + + def notify(self): + """Notify all observers about an event.""" + for observer in self.observers: + if observer is None: + continue + observer() + + +class AsyncSubject: + """Base class for implementing the async observer pattern.""" + + def __init__(self): + self.observers: List[Callable[[], Awaitable]] = [] + + def attach(self, observer: Callable[[], Awaitable]): + """Attach an observer to the subject.""" + self.observers.append(observer) + + def detach(self, observer: Callable[[], Awaitable]): + """Detach an observer from the subject.""" + self.observers.remove(observer) + + async def notify(self): + """Notify all observers about an event.""" + for observer in self.observers: + if observer is None: + continue + await observer() + + +class CacheManager(Subject, Service): + """Manages cache for different clients and notifies observers on changes.""" + + name = "cache_manager" + + def __init__(self): + super().__init__() + self._cache = {} + self.current_client_id = None + self.current_cache = {} + + @contextmanager + def set_client_id(self, client_id: str): + """ + Context manager to set the current client_id and associated cache. + + Args: + client_id (str): The client identifier. 
+ """ + previous_client_id = self.current_client_id + self.current_client_id = client_id + self.current_cache = self._cache.setdefault(client_id, {}) + try: + yield + finally: + self.current_client_id = previous_client_id + self.current_cache = self._cache.get(self.current_client_id, {}) + + def add(self, name: str, obj: Any, obj_type: str, extension: Optional[str] = None): + """ + Add an object to the current client's cache. + + Args: + name (str): The cache key. + obj (Any): The object to cache. + obj_type (str): The type of the object. + """ + object_extensions = { + "image": "png", + "pandas": "csv", + } + if obj_type in object_extensions: + _extension = object_extensions[obj_type] + else: + _extension = type(obj).__name__.lower() + self.current_cache[name] = { + "obj": obj, + "type": obj_type, + "extension": extension or _extension, + } + self.notify() + + def add_pandas(self, name: str, obj: Any): + """ + Add a pandas DataFrame or Series to the current client's cache. + + Args: + name (str): The cache key. + obj (Any): The pandas DataFrame or Series object. + """ + if isinstance(obj, (pd.DataFrame, pd.Series)): + self.add(name, obj.to_csv(), "pandas", extension="csv") + else: + raise ValueError("Object is not a pandas DataFrame or Series") + + def add_image(self, name: str, obj: Any, extension: str = "png"): + """ + Add a PIL Image to the current client's cache. + + Args: + name (str): The cache key. + obj (Any): The PIL Image object. + """ + if isinstance(obj, Image.Image): + self.add(name, obj, "image", extension=extension) + else: + raise ValueError("Object is not a PIL Image") + + def get(self, name: str): + """ + Get an object from the current client's cache. + + Args: + name (str): The cache key. + + Returns: + The cached object associated with the given cache key. + """ + return self.current_cache[name] + + def get_last(self): + """ + Get the last added item in the current client's cache. + + Returns: + The last added item in the cache. 
+ """ + return list(self.current_cache.values())[-1] + + +cache_manager = CacheManager() diff --git a/src/backend/langflow/services/cache/utils.py b/src/backend/langflow/services/cache/utils.py new file mode 100644 index 000000000..3deabe9f4 --- /dev/null +++ b/src/backend/langflow/services/cache/utils.py @@ -0,0 +1,179 @@ +import base64 +import contextlib +import functools +import hashlib +import json +import os +import tempfile +from collections import OrderedDict +from pathlib import Path +from typing import Any, Dict +from appdirs import user_cache_dir + +CACHE: Dict[str, Any] = {} + +CACHE_DIR = user_cache_dir("langflow", "langflow") + + +def create_cache_folder(func): + def wrapper(*args, **kwargs): + # Get the destination folder + cache_path = Path(CACHE_DIR) / PREFIX + + # Create the destination folder if it doesn't exist + os.makedirs(cache_path, exist_ok=True) + + return func(*args, **kwargs) + + return wrapper + + +def memoize_dict(maxsize=128): + cache = OrderedDict() + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + hashed = compute_dict_hash(args[0]) + key = (func.__name__, hashed, frozenset(kwargs.items())) + if key not in cache: + result = func(*args, **kwargs) + cache[key] = result + if len(cache) > maxsize: + cache.popitem(last=False) + else: + result = cache[key] + return result + + def clear_cache(): + cache.clear() + + wrapper.clear_cache = clear_cache # type: ignore + wrapper.cache = cache # type: ignore + return wrapper + + return decorator + + +PREFIX = "langflow_cache" + + +@create_cache_folder +def clear_old_cache_files(max_cache_size: int = 3): + cache_dir = Path(tempfile.gettempdir()) / PREFIX + cache_files = list(cache_dir.glob("*.dill")) + + if len(cache_files) > max_cache_size: + cache_files_sorted_by_mtime = sorted( + cache_files, key=lambda x: x.stat().st_mtime, reverse=True + ) + + for cache_file in cache_files_sorted_by_mtime[max_cache_size:]: + with contextlib.suppress(OSError): + 
os.remove(cache_file) + + +def compute_dict_hash(graph_data): + graph_data = filter_json(graph_data) + + cleaned_graph_json = json.dumps(graph_data, sort_keys=True) + return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest() + + +def filter_json(json_data): + filtered_data = json_data.copy() + + # Remove 'viewport' and 'chatHistory' keys + if "viewport" in filtered_data: + del filtered_data["viewport"] + if "chatHistory" in filtered_data: + del filtered_data["chatHistory"] + + # Filter nodes + if "nodes" in filtered_data: + for node in filtered_data["nodes"]: + if "position" in node: + del node["position"] + if "positionAbsolute" in node: + del node["positionAbsolute"] + if "selected" in node: + del node["selected"] + if "dragging" in node: + del node["dragging"] + + return filtered_data + + +@create_cache_folder +def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> str: + """ + Save a binary file to the specified folder. + + Args: + content: The content of the file as a bytes object. + file_name: The name of the file, including its extension. + + Returns: + The path to the saved file. + """ + if not any(file_name.endswith(suffix) for suffix in accepted_types): + raise ValueError(f"File {file_name} is not accepted") + + # Get the destination folder + cache_path = Path(CACHE_DIR) / PREFIX + if not content: + raise ValueError("Please, reload the file in the loader.") + data = content.split(",")[1] + decoded_bytes = base64.b64decode(data) + + # Create the full file path + file_path = os.path.join(cache_path, file_name) + + # Save the binary content to the file + with open(file_path, "wb") as file: + file.write(decoded_bytes) + + return file_path + + +@create_cache_folder +def save_uploaded_file(file, folder_name): + """ + Save an uploaded file to the specified folder with a hash of its content as the file name. + + Args: + file: The uploaded file object. + folder_name: The name of the folder to save the file in. 
+ + Returns: + The path to the saved file. + """ + cache_path = Path(CACHE_DIR) + folder_path = cache_path / folder_name + + # Create the folder if it doesn't exist + if not folder_path.exists(): + folder_path.mkdir() + + # Create a hash of the file content + sha256_hash = hashlib.sha256() + # Reset the file cursor to the beginning of the file + file.seek(0) + # Iterate over the uploaded file in small chunks to conserve memory + while chunk := file.read(8192): # Read 8KB at a time (adjust as needed) + sha256_hash.update(chunk) + + # Use the hex digest of the hash as the file name + hex_dig = sha256_hash.hexdigest() + file_name = hex_dig + + # Reset the file cursor to the beginning of the file + file.seek(0) + + # Save the file with the hash as its name + file_path = folder_path / file_name + with open(file_path, "wb") as new_file: + while chunk := file.read(8192): + new_file.write(chunk) + + return file_path From 7b1f99b1e0da4c088d6029ea3535a122319dfe5f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:13:10 -0300 Subject: [PATCH 20/90] =?UTF-8?q?=F0=9F=93=9D=20chore(chat):=20add=20chat?= =?UTF-8?q?=20module=20files?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📝 chore(chat): add ChatConfig class to handle chat configuration 📝 chore(chat): add ChatManagerFactory class to create and configure ChatManager 📝 chore(chat): add ChatManager class to handle chat functionality 📝 chore(chat): add ChatHistory class to manage chat history 📝 chore(chat): add process_graph function to process chat inputs and generate result and thought --- .../langflow/services/chat/__init__.py | 0 src/backend/langflow/services/chat/config.py | 2 + src/backend/langflow/services/chat/factory.py | 11 + src/backend/langflow/services/chat/manager.py | 221 ++++++++++++++++++ src/backend/langflow/services/chat/utils.py | 37 +++ 5 files changed, 271 insertions(+) create mode 100644 
src/backend/langflow/services/chat/__init__.py create mode 100644 src/backend/langflow/services/chat/config.py create mode 100644 src/backend/langflow/services/chat/factory.py create mode 100644 src/backend/langflow/services/chat/manager.py create mode 100644 src/backend/langflow/services/chat/utils.py diff --git a/src/backend/langflow/services/chat/__init__.py b/src/backend/langflow/services/chat/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/chat/config.py b/src/backend/langflow/services/chat/config.py new file mode 100644 index 000000000..274f4d5bd --- /dev/null +++ b/src/backend/langflow/services/chat/config.py @@ -0,0 +1,2 @@ +class ChatConfig: + streaming: bool = True diff --git a/src/backend/langflow/services/chat/factory.py b/src/backend/langflow/services/chat/factory.py new file mode 100644 index 000000000..03597ed11 --- /dev/null +++ b/src/backend/langflow/services/chat/factory.py @@ -0,0 +1,11 @@ +from langflow.services.chat.manager import ChatManager +from langflow.services.factory import ServiceFactory + + +class ChatManagerFactory(ServiceFactory): + def __init__(self): + super().__init__(ChatManager) + + def create(self, settings_service): + # Here you would have logic to create and configure a ChatManager + return ChatManager() diff --git a/src/backend/langflow/services/chat/manager.py b/src/backend/langflow/services/chat/manager.py new file mode 100644 index 000000000..82fb77572 --- /dev/null +++ b/src/backend/langflow/services/chat/manager.py @@ -0,0 +1,221 @@ +from collections import defaultdict +from fastapi import WebSocket, status +from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse +from langflow.services.base import Service +from langflow.services import service_manager +from langflow.services.cache.manager import Subject +from langflow.services.chat.utils import process_graph +from langflow.interface.utils import pil_to_base64 +from langflow.services.schema 
import ServiceType +from langflow.utils.logger import logger + + +import asyncio +import json +from typing import Any, Dict, List + +from langflow.services.cache.flow import InMemoryCache + + +class ChatHistory(Subject): + def __init__(self): + super().__init__() + self.history: Dict[str, List[ChatMessage]] = defaultdict(list) + + def add_message(self, client_id: str, message: ChatMessage): + """Add a message to the chat history.""" + + self.history[client_id].append(message) + + if not isinstance(message, FileResponse): + self.notify() + + def get_history(self, client_id: str, filter_messages=True) -> List[ChatMessage]: + """Get the chat history for a client.""" + if history := self.history.get(client_id, []): + if filter_messages: + return [msg for msg in history if msg.type not in ["start", "stream"]] + return history + else: + return [] + + def empty_history(self, client_id: str): + """Empty the chat history for a client.""" + self.history[client_id] = [] + + +class ChatManager(Service): + name = "chat_manager" + + def __init__(self): + self.active_connections: Dict[str, WebSocket] = {} + self.chat_history = ChatHistory() + self.cache_manager = service_manager.get(ServiceType.CACHE_MANAGER) + self.cache_manager.attach(self.update) + self.in_memory_cache = InMemoryCache() + + def on_chat_history_update(self): + """Send the last chat message to the client.""" + client_id = self.cache_manager.current_client_id + if client_id in self.active_connections: + chat_response = self.chat_history.get_history( + client_id, filter_messages=False + )[-1] + if chat_response.is_bot: + # Process FileResponse + if isinstance(chat_response, FileResponse): + # If data_type is pandas, convert to csv + if chat_response.data_type == "pandas": + chat_response.data = chat_response.data.to_csv() + elif chat_response.data_type == "image": + # Base64 encode the image + chat_response.data = pil_to_base64(chat_response.data) + # get event loop + loop = asyncio.get_event_loop() + + coroutine 
= self.send_json(client_id, chat_response) + asyncio.run_coroutine_threadsafe(coroutine, loop) + + def update(self): + if self.cache_manager.current_client_id in self.active_connections: + self.last_cached_object_dict = self.cache_manager.get_last() + # Add a new ChatResponse with the data + chat_response = FileResponse( + message=None, + type="file", + data=self.last_cached_object_dict["obj"], + data_type=self.last_cached_object_dict["type"], + ) + + self.chat_history.add_message( + self.cache_manager.current_client_id, chat_response + ) + + async def connect(self, client_id: str, websocket: WebSocket): + await websocket.accept() + self.active_connections[client_id] = websocket + + def disconnect(self, client_id: str): + self.active_connections.pop(client_id, None) + + async def send_message(self, client_id: str, message: str): + websocket = self.active_connections[client_id] + await websocket.send_text(message) + + async def send_json(self, client_id: str, message: ChatMessage): + websocket = self.active_connections[client_id] + await websocket.send_json(message.dict()) + + async def close_connection(self, client_id: str, code: int, reason: str): + if websocket := self.active_connections[client_id]: + try: + await websocket.close(code=code, reason=reason) + self.disconnect(client_id) + except RuntimeError as exc: + # This is to catch the following error: + # Unexpected ASGI message 'websocket.close', after sending 'websocket.close' + if "after sending" in str(exc): + logger.error(f"Error closing connection: {exc}") + + async def process_message( + self, client_id: str, payload: Dict, langchain_object: Any + ): + # Process the graph data and chat message + chat_inputs = payload.pop("inputs", "") + chat_inputs = ChatMessage(message=chat_inputs) + self.chat_history.add_message(client_id, chat_inputs) + + # graph_data = payload + start_resp = ChatResponse(message=None, type="start", intermediate_steps="") + await self.send_json(client_id, start_resp) + + # 
is_first_message = len(self.chat_history.get_history(client_id=client_id)) <= 1 + # Generate result and thought + try: + logger.debug("Generating result and thought") + + result, intermediate_steps = await process_graph( + langchain_object=langchain_object, + chat_inputs=chat_inputs, + websocket=self.active_connections[client_id], + ) + except Exception as e: + # Log stack trace + logger.exception(e) + self.chat_history.empty_history(client_id) + raise e + # Send a response back to the frontend, if needed + intermediate_steps = intermediate_steps or "" + history = self.chat_history.get_history(client_id, filter_messages=False) + file_responses = [] + if history: + # Iterate backwards through the history + for msg in reversed(history): + if isinstance(msg, FileResponse): + if msg.data_type == "image": + # Base64 encode the image + if isinstance(msg.data, str): + continue + msg.data = pil_to_base64(msg.data) + file_responses.append(msg) + if msg.type == "start": + break + + response = ChatResponse( + message=result, + intermediate_steps=intermediate_steps.strip(), + type="end", + files=file_responses, + ) + await self.send_json(client_id, response) + self.chat_history.add_message(client_id, response) + + def set_cache(self, client_id: str, langchain_object: Any) -> bool: + """ + Set the cache for a client. 
+ """ + + self.in_memory_cache.set(client_id, langchain_object) + return client_id in self.in_memory_cache + + async def handle_websocket(self, client_id: str, websocket: WebSocket): + await self.connect(client_id, websocket) + + try: + chat_history = self.chat_history.get_history(client_id) + # iterate and make BaseModel into dict + chat_history = [chat.dict() for chat in chat_history] + await websocket.send_json(chat_history) + + while True: + json_payload = await websocket.receive_json() + try: + payload = json.loads(json_payload) + except TypeError: + payload = json_payload + if "clear_history" in payload: + self.chat_history.history[client_id] = [] + continue + + with self.cache_manager.set_client_id(client_id): + langchain_object = self.in_memory_cache.get(client_id) + await self.process_message(client_id, payload, langchain_object) + + except Exception as exc: + # Handle any exceptions that might occur + logger.error(f"Error handling websocket: {exc}") + await self.close_connection( + client_id=client_id, + code=status.WS_1011_INTERNAL_ERROR, + reason=str(exc)[:120], + ) + finally: + try: + await self.close_connection( + client_id=client_id, + code=status.WS_1000_NORMAL_CLOSURE, + reason="Client disconnected", + ) + except Exception as exc: + logger.error(f"Error closing connection: {exc}") + self.disconnect(client_id) diff --git a/src/backend/langflow/services/chat/utils.py b/src/backend/langflow/services/chat/utils.py new file mode 100644 index 000000000..7db65b8e3 --- /dev/null +++ b/src/backend/langflow/services/chat/utils.py @@ -0,0 +1,37 @@ +from fastapi import WebSocket +from langflow.api.v1.schemas import ChatMessage +from langflow.processing.base import get_result_and_steps +from langflow.interface.utils import try_setting_streaming_options +from langflow.utils.logger import logger + + +async def process_graph( + langchain_object, + chat_inputs: ChatMessage, + websocket: WebSocket, +): + langchain_object = 
try_setting_streaming_options(langchain_object, websocket) + logger.debug("Loaded langchain object") + + if langchain_object is None: + # Raise user facing error + raise ValueError( + "There was an error loading the langchain_object. Please, check all the nodes and try again." + ) + + # Generate result and thought + try: + if not chat_inputs.message: + logger.debug("No message provided") + raise ValueError("No message provided") + + logger.debug("Generating result and thought") + result, intermediate_steps = await get_result_and_steps( + langchain_object, chat_inputs.message, websocket=websocket + ) + logger.debug("Generated result and intermediate_steps") + return result, intermediate_steps + except Exception as e: + # Log stack trace + logger.exception(e) + raise e From b451ad9bdd91bf978d3b7897ca49e9d98ce44f21 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:13:48 -0300 Subject: [PATCH 21/90] =?UTF-8?q?=F0=9F=93=A6=20chore(settings):=20add=20n?= =?UTF-8?q?ew=20files=20for=20managing=20settings=20in=20the=20backend?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📄 feat(settings/__init__.py): add __init__.py file to expose factory and manager modules 📄 feat(settings/base.py): add base settings class with default values and validators 📄 feat(settings/factory.py): add factory class for creating and configuring a SettingsManager 📄 feat(settings/manager.py): add manager class for loading settings from YAML file and initializing SettingsManager 📦 chore(settings.py): add settings.py file to the backend/langflow/services/settings directory ✨ feat(settings.py): add support for loading settings from a YAML file and updating settings from keyword arguments 🔒 chore(settings.py): add validation and logging to the settings module for improved reliability and debugging 🔧 chore(settings.py): add utility functions for saving and loading settings from a YAML file --- 
.../langflow/services/settings/__init__.py | 3 + .../langflow/services/settings/base.py | 172 ++++++++++++++++++ .../langflow/services/settings/factory.py | 15 ++ .../langflow/services/settings/manager.py | 36 ++++ .../langflow/services/settings/settings.py | 171 +++++++++++++++++ 5 files changed, 397 insertions(+) create mode 100644 src/backend/langflow/services/settings/__init__.py create mode 100644 src/backend/langflow/services/settings/base.py create mode 100644 src/backend/langflow/services/settings/factory.py create mode 100644 src/backend/langflow/services/settings/manager.py create mode 100644 src/backend/langflow/services/settings/settings.py diff --git a/src/backend/langflow/services/settings/__init__.py b/src/backend/langflow/services/settings/__init__.py new file mode 100644 index 000000000..2191bf2cc --- /dev/null +++ b/src/backend/langflow/services/settings/__init__.py @@ -0,0 +1,3 @@ +from . import factory, manager + +__all__ = ["factory", "manager"] diff --git a/src/backend/langflow/services/settings/base.py b/src/backend/langflow/services/settings/base.py new file mode 100644 index 000000000..9843339a5 --- /dev/null +++ b/src/backend/langflow/services/settings/base.py @@ -0,0 +1,172 @@ +import contextlib +import json +import os +from typing import Optional, List +from pathlib import Path + +import yaml +from pydantic import BaseSettings, root_validator, validator +from langflow.utils.logger import logger + +BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") + + +class Settings(BaseSettings): + CHAINS: dict = {} + AGENTS: dict = {} + PROMPTS: dict = {} + LLMS: dict = {} + TOOLS: dict = {} + MEMORIES: dict = {} + EMBEDDINGS: dict = {} + VECTORSTORES: dict = {} + DOCUMENTLOADERS: dict = {} + WRAPPERS: dict = {} + RETRIEVERS: dict = {} + TOOLKITS: dict = {} + TEXTSPLITTERS: dict = {} + UTILITIES: dict = {} + OUTPUT_PARSERS: dict = {} + CUSTOM_COMPONENTS: dict = {} + + DEV: bool = False + DATABASE_URL: Optional[str] = None + CACHE: str = 
"InMemoryCache" + REMOVE_API_KEYS: bool = False + COMPONENTS_PATH: List[str] = [] + + @validator("DATABASE_URL", pre=True) + def set_database_url(cls, value): + if not value: + logger.debug( + "No database_url provided, trying LANGFLOW_DATABASE_URL env variable" + ) + if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): + value = langflow_database_url + logger.debug("Using LANGFLOW_DATABASE_URL env variable.") + else: + logger.debug("No DATABASE_URL env variable, using sqlite database") + value = "sqlite:///./langflow.db" + + return value + + @validator("COMPONENTS_PATH", pre=True) + def set_components_path(cls, value): + if os.getenv("LANGFLOW_COMPONENTS_PATH"): + logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") + langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH") + if ( + Path(langflow_component_path).exists() + and langflow_component_path not in value + ): + if isinstance(langflow_component_path, list): + for path in langflow_component_path: + if path not in value: + value.append(path) + logger.debug( + f"Extending {langflow_component_path} to components_path" + ) + elif langflow_component_path not in value: + value.append(langflow_component_path) + logger.debug( + f"Appending {langflow_component_path} to components_path" + ) + + if not value: + value = [BASE_COMPONENTS_PATH] + logger.debug("Setting default components path to components_path") + elif BASE_COMPONENTS_PATH not in value: + value.append(BASE_COMPONENTS_PATH) + logger.debug("Adding default components path to components_path") + + logger.debug(f"Components path: {value}") + return value + + class Config: + validate_assignment = True + extra = "ignore" + env_prefix = "LANGFLOW_" + + @root_validator(allow_reuse=True) + def validate_lists(cls, values): + for key, value in values.items(): + if key != "dev" and not value: + values[key] = [] + return values + + def update_from_yaml(self, file_path: str, dev: bool = False): + new_settings = 
load_settings_from_yaml(file_path) + self.CHAINS = new_settings.CHAINS or {} + self.AGENTS = new_settings.AGENTS or {} + self.PROMPTS = new_settings.PROMPTS or {} + self.LLMS = new_settings.LLMS or {} + self.TOOLS = new_settings.TOOLS or {} + self.MEMORIES = new_settings.MEMORIES or {} + self.WRAPPERS = new_settings.WRAPPERS or {} + self.TOOLKITS = new_settings.TOOLKITS or {} + self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {} + self.UTILITIES = new_settings.UTILITIES or {} + self.EMBEDDINGS = new_settings.EMBEDDINGS or {} + self.VECTORSTORES = new_settings.VECTORSTORES or {} + self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {} + self.RETRIEVERS = new_settings.RETRIEVERS or {} + self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {} + self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {} + self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or [] + self.DEV = dev + + def update_settings(self, **kwargs): + logger.debug("Updating settings") + for key, value in kwargs.items(): + # value may contain sensitive information, so we don't want to log it + if not hasattr(self, key): + logger.debug(f"Key {key} not found in settings") + continue + logger.debug(f"Updating {key}") + if isinstance(getattr(self, key), list): + # value might be a '[something]' string + with contextlib.suppress(json.decoder.JSONDecodeError): + value = json.loads(str(value)) + if isinstance(value, list): + for item in value: + if item not in getattr(self, key): + getattr(self, key).append(item) + logger.debug(f"Extended {key}") + else: + getattr(self, key).append(value) + logger.debug(f"Appended {key}") + + else: + setattr(self, key, value) + logger.debug(f"Updated {key}") + logger.debug(f"{key}: {getattr(self, key)}") + + +def save_settings_to_yaml(settings: Settings, file_path: str): + with open(file_path, "w") as f: + settings_dict = settings.dict() + yaml.dump(settings_dict, f) + + +def load_settings_from_yaml(file_path: str) -> Settings: + # Check if a string is a valid path 
or a file name + if "/" not in file_path: + # Get current path + current_path = os.path.dirname(os.path.abspath(__file__)) + + file_path = os.path.join(current_path, file_path) + + with open(file_path, "r") as f: + settings_dict = yaml.safe_load(f) + settings_dict = {k.upper(): v for k, v in settings_dict.items()} + + for key in settings_dict: + if key not in Settings.__fields__.keys(): + raise KeyError(f"Key {key} not found in settings") + logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") + + return Settings(**settings_dict) + + +langflow_dir = Path(__file__).parent.parent.parent +settings = load_settings_from_yaml(str(langflow_dir / "config.yaml")) diff --git a/src/backend/langflow/services/settings/factory.py b/src/backend/langflow/services/settings/factory.py new file mode 100644 index 000000000..ab22e22b8 --- /dev/null +++ b/src/backend/langflow/services/settings/factory.py @@ -0,0 +1,15 @@ +from pathlib import Path +from langflow.services.settings.manager import SettingsManager +from langflow.services.factory import ServiceFactory + + +class SettingsManagerFactory(ServiceFactory): + def __init__(self): + super().__init__(SettingsManager) + + def create(self): + # Here you would have logic to create and configure a SettingsManager + langflow_dir = Path(__file__).parent.parent.parent + return SettingsManager.load_settings_from_yaml( + str(langflow_dir / "config.yaml") + ) diff --git a/src/backend/langflow/services/settings/manager.py b/src/backend/langflow/services/settings/manager.py new file mode 100644 index 000000000..598efe2d8 --- /dev/null +++ b/src/backend/langflow/services/settings/manager.py @@ -0,0 +1,36 @@ +from langflow.services.base import Service +from langflow.services.settings.base import Settings +from langflow.utils.logger import logger +import os +import yaml + + +class SettingsManager(Service): + name = "settings_manager" + + def __init__(self, settings: Settings): + super().__init__() + self.settings = settings + + 
@classmethod + def load_settings_from_yaml(cls, file_path: str) -> Settings: + # Check if a string is a valid path or a file name + if "/" not in file_path: + # Get current path + current_path = os.path.dirname(os.path.abspath(__file__)) + + file_path = os.path.join(current_path, file_path) + + with open(file_path, "r") as f: + settings_dict = yaml.safe_load(f) + settings_dict = {k.upper(): v for k, v in settings_dict.items()} + + for key in settings_dict: + if key not in Settings.__fields__.keys(): + raise KeyError(f"Key {key} not found in settings") + logger.debug( + f"Loading {len(settings_dict[key])} {key} from {file_path}" + ) + + settings = Settings(**settings_dict) + return cls(settings) diff --git a/src/backend/langflow/services/settings/settings.py b/src/backend/langflow/services/settings/settings.py new file mode 100644 index 000000000..439b3a1e4 --- /dev/null +++ b/src/backend/langflow/services/settings/settings.py @@ -0,0 +1,171 @@ +import contextlib +import json +import os +from typing import Optional, List +from pathlib import Path + +import yaml +from pydantic import BaseSettings, root_validator, validator +from langflow.utils.logger import logger + +BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") + + +class Settings(BaseSettings): + CHAINS: dict = {} + AGENTS: dict = {} + PROMPTS: dict = {} + LLMS: dict = {} + TOOLS: dict = {} + MEMORIES: dict = {} + EMBEDDINGS: dict = {} + VECTORSTORES: dict = {} + DOCUMENTLOADERS: dict = {} + WRAPPERS: dict = {} + RETRIEVERS: dict = {} + TOOLKITS: dict = {} + TEXTSPLITTERS: dict = {} + UTILITIES: dict = {} + OUTPUT_PARSERS: dict = {} + CUSTOM_COMPONENTS: dict = {} + + DEV: bool = False + DATABASE_URL: Optional[str] = None + CACHE: str = "InMemoryCache" + REMOVE_API_KEYS: bool = False + COMPONENTS_PATH: List[str] = [] + + @validator("DATABASE_URL", pre=True) + def set_database_url(cls, value): + if not value: + logger.debug( + "No database_url provided, trying LANGFLOW_DATABASE_URL env variable" + 
) + if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): + value = langflow_database_url + logger.debug("Using LANGFLOW_DATABASE_URL env variable.") + else: + logger.debug("No DATABASE_URL env variable, using sqlite database") + value = "sqlite:///./langflow.db" + + return value + + @validator("COMPONENTS_PATH", pre=True) + def set_components_path(cls, value): + if os.getenv("LANGFLOW_COMPONENTS_PATH"): + logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") + langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH") + if ( + Path(langflow_component_path).exists() + and langflow_component_path not in value + ): + if isinstance(langflow_component_path, list): + for path in langflow_component_path: + if path not in value: + value.append(path) + logger.debug( + f"Extending {langflow_component_path} to components_path" + ) + elif langflow_component_path not in value: + value.append(langflow_component_path) + logger.debug( + f"Appending {langflow_component_path} to components_path" + ) + + if not value: + value = [BASE_COMPONENTS_PATH] + logger.debug("Setting default components path to components_path") + elif BASE_COMPONENTS_PATH not in value: + value.append(BASE_COMPONENTS_PATH) + logger.debug("Adding default components path to components_path") + + logger.debug(f"Components path: {value}") + return value + + class Config: + validate_assignment = True + extra = "ignore" + env_prefix = "LANGFLOW_" + + @root_validator(allow_reuse=True) + def validate_lists(cls, values): + for key, value in values.items(): + if key != "dev" and not value: + values[key] = [] + return values + + def update_from_yaml(self, file_path: str, dev: bool = False): + new_settings = load_settings_from_yaml(file_path) + self.CHAINS = new_settings.CHAINS or {} + self.AGENTS = new_settings.AGENTS or {} + self.PROMPTS = new_settings.PROMPTS or {} + self.LLMS = new_settings.LLMS or {} + self.TOOLS = new_settings.TOOLS or {} + self.MEMORIES = new_settings.MEMORIES or {} + 
self.WRAPPERS = new_settings.WRAPPERS or {} + self.TOOLKITS = new_settings.TOOLKITS or {} + self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {} + self.UTILITIES = new_settings.UTILITIES or {} + self.EMBEDDINGS = new_settings.EMBEDDINGS or {} + self.VECTORSTORES = new_settings.VECTORSTORES or {} + self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {} + self.RETRIEVERS = new_settings.RETRIEVERS or {} + self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {} + self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {} + self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or [] + self.DEV = dev + + def update_settings(self, **kwargs): + logger.debug("Updating settings") + for key, value in kwargs.items(): + # value may contain sensitive information, so we don't want to log it + if not hasattr(self, key): + logger.debug(f"Key {key} not found in settings") + continue + logger.debug(f"Updating {key}") + if isinstance(getattr(self, key), list): + # value might be a '[something]' string + with contextlib.suppress(json.decoder.JSONDecodeError): + value = json.loads(str(value)) + if isinstance(value, list): + for item in value: + if item not in getattr(self, key): + getattr(self, key).append(item) + logger.debug(f"Extended {key}") + else: + getattr(self, key).append(value) + logger.debug(f"Appended {key}") + + else: + setattr(self, key, value) + logger.debug(f"Updated {key}") + logger.debug(f"{key}: {getattr(self, key)}") + + +def save_settings_to_yaml(settings: Settings, file_path: str): + with open(file_path, "w") as f: + settings_dict = settings.dict() + yaml.dump(settings_dict, f) + + +def load_settings_from_yaml(file_path: str) -> Settings: + # Check if a string is a valid path or a file name + if "/" not in file_path: + # Get current path + current_path = os.path.dirname(os.path.abspath(__file__)) + + file_path = os.path.join(current_path, file_path) + + with open(file_path, "r") as f: + settings_dict = yaml.safe_load(f) + settings_dict = {k.upper(): v for k, v 
in settings_dict.items()} + + for key in settings_dict: + if key not in Settings.__fields__.keys(): + raise KeyError(f"Key {key} not found in settings") + logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") + + return Settings(**settings_dict) + + +settings = load_settings_from_yaml("config.yaml") From d51aa7ecb23a4163aed48545f595ca2cca4653ee Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:14:22 -0300 Subject: [PATCH 22/90] =?UTF-8?q?=F0=9F=93=A6=20chore(database):=20add=20d?= =?UTF-8?q?atabase=20related=20files=20and=20models?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📦 chore(database): add database manager and factory 📦 chore(database): add flow and flow style models --- .../langflow/services/database/__init__.py | 0 .../langflow/services/database/base.py | 151 ++++++++++++++++++ .../langflow/services/database/factory.py | 15 ++ .../services/database/models/__init__.py | 4 + .../langflow/services/database/models/base.py | 14 ++ .../services/database/models/component.py | 29 ++++ .../langflow/services/database/models/flow.py | 60 +++++++ .../services/database/models/flow_style.py | 33 ++++ 8 files changed, 306 insertions(+) create mode 100644 src/backend/langflow/services/database/__init__.py create mode 100644 src/backend/langflow/services/database/base.py create mode 100644 src/backend/langflow/services/database/factory.py create mode 100644 src/backend/langflow/services/database/models/__init__.py create mode 100644 src/backend/langflow/services/database/models/base.py create mode 100644 src/backend/langflow/services/database/models/component.py create mode 100644 src/backend/langflow/services/database/models/flow.py create mode 100644 src/backend/langflow/services/database/models/flow_style.py diff --git a/src/backend/langflow/services/database/__init__.py b/src/backend/langflow/services/database/__init__.py new file mode 100644 index 
000000000..e69de29bb diff --git a/src/backend/langflow/services/database/base.py b/src/backend/langflow/services/database/base.py new file mode 100644 index 000000000..fffb39096 --- /dev/null +++ b/src/backend/langflow/services/database/base.py @@ -0,0 +1,151 @@ +from contextlib import contextmanager +import os +from pathlib import Path +from langflow.services.base import Service +from sqlmodel import SQLModel, Session, create_engine +from langflow.utils.logger import logger +from alembic.config import Config +from alembic import command + + +class Engine: + _instance = None + + @classmethod + def get(cls): + logger.debug("Getting database engine") + if cls._instance is None: + cls.create() + return cls._instance + + @classmethod + def create(cls): + logger.debug("Creating database engine") + from langflow.settings import settings + + if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): + settings.DATABASE_URL = langflow_database_url + logger.debug("Using LANGFLOW_DATABASE_URL") + + if settings.DATABASE_URL and settings.DATABASE_URL.startswith("sqlite"): + connect_args = {"check_same_thread": False} + else: + connect_args = {} + if not settings.DATABASE_URL: + raise RuntimeError("No database_url provided") + cls._instance = create_engine(settings.DATABASE_URL, connect_args=connect_args) + + @classmethod + def update(cls): + logger.debug("Updating database engine") + cls._instance = None + cls.create() + + +def create_db_and_tables(): + logger.debug("Creating database and tables") + try: + SQLModel.metadata.create_all(Engine.get()) + except Exception as exc: + logger.error(f"Error creating database and tables: {exc}") + raise RuntimeError("Error creating database and tables") from exc + # Now check if the table Flow exists, if not, something went wrong + # and we need to create the tables again. 
+ from sqlalchemy import inspect + + inspector = inspect(Engine.get()) + if "flow" not in inspector.get_table_names(): + logger.error("Something went wrong creating the database and tables.") + logger.error("Please check your database settings.") + + raise RuntimeError("Something went wrong creating the database and tables.") + else: + logger.debug("Database and tables created successfully") + + +class DatabaseManager(Service): + name = "database_manager" + + def __init__(self, database_url: str): + self.database_url = database_url + # This file is in langflow.services.database.base.py + # the ini is in langflow + langflow_dir = Path(__file__).parent.parent.parent + self.script_location = langflow_dir / "alembic" + self.alembic_cfg_path = langflow_dir / "alembic.ini" + self.engine = create_engine(database_url) + + def __enter__(self): + self._session = Session(self.engine) + return self._session + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is not None: # If an exception has been raised + logger.error( + f"Session rollback because of exception: {exc_type.__name__} {exc_value}" + ) + self._session.rollback() + else: + self._session.commit() + self._session.close() + + def get_session(self): + with Session(self.engine) as session: + yield session + + def run_migrations(self): + logger.info( + f"Running DB migrations in {self.script_location} on {self.database_url}" + ) + alembic_cfg = Config() + alembic_cfg.set_main_option("script_location", str(self.script_location)) + alembic_cfg.set_main_option("sqlalchemy.url", self.database_url) + command.upgrade(alembic_cfg, "head") + + def create_db_and_tables(self): + logger.debug("Creating database and tables") + try: + SQLModel.metadata.create_all(self.engine) + except Exception as exc: + logger.error(f"Error creating database and tables: {exc}") + raise RuntimeError("Error creating database and tables") from exc + + # Now check if the table "flow" exists, if not, something went wrong + # and we need 
to create the tables again. + from sqlalchemy import inspect + + inspector = inspect(self.engine) + if "flow" not in inspector.get_table_names(): + logger.error("Something went wrong creating the database and tables.") + logger.error("Please check your database settings.") + raise RuntimeError("Something went wrong creating the database and tables.") + else: + logger.debug("Database and tables created successfully") + + +@contextmanager +def session_getter(db_manager: DatabaseManager): + try: + session = Session(DatabaseManager.engine) + yield session + except Exception as e: + print("Session rollback because of exception:", e) + session.rollback() + raise + finally: + session.close() + + +def get_session(): + with Session(Engine.get()) as session: + yield session + + +def initialize_database(): + logger.debug("Initializing database") + from langflow.services import service_manager, ServiceType + + database_manager = service_manager.get(ServiceType.DATABASE_MANAGER) + database_manager.run_migrations() + database_manager.create_db_and_tables() + logger.debug("Database initialized") diff --git a/src/backend/langflow/services/database/factory.py b/src/backend/langflow/services/database/factory.py new file mode 100644 index 000000000..187a29fdd --- /dev/null +++ b/src/backend/langflow/services/database/factory.py @@ -0,0 +1,15 @@ +from typing import TYPE_CHECKING +from langflow.services.database.base import DatabaseManager +from langflow.services.factory import ServiceFactory + +if TYPE_CHECKING: + from langflow.services.settings.manager import SettingsManager + + +class DatabaseManagerFactory(ServiceFactory): + def __init__(self): + super().__init__(DatabaseManager) + + def create(self, settings_service: "SettingsManager"): + # Here you would have logic to create and configure a DatabaseManager + return DatabaseManager(settings_service.settings.DATABASE_URL) diff --git a/src/backend/langflow/services/database/models/__init__.py 
b/src/backend/langflow/services/database/models/__init__.py new file mode 100644 index 000000000..da47bc5fe --- /dev/null +++ b/src/backend/langflow/services/database/models/__init__.py @@ -0,0 +1,4 @@ +from .flow import Flow + + +__all__ = ["Flow"] diff --git a/src/backend/langflow/services/database/models/base.py b/src/backend/langflow/services/database/models/base.py new file mode 100644 index 000000000..e20895b93 --- /dev/null +++ b/src/backend/langflow/services/database/models/base.py @@ -0,0 +1,14 @@ +from sqlmodel import SQLModel +import orjson + + +def orjson_dumps(v, *, default): + # orjson.dumps returns bytes, to match standard json.dumps we need to decode + return orjson.dumps(v, default=default).decode() + + +class SQLModelSerializable(SQLModel): + class Config: + orm_mode = True + json_loads = orjson.loads + json_dumps = orjson_dumps diff --git a/src/backend/langflow/services/database/models/component.py b/src/backend/langflow/services/database/models/component.py new file mode 100644 index 000000000..5c4e6c13a --- /dev/null +++ b/src/backend/langflow/services/database/models/component.py @@ -0,0 +1,29 @@ +from langflow.services.database.models.base import SQLModelSerializable, SQLModel +from sqlmodel import Field +from typing import Optional +from datetime import datetime +import uuid + + +class Component(SQLModelSerializable, table=True): + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + frontend_node_id: uuid.UUID = Field(index=True) + name: str = Field(index=True) + description: Optional[str] = Field(default=None) + python_code: Optional[str] = Field(default=None) + return_type: Optional[str] = Field(default=None) + is_disabled: bool = Field(default=False) + is_read_only: bool = Field(default=False) + create_at: datetime = Field(default_factory=datetime.utcnow) + update_at: datetime = Field(default_factory=datetime.utcnow) + + +class ComponentModel(SQLModel): + id: uuid.UUID = Field(default_factory=uuid.uuid4) + 
frontend_node_id: uuid.UUID = Field(default=uuid.uuid4()) + name: str = Field(default="") + description: Optional[str] = None + python_code: Optional[str] = None + return_type: Optional[str] = None + is_disabled: bool = False + is_read_only: bool = False diff --git a/src/backend/langflow/services/database/models/flow.py b/src/backend/langflow/services/database/models/flow.py new file mode 100644 index 000000000..2b6c6879c --- /dev/null +++ b/src/backend/langflow/services/database/models/flow.py @@ -0,0 +1,60 @@ +# Path: src/backend/langflow/database/models/flow.py + +from langflow.services.database.models.base import SQLModelSerializable +from pydantic import validator +from sqlmodel import Field, Relationship, JSON, Column +from uuid import UUID, uuid4 +from typing import Dict, Optional + +# if TYPE_CHECKING: +from langflow.services.database.models.flow_style import FlowStyle, FlowStyleRead + + +class FlowBase(SQLModelSerializable): + name: str = Field(index=True) + description: Optional[str] = Field(index=True) + data: Optional[Dict] = Field(default=None) + + @validator("data") + def validate_json(v): + # dict_keys(['description', 'name', 'id', 'data']) + if not v: + return v + if not isinstance(v, dict): + raise ValueError("Flow must be a valid JSON") + + # data must contain nodes and edges + if "nodes" not in v.keys(): + raise ValueError("Flow must have nodes") + if "edges" not in v.keys(): + raise ValueError("Flow must have edges") + + return v + + +class Flow(FlowBase, table=True): + id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) + data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) + style: Optional["FlowStyle"] = Relationship( + back_populates="flow", + # use "uselist=False" to make it a one-to-one relationship + sa_relationship_kwargs={"uselist": False}, + ) + + +class FlowCreate(FlowBase): + pass + + +class FlowRead(FlowBase): + id: UUID + + +class FlowReadWithStyle(FlowRead): + style: Optional["FlowStyleRead"] = 
None + + +class FlowUpdate(SQLModelSerializable): + name: Optional[str] = None + description: Optional[str] = None + data: Optional[Dict] = None diff --git a/src/backend/langflow/services/database/models/flow_style.py b/src/backend/langflow/services/database/models/flow_style.py new file mode 100644 index 000000000..3810c7cea --- /dev/null +++ b/src/backend/langflow/services/database/models/flow_style.py @@ -0,0 +1,33 @@ +# Path: src/backend/langflow/database/models/flowstyle.py + +from langflow.services.database.models.base import SQLModelSerializable +from sqlmodel import Field, Relationship +from uuid import UUID, uuid4 +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from langflow.services.database.models.flow import Flow + + +class FlowStyleBase(SQLModelSerializable): + color: str + emoji: str + flow_id: UUID = Field(default=None, foreign_key="flow.id") + + +class FlowStyle(FlowStyleBase, table=True): + id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) + flow: "Flow" = Relationship(back_populates="style") + + +class FlowStyleUpdate(SQLModelSerializable): + color: Optional[str] = None + emoji: Optional[str] = None + + +class FlowStyleCreate(FlowStyleBase): + pass + + +class FlowStyleRead(FlowStyleBase): + id: UUID From be8be07a6241e945f96d7b7760512905ecba46df Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:16:14 -0300 Subject: [PATCH 23/90] =?UTF-8?q?=F0=9F=94=A5=20refactor(=5F=5Finit=5F=5F.?= =?UTF-8?q?py):=20deactivate=20cache=20manager=20for=20now=20to=20improve?= =?UTF-8?q?=20performance=20and=20simplify=20code?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/__init__.py b/src/backend/langflow/__init__.py index 5920369e2..f6eb836cc 100644 --- a/src/backend/langflow/__init__.py +++ b/src/backend/langflow/__init__.py 
@@ -1,5 +1,7 @@ from importlib import metadata -from langflow.cache import cache_manager + +# Deactivate cache manager for now +# from langflow.services.cache import cache_manager from langflow.processing.process import load_flow_from_json from langflow.interface.custom.custom_component import CustomComponent From c9ae251f8538d46ce448280e36ec6c0ddd705d5f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:16:41 -0300 Subject: [PATCH 24/90] =?UTF-8?q?=F0=9F=94=A7=20chore(main.py):=20refactor?= =?UTF-8?q?=20imports=20and=20remove=20unused=20code=20for=20better=20orga?= =?UTF-8?q?nization=20and=20readability=20=E2=9C=A8=20feat(main.py):=20add?= =?UTF-8?q?=20initialization=20functions=20for=20services=20and=20database?= =?UTF-8?q?=20on=20app=20startup=20to=20ensure=20proper=20setup=20and=20co?= =?UTF-8?q?nfiguration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/main.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index deef1c914..a2da92c93 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -6,14 +6,14 @@ from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles from langflow.api import router -from langflow.database.base import DatabaseManager from langflow.interface.utils import setup_llm_caching +from langflow.services.database.base import initialize_database +from langflow.services.manager import initialize_services from langflow.utils.logger import configure def create_app(): """Create the FastAPI app and include the router.""" - from langflow.settings import settings configure() @@ -34,11 +34,10 @@ def create_app(): allow_methods=["*"], allow_headers=["*"], ) - database_manager = DatabaseManager(settings.DATABASE_URL) app.include_router(router) - # app.on_event("startup")(Engine.update) - 
app.on_event("startup")(database_manager.run_migrations) - app.on_event("startup")(database_manager.create_db_and_tables) + + app.on_event("startup")(initialize_services) + app.on_event("startup")(initialize_database) app.on_event("startup")(setup_llm_caching) return app From 029e06c03352272c48d8dd5cbc769578f352abaf Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:17:10 -0300 Subject: [PATCH 25/90] =?UTF-8?q?=F0=9F=94=80=20chore(alembic/env.py):=20u?= =?UTF-8?q?pdate=20import=20statement=20for=20SQLModel=20to=20reflect=20ne?= =?UTF-8?q?w=20file=20location?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/alembic/env.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/alembic/env.py b/src/backend/langflow/alembic/env.py index ea4fe9c43..a3babba6d 100644 --- a/src/backend/langflow/alembic/env.py +++ b/src/backend/langflow/alembic/env.py @@ -5,7 +5,7 @@ from sqlalchemy import pool from alembic import context -from langflow.database.base import SQLModel +from langflow.services.database.base import SQLModel # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
From 0a8ca3b90849a30d177a2c76ec6b7ab4a1d50cd9 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:17:41 -0300 Subject: [PATCH 26/90] =?UTF-8?q?=F0=9F=94=A7=20fix(chat.py):=20update=20i?= =?UTF-8?q?mport=20statements=20to=20reflect=20changes=20in=20module=20str?= =?UTF-8?q?ucture=20=F0=9F=94=A7=20fix(components.py):=20update=20import?= =?UTF-8?q?=20statements=20to=20reflect=20changes=20in=20module=20structur?= =?UTF-8?q?e=20=F0=9F=94=A7=20fix(endpoints.py):=20update=20import=20state?= =?UTF-8?q?ments=20to=20reflect=20changes=20in=20module=20structure=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(flow=5Fstyles.py):=20update=20import=20state?= =?UTF-8?q?ments=20to=20reflect=20changes=20in=20module=20structure=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(flows.py):=20update=20import=20statements=20?= =?UTF-8?q?to=20reflect=20changes=20in=20module=20structure=20=F0=9F=94=A7?= =?UTF-8?q?=20fix(schemas.py):=20update=20import=20statements=20to=20refle?= =?UTF-8?q?ct=20changes=20in=20module=20structure=20=F0=9F=94=A7=20fix(run?= =?UTF-8?q?.py):=20update=20import=20statements=20to=20reflect=20changes?= =?UTF-8?q?=20in=20module=20structure=20=F0=9F=94=A7=20fix(utils.py):=20up?= =?UTF-8?q?date=20import=20statements=20to=20reflect=20changes=20in=20modu?= =?UTF-8?q?le=20structure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/chat.py | 8 +++++--- src/backend/langflow/api/v1/components.py | 4 ++-- src/backend/langflow/api/v1/endpoints.py | 6 +++--- src/backend/langflow/api/v1/flow_styles.py | 4 ++-- src/backend/langflow/api/v1/flows.py | 4 ++-- src/backend/langflow/api/v1/schemas.py | 2 +- src/backend/langflow/interface/run.py | 2 +- src/backend/langflow/interface/utils.py | 2 +- 8 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index dd3407d1b..5711b0c33 100644 --- a/src/backend/langflow/api/v1/chat.py +++ 
b/src/backend/langflow/api/v1/chat.py @@ -3,13 +3,13 @@ from fastapi.responses import StreamingResponse from langflow.api.utils import build_input_keys_response from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData -from langflow.chat.manager import ChatManager +from langflow.services import service_manager, ServiceType from langflow.graph.graph.base import Graph from langflow.utils.logger import logger from cachetools import LRUCache router = APIRouter(tags=["Chat"]) -chat_manager = ChatManager() + flow_data_store: LRUCache = LRUCache(maxsize=10) @@ -17,6 +17,7 @@ flow_data_store: LRUCache = LRUCache(maxsize=10) async def chat(client_id: str, websocket: WebSocket): """Websocket endpoint for chat.""" try: + chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) if client_id in chat_manager.in_memory_cache: await chat_manager.handle_websocket(client_id, websocket) else: @@ -45,6 +46,7 @@ async def init_build(graph_data: dict, flow_id: str): return InitResponse(flowId=flow_id) # Delete from cache if already exists + chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) if flow_id in chat_manager.in_memory_cache: with chat_manager.in_memory_cache._lock: chat_manager.in_memory_cache.delete(flow_id) @@ -160,7 +162,7 @@ async def stream_build(flow_id: str): "handle_keys": [], } yield str(StreamData(event="message", data=input_keys_response)) - + chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) chat_manager.set_cache(flow_id, langchain_object) # We need to reset the chat history chat_manager.chat_history.empty_history(flow_id) diff --git a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py index 1e34da2aa..7f3572111 100644 --- a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -1,8 +1,8 @@ from datetime import timezone from typing import List from uuid import UUID -from langflow.database.models.component import Component, 
ComponentModel -from langflow.database.base import get_session +from langflow.services.database.models.component import Component, ComponentModel +from langflow.services.database.base import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.exc import IntegrityError diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index f4817d12a..58021cab7 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -1,8 +1,8 @@ from http import HTTPStatus from typing import Annotated, Optional -from langflow.cache.utils import save_uploaded_file -from langflow.database.models.flow import Flow +from langflow.services.cache.utils import save_uploaded_file +from langflow.services.database.models.flow import Flow from langflow.processing.process import process_graph_cached, process_tweaks from langflow.utils.logger import logger from langflow.settings import settings @@ -26,7 +26,7 @@ from langflow.interface.types import ( build_langchain_custom_component_list_from_path, ) -from langflow.database.base import get_session +from langflow.services.database.base import get_session from sqlmodel import Session # build router diff --git a/src/backend/langflow/api/v1/flow_styles.py b/src/backend/langflow/api/v1/flow_styles.py index 40e292eb3..6b0759df2 100644 --- a/src/backend/langflow/api/v1/flow_styles.py +++ b/src/backend/langflow/api/v1/flow_styles.py @@ -1,11 +1,11 @@ from uuid import UUID -from langflow.database.models.flow_style import ( +from langflow.services.database.models.flow_style import ( FlowStyle, FlowStyleCreate, FlowStyleRead, FlowStyleUpdate, ) -from langflow.database.base import get_session +from langflow.services.database.base import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException diff --git a/src/backend/langflow/api/v1/flows.py 
b/src/backend/langflow/api/v1/flows.py index 9f5042fcb..c7f1134d5 100644 --- a/src/backend/langflow/api/v1/flows.py +++ b/src/backend/langflow/api/v1/flows.py @@ -3,14 +3,14 @@ from uuid import UUID from langflow.settings import settings from langflow.api.utils import remove_api_keys from langflow.api.v1.schemas import FlowListCreate, FlowListRead -from langflow.database.models.flow import ( +from langflow.services.database.models.flow import ( Flow, FlowCreate, FlowRead, FlowReadWithStyle, FlowUpdate, ) -from langflow.database.base import get_session +from langflow.services.database.base import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from fastapi.encoders import jsonable_encoder diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index 0148dac6d..776e90034 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -1,7 +1,7 @@ from enum import Enum from pathlib import Path from typing import Any, Dict, List, Optional, Union -from langflow.database.models.flow import FlowCreate, FlowRead +from langflow.services.database.models.flow import FlowCreate, FlowRead from pydantic import BaseModel, Field, validator import json diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 97f47334e..cb0573bf7 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -1,4 +1,4 @@ -from langflow.cache.utils import memoize_dict +from langflow.services.cache.utils import memoize_dict from langflow.graph import Graph from langflow.utils.logger import logger diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py index d6c7b9023..f6b8a5488 100644 --- a/src/backend/langflow/interface/utils.py +++ b/src/backend/langflow/interface/utils.py @@ -9,7 +9,7 @@ import yaml from langchain.base_language import BaseLanguageModel from 
PIL.Image import Image from langflow.utils.logger import logger -from langflow.chat.config import ChatConfig +from langflow.services.chat.config import ChatConfig def load_file_into_dict(file_path: str) -> dict: From 8a2358dae0f371586546a6aeef1f3d932bff9532 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:18:20 -0300 Subject: [PATCH 27/90] =?UTF-8?q?=F0=9F=94=A7=20fix(custom=5Fcomponent.py)?= =?UTF-8?q?:=20fix=20import=20paths=20for=20session=5Fgetter=20and=20Flow?= =?UTF-8?q?=20models=20in=20custom=5Fcomponent.py=20file=20=F0=9F=94=A7=20?= =?UTF-8?q?fix(test=5Fcache=5Fmanager.py):=20fix=20import=20path=20for=20C?= =?UTF-8?q?acheManager=20in=20test=5Fcache=5Fmanager.py=20file=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fcustom=5Fcomponent.py):=20fix=20impor?= =?UTF-8?q?t=20path=20for=20Flow=20and=20FlowCreate=20models=20in=20test?= =?UTF-8?q?=5Fcustom=5Fcomponent.py=20file=20=F0=9F=94=A7=20fix(test=5Fdat?= =?UTF-8?q?abase.py):=20fix=20import=20path=20for=20Flow,=20FlowCreate,=20?= =?UTF-8?q?and=20FlowUpdate=20models=20in=20test=5Fdatabase.py=20file=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fwebsocket.py):=20fix=20import=20path?= =?UTF-8?q?=20for=20WebSocketDisconnect=20in=20test=5Fwebsocket.py=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/custom/custom_component.py | 4 ++-- tests/test_cache_manager.py | 2 +- tests/test_custom_component.py | 2 +- tests/test_database.py | 4 ++-- tests/test_websocket.py | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index ce8956660..0d93f8d75 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -6,8 +6,8 @@ from langflow.interface.custom.directory_reader import DirectoryReader from langflow.utils 
import validate -from langflow.database.base import session_getter -from langflow.database.models.flow import Flow +from langflow.services.database.base import session_getter +from langflow.services.database.models.flow import Flow from pydantic import Extra import yaml diff --git a/tests/test_cache_manager.py b/tests/test_cache_manager.py index f3e65481e..660512634 100644 --- a/tests/test_cache_manager.py +++ b/tests/test_cache_manager.py @@ -2,7 +2,7 @@ from io import StringIO import pandas as pd import pytest -from langflow.cache.manager import CacheManager +from langflow.services.cache.manager import CacheManager from PIL import Image diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index 199906dda..f20311cec 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -5,7 +5,7 @@ from uuid import uuid4 from fastapi import HTTPException -from langflow.database.models.flow import Flow, FlowCreate +from langflow.services.database.models.flow import Flow, FlowCreate from langflow.interface.custom.base import CustomComponent from langflow.interface.custom.component import ( Component, diff --git a/tests/test_database.py b/tests/test_database.py index bc512b6b0..6ebae5396 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -8,9 +8,9 @@ from fastapi.testclient import TestClient from fastapi.encoders import jsonable_encoder from langflow.api.v1.schemas import FlowListCreate -from langflow.database.models.flow import Flow, FlowCreate, FlowUpdate +from langflow.services.database.models.flow import Flow, FlowCreate, FlowUpdate -from langflow.database.models.flow_style import ( +from langflow.services.database.models.flow_style import ( FlowStyleCreate, FlowStyleRead, FlowStyleUpdate, diff --git a/tests/test_websocket.py b/tests/test_websocket.py index 57a0e95f6..dd668c287 100644 --- a/tests/test_websocket.py +++ b/tests/test_websocket.py @@ -1,6 +1,6 @@ from fastapi import WebSocketDisconnect -# from 
langflow.chat.manager import ChatManager +# from langflow.services.chat.manager import ChatManager import pytest From 7b2827f198a1a505558bb1a9b6636092f3e8654d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 22:18:45 -0300 Subject: [PATCH 28/90] =?UTF-8?q?=F0=9F=90=9B=20fix(conftest.py):=20fix=20?= =?UTF-8?q?client=5Ffixture=20to=20properly=20yield=20the=20TestClient=20i?= =?UTF-8?q?nstance=20and=20clear=20dependency=20overrides=20after=20usage?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 45a8f8f1f..e6cc2a855 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -114,8 +114,8 @@ def client_fixture(session: Session): app = create_app() app.dependency_overrides[get_session] = get_session_override - - yield TestClient(app) + with TestClient(app) as client: + yield client app.dependency_overrides.clear() From 3442521f646a38e2fa8830288671eb9e0a3e197c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 23:13:51 -0300 Subject: [PATCH 29/90] =?UTF-8?q?=F0=9F=8E=89=20feat(lazy=5Fload.py):=20ad?= =?UTF-8?q?d=20LazyLoadDictBase=20class=20to=20provide=20lazy=20loading=20?= =?UTF-8?q?of=20a=20dictionary=20of=20all=20types=20=F0=9F=90=9B=20fix(laz?= =?UTF-8?q?y=5Fload.py):=20implement=20=5Fbuild=5Fdict()=20and=20get=5Ftyp?= =?UTF-8?q?e=5Fdict()=20methods=20to=20avoid=20NotImplementedError?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/utils/lazy_load.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 src/backend/langflow/utils/lazy_load.py diff --git a/src/backend/langflow/utils/lazy_load.py b/src/backend/langflow/utils/lazy_load.py new file mode 100644 index 000000000..df0130acc --- /dev/null +++ b/src/backend/langflow/utils/lazy_load.py 
@@ -0,0 +1,15 @@ +class LazyLoadDictBase: + def __init__(self): + self._all_types_dict = None + + @property + def all_types_dict(self): + if self._all_types_dict is None: + self._all_types_dict = self._build_dict() + return self._all_types_dict + + def _build_dict(self): + raise NotImplementedError + + def get_type_dict(self): + raise NotImplementedError From f178e29ef45a36a67712f1ca12560aa275b43279 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 23:17:27 -0300 Subject: [PATCH 30/90] =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fllms=5Ftemplate?= =?UTF-8?q?.py):=20update=20import=20statement=20to=20use=20get=5Fsettings?= =?UTF-8?q?=5Fmanager=20function=20from=20langflow.services.utils=20module?= =?UTF-8?q?=20=F0=9F=94=A7=20fix(test=5Fprompts=5Ftemplate.py):=20update?= =?UTF-8?q?=20import=20statement=20to=20use=20get=5Fsettings=5Fmanager=20f?= =?UTF-8?q?unction=20from=20langflow.services.utils=20module=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fvectorstore=5Ftemplate.py):=20update?= =?UTF-8?q?=20import=20statement=20to=20use=20get=5Fsettings=5Fmanager=20f?= =?UTF-8?q?unction=20from=20langflow.services.utils=20module=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fllms=5Ftemplate.py):=20update=20asser?= =?UTF-8?q?tion=20to=20use=20settings=20from=20settings=5Fmanager=20instea?= =?UTF-8?q?d=20of=20settings=20module=20=F0=9F=94=A7=20fix(test=5Fprompts?= =?UTF-8?q?=5Ftemplate.py):=20update=20assertion=20to=20use=20settings=20f?= =?UTF-8?q?rom=20settings=5Fmanager=20instead=20of=20settings=20module=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(test=5Fvectorstore=5Ftemplate.py):=20update?= =?UTF-8?q?=20assertion=20to=20use=20settings=20from=20settings=5Fmanager?= =?UTF-8?q?=20instead=20of=20settings=20module?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/test_llms_template.py | 5 +++-- tests/test_prompts_template.py | 5 +++-- tests/test_vectorstore_template.py | 5 +++-- 3 files changed, 9 insertions(+), 6 
deletions(-) diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py index d8f9e96f3..f1b76e18e 100644 --- a/tests/test_llms_template.py +++ b/tests/test_llms_template.py @@ -1,13 +1,14 @@ from fastapi.testclient import TestClient -from langflow.settings import settings +from langflow.services.utils import get_settings_manager def test_llms_settings(client: TestClient): + settings_manager = get_settings_manager() response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() llms = json_response["llms"] - assert set(llms.keys()) == set(settings.LLMS) + assert set(llms.keys()) == set(settings_manager.settings.LLMS) # def test_hugging_face_hub(client: TestClient): diff --git a/tests/test_prompts_template.py b/tests/test_prompts_template.py index fa7a683bd..dde313c20 100644 --- a/tests/test_prompts_template.py +++ b/tests/test_prompts_template.py @@ -1,13 +1,14 @@ from fastapi.testclient import TestClient -from langflow.settings import settings +from langflow.services.utils import get_settings_manager def test_prompts_settings(client: TestClient): + settings_manager = get_settings_manager() response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() prompts = json_response["prompts"] - assert set(prompts.keys()) == set(settings.PROMPTS) + assert set(prompts.keys()) == set(settings_manager.settings.PROMPTS) def test_prompt_template(client: TestClient): diff --git a/tests/test_vectorstore_template.py b/tests/test_vectorstore_template.py index bac950ee1..6ae4843ac 100644 --- a/tests/test_vectorstore_template.py +++ b/tests/test_vectorstore_template.py @@ -1,12 +1,13 @@ from fastapi.testclient import TestClient -from langflow.settings import settings +from langflow.services.utils import get_settings_manager # check that all agents are in settings.agents # are in json_response["agents"] def test_vectorstores_settings(client: TestClient): + settings_manager = 
get_settings_manager() response = client.get("api/v1/all") assert response.status_code == 200 json_response = response.json() vectorstores = json_response["vectorstores"] - assert set(vectorstores.keys()) == set(settings.VECTORSTORES) + assert set(vectorstores.keys()) == set(settings_manager.settings.VECTORSTORES) From d43236fb98648843c47560af288f96755e4e2111 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 23:18:00 -0300 Subject: [PATCH 31/90] =?UTF-8?q?=F0=9F=94=A7=20chore(=5F=5Fmain=5F=5F.py)?= =?UTF-8?q?:=20import=20get=5Fsettings=5Fmanager=20function=20from=20langf?= =?UTF-8?q?low.services.utils=20module=20to=20improve=20code=20organizatio?= =?UTF-8?q?n=20and=20readability=20=F0=9F=94=A7=20chore(=5F=5Fmain=5F=5F.p?= =?UTF-8?q?y):=20remove=20unused=20import=20of=20settings=20module=20from?= =?UTF-8?q?=20langflow.settings=20package=20to=20clean=20up=20code=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(=5F=5Fmain=5F=5F.py):=20update=20reference?= =?UTF-8?q?s=20to=20settings=20object=20to=20use=20settings=5Fmanager.sett?= =?UTF-8?q?ings=20for=20better=20encapsulation=20and=20modularity?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/__main__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/backend/langflow/__main__.py b/src/backend/langflow/__main__.py index fa167f188..82d8bacb8 100644 --- a/src/backend/langflow/__main__.py +++ b/src/backend/langflow/__main__.py @@ -1,6 +1,7 @@ import sys import time import httpx +from langflow.services.utils import get_settings_manager from langflow.utils.util import get_number_of_workers from multiprocess import Process # type: ignore import platform @@ -12,7 +13,6 @@ from rich import box from rich import print as rprint import typer from langflow.main import setup_app -from langflow.settings import settings from langflow.utils.logger import configure, logger import webbrowser from dotenv import 
load_dotenv @@ -30,19 +30,19 @@ def update_settings( """Update the settings from a config file.""" # Check for database_url in the environment variables - + settings_manager = get_settings_manager() if config: logger.debug(f"Loading settings from {config}") - settings.update_from_yaml(config, dev=dev) + settings_manager.settings.update_from_yaml(config, dev=dev) if remove_api_keys: logger.debug(f"Setting remove_api_keys to {remove_api_keys}") - settings.update_settings(REMOVE_API_KEYS=remove_api_keys) + settings_manager.settings.update_settings(REMOVE_API_KEYS=remove_api_keys) if cache: logger.debug(f"Setting cache to {cache}") - settings.update_settings(CACHE=cache) + settings_manager.settings.update_settings(CACHE=cache) if components_path: logger.debug(f"Adding component path {components_path}") - settings.update_settings(COMPONENTS_PATH=components_path) + settings_manager.settings.update_settings(COMPONENTS_PATH=components_path) def serve_on_jcloud(): From 2fcbfa25a5fcf6f6708bb6bb556f3a6de2497d52 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 23:18:31 -0300 Subject: [PATCH 32/90] =?UTF-8?q?=F0=9F=8E=89=20feat(utils.py):=20add=20ne?= =?UTF-8?q?w=20utility=20function=20`get=5Fsettings=5Fmanager()`=20to=20re?= =?UTF-8?q?trieve=20the=20settings=20manager=20from=20the=20service=20mana?= =?UTF-8?q?ger=20=F0=9F=8E=89=20feat(utils.py):=20add=20new=20utility=20fu?= =?UTF-8?q?nction=20`get=5Fsession()`=20to=20retrieve=20a=20session=20from?= =?UTF-8?q?=20the=20database=20manager?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/services/utils.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 src/backend/langflow/services/utils.py diff --git a/src/backend/langflow/services/utils.py b/src/backend/langflow/services/utils.py new file mode 100644 index 000000000..07c67dfbe --- /dev/null +++ b/src/backend/langflow/services/utils.py @@ -0,0 +1,14 @@ 
+from langflow.services import ServiceType, service_manager +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from langflow.services.settings.manager import SettingsManager + + +def get_settings_manager() -> "SettingsManager": + return service_manager.get(ServiceType.SETTINGS_MANAGER) + + +def get_session(): + db_manager = service_manager.get(ServiceType.DATABASE_MANAGER) + yield from db_manager.get_session() From f72a42213c3783c520bb5a2e0f20e2f1bc6d8c94 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 23:19:14 -0300 Subject: [PATCH 33/90] =?UTF-8?q?=F0=9F=94=A5=20refactor(base.py):=20remov?= =?UTF-8?q?e=20unused=20imports=20and=20code=20related=20to=20database=20e?= =?UTF-8?q?ngine=20creation=20and=20session=20handling=20=F0=9F=94=A5=20re?= =?UTF-8?q?factor(base.py):=20remove=20unused=20code=20related=20to=20load?= =?UTF-8?q?ing=20settings=20from=20YAML=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/services/database/base.py | 61 ------------------- .../langflow/services/settings/base.py | 4 -- 2 files changed, 65 deletions(-) diff --git a/src/backend/langflow/services/database/base.py b/src/backend/langflow/services/database/base.py index fffb39096..cfc434f25 100644 --- a/src/backend/langflow/services/database/base.py +++ b/src/backend/langflow/services/database/base.py @@ -1,5 +1,4 @@ from contextlib import contextmanager -import os from pathlib import Path from langflow.services.base import Service from sqlmodel import SQLModel, Session, create_engine @@ -8,61 +7,6 @@ from alembic.config import Config from alembic import command -class Engine: - _instance = None - - @classmethod - def get(cls): - logger.debug("Getting database engine") - if cls._instance is None: - cls.create() - return cls._instance - - @classmethod - def create(cls): - logger.debug("Creating database engine") - from langflow.settings import settings - - if langflow_database_url := 
os.getenv("LANGFLOW_DATABASE_URL"): - settings.DATABASE_URL = langflow_database_url - logger.debug("Using LANGFLOW_DATABASE_URL") - - if settings.DATABASE_URL and settings.DATABASE_URL.startswith("sqlite"): - connect_args = {"check_same_thread": False} - else: - connect_args = {} - if not settings.DATABASE_URL: - raise RuntimeError("No database_url provided") - cls._instance = create_engine(settings.DATABASE_URL, connect_args=connect_args) - - @classmethod - def update(cls): - logger.debug("Updating database engine") - cls._instance = None - cls.create() - - -def create_db_and_tables(): - logger.debug("Creating database and tables") - try: - SQLModel.metadata.create_all(Engine.get()) - except Exception as exc: - logger.error(f"Error creating database and tables: {exc}") - raise RuntimeError("Error creating database and tables") from exc - # Now check if the table Flow exists, if not, something went wrong - # and we need to create the tables again. - from sqlalchemy import inspect - - inspector = inspect(Engine.get()) - if "flow" not in inspector.get_table_names(): - logger.error("Something went wrong creating the database and tables.") - logger.error("Please check your database settings.") - - raise RuntimeError("Something went wrong creating the database and tables.") - else: - logger.debug("Database and tables created successfully") - - class DatabaseManager(Service): name = "database_manager" @@ -136,11 +80,6 @@ def session_getter(db_manager: DatabaseManager): session.close() -def get_session(): - with Session(Engine.get()) as session: - yield session - - def initialize_database(): logger.debug("Initializing database") from langflow.services import service_manager, ServiceType diff --git a/src/backend/langflow/services/settings/base.py b/src/backend/langflow/services/settings/base.py index 9843339a5..1eb2793b3 100644 --- a/src/backend/langflow/services/settings/base.py +++ b/src/backend/langflow/services/settings/base.py @@ -166,7 +166,3 @@ def 
load_settings_from_yaml(file_path: str) -> Settings: logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") return Settings(**settings_dict) - - -langflow_dir = Path(__file__).parent.parent.parent -settings = load_settings_from_yaml(str(langflow_dir / "config.yaml")) From 6ca7308e3c368362bdfaa6018af78efb2e80953d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 23:20:35 -0300 Subject: [PATCH 34/90] updates imports to use settings_manager --- src/backend/langflow/api/v1/components.py | 2 +- src/backend/langflow/api/v1/endpoints.py | 14 +- src/backend/langflow/api/v1/flow_styles.py | 2 +- src/backend/langflow/api/v1/flows.py | 7 +- src/backend/langflow/graph/graph/base.py | 10 +- src/backend/langflow/graph/graph/constants.py | 59 ++++-- src/backend/langflow/graph/vertex/base.py | 4 +- src/backend/langflow/interface/agents/base.py | 9 +- src/backend/langflow/interface/base.py | 8 +- src/backend/langflow/interface/chains/base.py | 7 +- .../interface/document_loaders/base.py | 7 +- .../langflow/interface/embeddings/base.py | 7 +- src/backend/langflow/interface/listing.py | 63 ++++--- src/backend/langflow/interface/llms/base.py | 7 +- .../langflow/interface/memories/base.py | 7 +- .../langflow/interface/output_parsers/base.py | 7 +- .../langflow/interface/prompts/base.py | 7 +- .../langflow/interface/retrievers/base.py | 7 +- .../langflow/interface/text_splitters/base.py | 7 +- .../langflow/interface/toolkits/base.py | 7 +- src/backend/langflow/interface/tools/base.py | 9 +- .../langflow/interface/utilities/base.py | 7 +- src/backend/langflow/interface/utils.py | 9 +- .../langflow/interface/vector_store/base.py | 7 +- src/backend/langflow/settings.py | 171 ------------------ 25 files changed, 182 insertions(+), 269 deletions(-) delete mode 100644 src/backend/langflow/settings.py diff --git a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py index 7f3572111..4071461fb 100644 --- 
a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -2,7 +2,7 @@ from datetime import timezone from typing import List from uuid import UUID from langflow.services.database.models.component import Component, ComponentModel -from langflow.services.database.base import get_session +from langflow.services.utils import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.exc import IntegrityError diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index 58021cab7..92ccaffc8 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -4,9 +4,8 @@ from typing import Annotated, Optional from langflow.services.cache.utils import save_uploaded_file from langflow.services.database.models.flow import Flow from langflow.processing.process import process_graph_cached, process_tweaks +from langflow.services.utils import get_settings_manager from langflow.utils.logger import logger -from langflow.settings import settings - from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body from langflow.interface.custom.custom_component import CustomComponent @@ -26,7 +25,7 @@ from langflow.interface.types import ( build_langchain_custom_component_list_from_path, ) -from langflow.services.database.base import get_session +from langflow.services.utils import get_session from sqlmodel import Session # build router @@ -40,11 +39,14 @@ def get_all(): # custom_components is a list of dicts # need to merge all the keys into one dict custom_components_from_file = {} - if settings.COMPONENTS_PATH: - logger.info(f"Building custom components from {settings.COMPONENTS_PATH}") + settings_manager = get_settings_manager() + if settings_manager.settings.COMPONENTS_PATH: + logger.info( + f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}" + ) custom_component_dicts = [ 
build_langchain_custom_component_list_from_path(str(path)) - for path in settings.COMPONENTS_PATH + for path in settings_manager.settings.COMPONENTS_PATH ] logger.info(f"Loading {len(custom_component_dicts)} custom components") diff --git a/src/backend/langflow/api/v1/flow_styles.py b/src/backend/langflow/api/v1/flow_styles.py index 6b0759df2..6eacf8d86 100644 --- a/src/backend/langflow/api/v1/flow_styles.py +++ b/src/backend/langflow/api/v1/flow_styles.py @@ -5,7 +5,7 @@ from langflow.services.database.models.flow_style import ( FlowStyleRead, FlowStyleUpdate, ) -from langflow.services.database.base import get_session +from langflow.services.utils import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException diff --git a/src/backend/langflow/api/v1/flows.py b/src/backend/langflow/api/v1/flows.py index c7f1134d5..1ecbc85f4 100644 --- a/src/backend/langflow/api/v1/flows.py +++ b/src/backend/langflow/api/v1/flows.py @@ -1,6 +1,5 @@ from typing import List from uuid import UUID -from langflow.settings import settings from langflow.api.utils import remove_api_keys from langflow.api.v1.schemas import FlowListCreate, FlowListRead from langflow.services.database.models.flow import ( @@ -10,7 +9,8 @@ from langflow.services.database.models.flow import ( FlowReadWithStyle, FlowUpdate, ) -from langflow.services.database.base import get_session +from langflow.services.utils import get_session +from langflow.services.utils import get_settings_manager from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from fastapi.encoders import jsonable_encoder @@ -61,7 +61,8 @@ def update_flow( if not db_flow: raise HTTPException(status_code=404, detail="Flow not found") flow_data = flow.dict(exclude_unset=True) - if settings.REMOVE_API_KEYS: + settings_manager = get_settings_manager() + if settings_manager.settings.REMOVE_API_KEYS: flow_data = remove_api_keys(flow_data) for key, value in 
flow_data.items(): setattr(db_flow, key, value) diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 99b4e2b3d..f0d3986cf 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -1,7 +1,7 @@ from typing import Dict, Generator, List, Type, Union from langflow.graph.edge.base import Edge -from langflow.graph.graph.constants import VERTEX_TYPE_MAP +from langflow.graph.graph.constants import lazy_load_vertex_dict from langflow.graph.vertex.base import Vertex from langflow.graph.vertex.types import ( FileToolVertex, @@ -187,10 +187,12 @@ class Graph: """Returns the node class based on the node type.""" if node_type in FILE_TOOLS: return FileToolVertex - if node_type in VERTEX_TYPE_MAP: - return VERTEX_TYPE_MAP[node_type] + if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: + return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type] return ( - VERTEX_TYPE_MAP[node_lc_type] if node_lc_type in VERTEX_TYPE_MAP else Vertex + lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_lc_type] + if node_lc_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP + else Vertex ) def _build_vertices(self) -> List[Vertex]: diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/langflow/graph/graph/constants.py index 5e5c3b709..c9fea48b5 100644 --- a/src/backend/langflow/graph/graph/constants.py +++ b/src/backend/langflow/graph/graph/constants.py @@ -1,4 +1,3 @@ -from langflow.graph.vertex.base import Vertex from langflow.graph.vertex import types from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator @@ -15,23 +14,45 @@ from langflow.interface.wrappers.base import wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.retrievers.base import retriever_creator from langflow.interface.custom.base import custom_component_creator -from typing import Dict, Type +from 
langflow.utils.lazy_load import LazyLoadDictBase -VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = { - **{t: types.PromptVertex for t in prompt_creator.to_list()}, - **{t: types.AgentVertex for t in agent_creator.to_list()}, - **{t: types.ChainVertex for t in chain_creator.to_list()}, - **{t: types.ToolVertex for t in tool_creator.to_list()}, - **{t: types.ToolkitVertex for t in toolkits_creator.to_list()}, - **{t: types.WrapperVertex for t in wrapper_creator.to_list()}, - **{t: types.LLMVertex for t in llm_creator.to_list()}, - **{t: types.MemoryVertex for t in memory_creator.to_list()}, - **{t: types.EmbeddingVertex for t in embedding_creator.to_list()}, - **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()}, - **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, - **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, - **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, - **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()}, - **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, -} +class VertexTypesDict(LazyLoadDictBase): + def __init__(self): + self._all_types_dict = None + + @property + def VERTEX_TYPE_MAP(self): + return self.all_types_dict + + def _build_dict(self): + langchain_types_dict = self.get_type_dict() + return { + **langchain_types_dict, + "Custom": ["Custom Tool", "Python Function"], + } + + def get_type_dict(self): + return { + **{t: types.PromptVertex for t in prompt_creator.to_list()}, + **{t: types.AgentVertex for t in agent_creator.to_list()}, + **{t: types.ChainVertex for t in chain_creator.to_list()}, + **{t: types.ToolVertex for t in tool_creator.to_list()}, + **{t: types.ToolkitVertex for t in toolkits_creator.to_list()}, + **{t: types.WrapperVertex for t in wrapper_creator.to_list()}, + **{t: types.LLMVertex for t in llm_creator.to_list()}, + **{t: types.MemoryVertex for t in memory_creator.to_list()}, + **{t: 
types.EmbeddingVertex for t in embedding_creator.to_list()}, + **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()}, + **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, + **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, + **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, + **{ + t: types.CustomComponentVertex + for t in custom_component_creator.to_list() + }, + **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, + } + + +lazy_load_vertex_dict = VertexTypesDict() diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index cb7dc4905..ac7f72b4d 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -1,6 +1,6 @@ import ast from langflow.interface.initialize import loading -from langflow.interface.listing import ALL_TYPES_DICT +from langflow.interface.listing import lazy_load_dict from langflow.utils.constants import DIRECT_TYPES from langflow.utils.logger import logger from langflow.utils.util import sync_to_async @@ -62,7 +62,7 @@ class Vertex: ) if self.base_type is None: - for base_type, value in ALL_TYPES_DICT.items(): + for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items(): if self.vertex_type in value: self.base_type = base_type break diff --git a/src/backend/langflow/interface/agents/base.py b/src/backend/langflow/interface/agents/base.py index cc5214c0c..ec8c42aba 100644 --- a/src/backend/langflow/interface/agents/base.py +++ b/src/backend/langflow/interface/agents/base.py @@ -5,7 +5,8 @@ from langchain.agents import types from langflow.custom.customs import get_custom_nodes from langflow.interface.agents.custom import CUSTOM_AGENTS from langflow.interface.base import LangChainTypeCreator -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.agents import AgentFrontendNode 
from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class, build_template_from_method @@ -53,13 +54,17 @@ class AgentCreator(LangChainTypeCreator): # Now this is a generator def to_list(self) -> List[str]: names = [] + settings_manager = get_settings_manager() for _, agent in self.type_to_loader_dict.items(): agent_name = ( agent.function_name() if hasattr(agent, "function_name") else agent.__name__ ) - if agent_name in settings.AGENTS or settings.DEV: + if ( + agent_name in settings_manager.settings.AGENTS + or settings_manager.settings.DEV + ): names.append(agent_name) return names diff --git a/src/backend/langflow/interface/base.py b/src/backend/langflow/interface/base.py index 76d859b1f..d1ed83b5a 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/langflow/interface/base.py @@ -2,13 +2,14 @@ from abc import ABC, abstractmethod from typing import Any, Dict, List, Optional, Type, Union from langchain.chains.base import Chain from langchain.agents import AgentExecutor +from langflow.services.utils import get_settings_manager from pydantic import BaseModel from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template from langflow.utils.logger import logger -from langflow.settings import settings + # Assuming necessary imports for Field, Template, and FrontendNode classes @@ -26,9 +27,12 @@ class LangChainTypeCreator(BaseModel, ABC): @property def docs_map(self) -> Dict[str, str]: """A dict with the name of the component as key and the documentation link as value.""" + settings_manager = get_settings_manager() if self.name_docs_dict is None: try: - type_settings = getattr(settings, self.type_name.upper()) + type_settings = getattr( + settings_manager.settings, self.type_name.upper() + ) self.name_docs_dict = { name: value_dict["documentation"] for name, value_dict in type_settings.items() diff --git 
a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py index fe58397b2..b906dbd25 100644 --- a/src/backend/langflow/interface/chains/base.py +++ b/src/backend/langflow/interface/chains/base.py @@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional, Type from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.chains import ChainFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class, build_template_from_method @@ -30,6 +31,7 @@ class ChainCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: + settings_manager = get_settings_manager() self.type_dict: dict[str, Any] = { chain_name: import_class(f"langchain.chains.{chain_name}") for chain_name in chains.__all__ @@ -43,7 +45,8 @@ class ChainCreator(LangChainTypeCreator): self.type_dict = { name: chain for name, chain in self.type_dict.items() - if name in settings.CHAINS or settings.DEV + if name in settings_manager.settings.CHAINS + or settings_manager.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/document_loaders/base.py b/src/backend/langflow/interface/document_loaders/base.py index ebae1e5a4..db0832ff3 100644 --- a/src/backend/langflow/interface/document_loaders/base.py +++ b/src/backend/langflow/interface/document_loaders/base.py @@ -1,9 +1,10 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator +from langflow.services.utils import get_settings_manager from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode from langflow.interface.custom_lists import documentloaders_type_to_cls_dict -from 
langflow.settings import settings + from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -30,10 +31,12 @@ class DocumentLoaderCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ documentloader.__name__ for documentloader in self.type_to_loader_dict.values() - if documentloader.__name__ in settings.DOCUMENTLOADERS or settings.DEV + if documentloader.__name__ in settings_manager.settings.DOCUMENTLOADERS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/embeddings/base.py b/src/backend/langflow/interface/embeddings/base.py index 7572a06cc..169985d37 100644 --- a/src/backend/langflow/interface/embeddings/base.py +++ b/src/backend/langflow/interface/embeddings/base.py @@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import embedding_type_to_cls_dict -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.base import FrontendNode from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode from langflow.utils.logger import logger @@ -32,10 +33,12 @@ class EmbeddingCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ embedding.__name__ for embedding in self.type_to_loader_dict.values() - if embedding.__name__ in settings.EMBEDDINGS or settings.DEV + if embedding.__name__ in settings_manager.settings.EMBEDDINGS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py index fe3090f65..1cab1efbc 100644 --- a/src/backend/langflow/interface/listing.py +++ b/src/backend/langflow/interface/listing.py @@ -14,34 +14,43 @@ from langflow.interface.wrappers.base import 
wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.retrievers.base import retriever_creator from langflow.interface.custom.base import custom_component_creator +from langflow.utils.lazy_load import LazyLoadDictBase -def get_type_dict(): - return { - "agents": agent_creator.to_list(), - "prompts": prompt_creator.to_list(), - "llms": llm_creator.to_list(), - "tools": tool_creator.to_list(), - "chains": chain_creator.to_list(), - "memory": memory_creator.to_list(), - "toolkits": toolkits_creator.to_list(), - "wrappers": wrapper_creator.to_list(), - "documentLoaders": documentloader_creator.to_list(), - "vectorStore": vectorstore_creator.to_list(), - "embeddings": embedding_creator.to_list(), - "textSplitters": textsplitter_creator.to_list(), - "utilities": utility_creator.to_list(), - "outputParsers": output_parser_creator.to_list(), - "retrievers": retriever_creator.to_list(), - "custom_components": custom_component_creator.to_list(), - } +class AllTypesDict(LazyLoadDictBase): + def __init__(self): + self._all_types_dict = None + + @property + def ALL_TYPES_DICT(self): + return self.all_types_dict + + def _build_dict(self): + langchain_types_dict = self.get_type_dict() + return { + **langchain_types_dict, + "Custom": ["Custom Tool", "Python Function"], + } + + def get_type_dict(self): + return { + "agents": agent_creator.to_list(), + "prompts": prompt_creator.to_list(), + "llms": llm_creator.to_list(), + "tools": tool_creator.to_list(), + "chains": chain_creator.to_list(), + "memory": memory_creator.to_list(), + "toolkits": toolkits_creator.to_list(), + "wrappers": wrapper_creator.to_list(), + "documentLoaders": documentloader_creator.to_list(), + "vectorStore": vectorstore_creator.to_list(), + "embeddings": embedding_creator.to_list(), + "textSplitters": textsplitter_creator.to_list(), + "utilities": utility_creator.to_list(), + "outputParsers": output_parser_creator.to_list(), + "retrievers": 
retriever_creator.to_list(), + "custom_components": custom_component_creator.to_list(), + } -LANGCHAIN_TYPES_DICT = get_type_dict() - -# Now we'll build a dict with Langchain types and ours - -ALL_TYPES_DICT = { - **LANGCHAIN_TYPES_DICT, - "Custom": ["Custom Tool", "Python Function"], -} +lazy_load_dict = AllTypesDict() diff --git a/src/backend/langflow/interface/llms/base.py b/src/backend/langflow/interface/llms/base.py index 06aedd3cb..f562b99ed 100644 --- a/src/backend/langflow/interface/llms/base.py +++ b/src/backend/langflow/interface/llms/base.py @@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import llm_type_to_cls_dict -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.llms import LLMFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -33,10 +34,12 @@ class LLMCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ llm.__name__ for llm in self.type_to_loader_dict.values() - if llm.__name__ in settings.LLMS or settings.DEV + if llm.__name__ in settings_manager.settings.LLMS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/memories/base.py b/src/backend/langflow/interface/memories/base.py index 9cd25381c..70665602c 100644 --- a/src/backend/langflow/interface/memories/base.py +++ b/src/backend/langflow/interface/memories/base.py @@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import memory_type_to_cls_dict -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.base import FrontendNode from 
langflow.template.frontend_node.memories import MemoryFrontendNode from langflow.utils.logger import logger @@ -48,10 +49,12 @@ class MemoryCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ memory.__name__ for memory in self.type_to_loader_dict.values() - if memory.__name__ in settings.MEMORIES or settings.DEV + if memory.__name__ in settings_manager.settings.MEMORIES + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/output_parsers/base.py b/src/backend/langflow/interface/output_parsers/base.py index b5235ad58..256b521e1 100644 --- a/src/backend/langflow/interface/output_parsers/base.py +++ b/src/backend/langflow/interface/output_parsers/base.py @@ -4,7 +4,8 @@ from langchain import output_parsers from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class, build_template_from_method @@ -23,6 +24,7 @@ class OutputParserCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: + settings_manager = get_settings_manager() self.type_dict = { output_parser_name: import_class( f"langchain.output_parsers.{output_parser_name}" @@ -33,7 +35,8 @@ class OutputParserCreator(LangChainTypeCreator): self.type_dict = { name: output_parser for name, output_parser in self.type_dict.items() - if name in settings.OUTPUT_PARSERS or settings.DEV + if name in settings_manager.settings.OUTPUT_PARSERS + or settings_manager.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index c062a4a35..5aa41dfb2 100644 
--- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -5,7 +5,8 @@ from langchain import prompts from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.prompts import PromptFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -20,6 +21,7 @@ class PromptCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: + settings_manager = get_settings_manager() if self.type_dict is None: self.type_dict = { prompt_name: import_class(f"langchain.prompts.{prompt_name}") @@ -34,7 +36,8 @@ class PromptCreator(LangChainTypeCreator): self.type_dict = { name: prompt for name, prompt in self.type_dict.items() - if name in settings.PROMPTS or settings.DEV + if name in settings_manager.settings.PROMPTS + or settings_manager.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/retrievers/base.py b/src/backend/langflow/interface/retrievers/base.py index 759cd5916..db1cfd165 100644 --- a/src/backend/langflow/interface/retrievers/base.py +++ b/src/backend/langflow/interface/retrievers/base.py @@ -4,7 +4,8 @@ from langchain import retrievers from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.retrievers import RetrieverFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_method, build_template_from_class @@ -48,10 +49,12 @@ class RetrieverCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: + 
settings_manager = get_settings_manager() return [ retriever for retriever in self.type_to_loader_dict.keys() - if retriever in settings.RETRIEVERS or settings.DEV + if retriever in settings_manager.settings.RETRIEVERS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/text_splitters/base.py b/src/backend/langflow/interface/text_splitters/base.py index 787f20d82..87b778c4c 100644 --- a/src/backend/langflow/interface/text_splitters/base.py +++ b/src/backend/langflow/interface/text_splitters/base.py @@ -1,9 +1,10 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator +from langflow.services.utils import get_settings_manager from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode from langflow.interface.custom_lists import textsplitter_type_to_cls_dict -from langflow.settings import settings + from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -30,10 +31,12 @@ class TextSplitterCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ textsplitter.__name__ for textsplitter in self.type_to_loader_dict.values() - if textsplitter.__name__ in settings.TEXTSPLITTERS or settings.DEV + if textsplitter.__name__ in settings_manager.settings.TEXTSPLITTERS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/toolkits/base.py b/src/backend/langflow/interface/toolkits/base.py index b7c165a4d..c13ffdbd9 100644 --- a/src/backend/langflow/interface/toolkits/base.py +++ b/src/backend/langflow/interface/toolkits/base.py @@ -4,7 +4,8 @@ from langchain.agents import agent_toolkits from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class, import_module -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.utils.logger 
import logger from langflow.utils.util import build_template_from_class @@ -29,13 +30,15 @@ class ToolkitCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: + settings_manager = get_settings_manager() self.type_dict = { toolkit_name: import_class( f"langchain.agents.agent_toolkits.{toolkit_name}" ) # if toolkit_name is not lower case it is a class for toolkit_name in agent_toolkits.__all__ - if not toolkit_name.islower() and toolkit_name in settings.TOOLKITS + if not toolkit_name.islower() + and toolkit_name in settings_manager.settings.TOOLKITS } return self.type_dict diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py index 8c9158c05..1dbc9a6ed 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/langflow/interface/tools/base.py @@ -15,7 +15,8 @@ from langflow.interface.tools.constants import ( OTHER_TOOLS, ) from langflow.interface.tools.util import get_tool_params -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.field.base import TemplateField from langflow.template.template.base import Template from langflow.utils import util @@ -66,6 +67,7 @@ class ToolCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: + settings_manager = get_settings_manager() if self.tools_dict is None: all_tools = {} @@ -74,7 +76,10 @@ class ToolCreator(LangChainTypeCreator): tool_name = tool_params.get("name") or tool - if tool_name in settings.TOOLS or settings.DEV: + if ( + tool_name in settings_manager.settings.TOOLS + or settings_manager.settings.DEV + ): if tool_name == "JsonSpec": tool_params["path"] = tool_params.pop("dict_") # type: ignore all_tools[tool_name] = { diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/langflow/interface/utilities/base.py index b0ee4d4be..eb8cd60af 100644 --- 
a/src/backend/langflow/interface/utilities/base.py +++ b/src/backend/langflow/interface/utilities/base.py @@ -5,7 +5,8 @@ from langchain import SQLDatabase, utilities from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.utilities import UtilitiesFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -26,6 +27,7 @@ class UtilityCreator(LangChainTypeCreator): from the langchain.chains module and filtering them according to the settings.utilities list. """ if self.type_dict is None: + settings_manager = get_settings_manager() self.type_dict = { utility_name: import_class(f"langchain.utilities.{utility_name}") for utility_name in utilities.__all__ @@ -35,7 +37,8 @@ class UtilityCreator(LangChainTypeCreator): self.type_dict = { name: utility for name, utility in self.type_dict.items() - if name in settings.UTILITIES or settings.DEV + if name in settings_manager.settings.UTILITIES + or settings_manager.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py index f6b8a5488..1fddbf80f 100644 --- a/src/backend/langflow/interface/utils.py +++ b/src/backend/langflow/interface/utils.py @@ -10,6 +10,7 @@ from langchain.base_language import BaseLanguageModel from PIL.Image import Image from langflow.utils.logger import logger from langflow.services.chat.config import ChatConfig +from langflow.services.utils import get_settings_manager def load_file_into_dict(file_path: str) -> dict: @@ -63,13 +64,11 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]: def setup_llm_caching(): """Setup LLM caching.""" - - from langflow.settings import settings - + settings_manager = 
get_settings_manager() try: - set_langchain_cache(settings) + set_langchain_cache(settings_manager.settings) except ImportError: - logger.warning(f"Could not import {settings.CACHE}. ") + logger.warning(f"Could not import {settings_manager.settings.CACHE}. ") except Exception as exc: logger.warning(f"Could not setup LLM caching. Error: {exc}") diff --git a/src/backend/langflow/interface/vector_store/base.py b/src/backend/langflow/interface/vector_store/base.py index 4a937ba89..4b8ca2b64 100644 --- a/src/backend/langflow/interface/vector_store/base.py +++ b/src/backend/langflow/interface/vector_store/base.py @@ -4,7 +4,8 @@ from langchain import vectorstores from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_method @@ -43,10 +44,12 @@ class VectorstoreCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ vectorstore for vectorstore in self.type_to_loader_dict.keys() - if vectorstore in settings.VECTORSTORES or settings.DEV + if vectorstore in settings_manager.settings.VECTORSTORES + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/settings.py b/src/backend/langflow/settings.py deleted file mode 100644 index 439b3a1e4..000000000 --- a/src/backend/langflow/settings.py +++ /dev/null @@ -1,171 +0,0 @@ -import contextlib -import json -import os -from typing import Optional, List -from pathlib import Path - -import yaml -from pydantic import BaseSettings, root_validator, validator -from langflow.utils.logger import logger - -BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") - - -class Settings(BaseSettings): - CHAINS: dict = {} - 
AGENTS: dict = {} - PROMPTS: dict = {} - LLMS: dict = {} - TOOLS: dict = {} - MEMORIES: dict = {} - EMBEDDINGS: dict = {} - VECTORSTORES: dict = {} - DOCUMENTLOADERS: dict = {} - WRAPPERS: dict = {} - RETRIEVERS: dict = {} - TOOLKITS: dict = {} - TEXTSPLITTERS: dict = {} - UTILITIES: dict = {} - OUTPUT_PARSERS: dict = {} - CUSTOM_COMPONENTS: dict = {} - - DEV: bool = False - DATABASE_URL: Optional[str] = None - CACHE: str = "InMemoryCache" - REMOVE_API_KEYS: bool = False - COMPONENTS_PATH: List[str] = [] - - @validator("DATABASE_URL", pre=True) - def set_database_url(cls, value): - if not value: - logger.debug( - "No database_url provided, trying LANGFLOW_DATABASE_URL env variable" - ) - if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): - value = langflow_database_url - logger.debug("Using LANGFLOW_DATABASE_URL env variable.") - else: - logger.debug("No DATABASE_URL env variable, using sqlite database") - value = "sqlite:///./langflow.db" - - return value - - @validator("COMPONENTS_PATH", pre=True) - def set_components_path(cls, value): - if os.getenv("LANGFLOW_COMPONENTS_PATH"): - logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") - langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH") - if ( - Path(langflow_component_path).exists() - and langflow_component_path not in value - ): - if isinstance(langflow_component_path, list): - for path in langflow_component_path: - if path not in value: - value.append(path) - logger.debug( - f"Extending {langflow_component_path} to components_path" - ) - elif langflow_component_path not in value: - value.append(langflow_component_path) - logger.debug( - f"Appending {langflow_component_path} to components_path" - ) - - if not value: - value = [BASE_COMPONENTS_PATH] - logger.debug("Setting default components path to components_path") - elif BASE_COMPONENTS_PATH not in value: - value.append(BASE_COMPONENTS_PATH) - logger.debug("Adding default components path to components_path") - - 
logger.debug(f"Components path: {value}") - return value - - class Config: - validate_assignment = True - extra = "ignore" - env_prefix = "LANGFLOW_" - - @root_validator(allow_reuse=True) - def validate_lists(cls, values): - for key, value in values.items(): - if key != "dev" and not value: - values[key] = [] - return values - - def update_from_yaml(self, file_path: str, dev: bool = False): - new_settings = load_settings_from_yaml(file_path) - self.CHAINS = new_settings.CHAINS or {} - self.AGENTS = new_settings.AGENTS or {} - self.PROMPTS = new_settings.PROMPTS or {} - self.LLMS = new_settings.LLMS or {} - self.TOOLS = new_settings.TOOLS or {} - self.MEMORIES = new_settings.MEMORIES or {} - self.WRAPPERS = new_settings.WRAPPERS or {} - self.TOOLKITS = new_settings.TOOLKITS or {} - self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {} - self.UTILITIES = new_settings.UTILITIES or {} - self.EMBEDDINGS = new_settings.EMBEDDINGS or {} - self.VECTORSTORES = new_settings.VECTORSTORES or {} - self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {} - self.RETRIEVERS = new_settings.RETRIEVERS or {} - self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {} - self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {} - self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or [] - self.DEV = dev - - def update_settings(self, **kwargs): - logger.debug("Updating settings") - for key, value in kwargs.items(): - # value may contain sensitive information, so we don't want to log it - if not hasattr(self, key): - logger.debug(f"Key {key} not found in settings") - continue - logger.debug(f"Updating {key}") - if isinstance(getattr(self, key), list): - # value might be a '[something]' string - with contextlib.suppress(json.decoder.JSONDecodeError): - value = json.loads(str(value)) - if isinstance(value, list): - for item in value: - if item not in getattr(self, key): - getattr(self, key).append(item) - logger.debug(f"Extended {key}") - else: - getattr(self, key).append(value) - 
logger.debug(f"Appended {key}") - - else: - setattr(self, key, value) - logger.debug(f"Updated {key}") - logger.debug(f"{key}: {getattr(self, key)}") - - -def save_settings_to_yaml(settings: Settings, file_path: str): - with open(file_path, "w") as f: - settings_dict = settings.dict() - yaml.dump(settings_dict, f) - - -def load_settings_from_yaml(file_path: str) -> Settings: - # Check if a string is a valid path or a file name - if "/" not in file_path: - # Get current path - current_path = os.path.dirname(os.path.abspath(__file__)) - - file_path = os.path.join(current_path, file_path) - - with open(file_path, "r") as f: - settings_dict = yaml.safe_load(f) - settings_dict = {k.upper(): v for k, v in settings_dict.items()} - - for key in settings_dict: - if key not in Settings.__fields__.keys(): - raise KeyError(f"Key {key} not found in settings") - logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") - - return Settings(**settings_dict) - - -settings = load_settings_from_yaml("config.yaml") From f9112facdac115c8b1dca6ad040aae8f0858a87e Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sat, 5 Aug 2023 23:27:38 -0300 Subject: [PATCH 35/90] =?UTF-8?q?=F0=9F=90=9B=20fix(custom=5Fcomponent.py)?= =?UTF-8?q?:=20import=20get=5Fdb=5Fmanager=20function=20from=20langflow.se?= =?UTF-8?q?rvices.utils=20to=20fix=20NameError=20=F0=9F=90=9B=20fix(factor?= =?UTF-8?q?y.py):=20raise=20ValueError=20if=20no=20database=20URL=20provid?= =?UTF-8?q?ed=20in=20settings=20=F0=9F=90=9B=20fix(manager.py):=20change?= =?UTF-8?q?=20return=20type=20annotation=20of=20load=5Fsettings=5Ffrom=5Fy?= =?UTF-8?q?aml=20method=20to=20"SettingsManager"=20instead=20of=20"Setting?= =?UTF-8?q?s"=20=F0=9F=90=9B=20fix(utils.py):=20import=20service=5Fmanager?= =?UTF-8?q?=20and=20ServiceType=20to=20fix=20NameError?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/custom/custom_component.py | 11 +++++++---- 
src/backend/langflow/services/database/factory.py | 2 ++ src/backend/langflow/services/settings/manager.py | 2 +- src/backend/langflow/services/utils.py | 4 ++++ 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 0d93f8d75..fdfef52f8 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -3,6 +3,7 @@ from fastapi import HTTPException from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES from langflow.interface.custom.component import Component from langflow.interface.custom.directory_reader import DirectoryReader +from langflow.services.utils import get_db_manager from langflow.utils import validate @@ -159,7 +160,8 @@ class CustomComponent(Component, extra=Extra.allow): from langflow.processing.process import build_sorted_vertices_with_caching from langflow.processing.process import process_tweaks - with session_getter() as session: + db_manager = get_db_manager() + with session_getter(db_manager) as session: graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None if not graph_data: raise ValueError(f"Flow {flow_id} not found") @@ -169,7 +171,8 @@ class CustomComponent(Component, extra=Extra.allow): def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]: get_session = get_session or session_getter - with get_session() as session: + db_manager = get_db_manager() + with get_session(db_manager) as session: flows = session.query(Flow).all() return flows @@ -182,8 +185,8 @@ class CustomComponent(Component, extra=Extra.allow): get_session: Optional[Callable] = None, ) -> Flow: get_session = get_session or session_getter - - with get_session() as session: + db_manager = get_db_manager() + with get_session(db_manager) as session: if flow_id: flow = session.query(Flow).get(flow_id) elif 
flow_name: diff --git a/src/backend/langflow/services/database/factory.py b/src/backend/langflow/services/database/factory.py index 187a29fdd..d98414382 100644 --- a/src/backend/langflow/services/database/factory.py +++ b/src/backend/langflow/services/database/factory.py @@ -12,4 +12,6 @@ class DatabaseManagerFactory(ServiceFactory): def create(self, settings_service: "SettingsManager"): # Here you would have logic to create and configure a DatabaseManager + if not settings_service.settings.DATABASE_URL: + raise ValueError("No database URL provided") return DatabaseManager(settings_service.settings.DATABASE_URL) diff --git a/src/backend/langflow/services/settings/manager.py b/src/backend/langflow/services/settings/manager.py index 598efe2d8..a357c4804 100644 --- a/src/backend/langflow/services/settings/manager.py +++ b/src/backend/langflow/services/settings/manager.py @@ -13,7 +13,7 @@ class SettingsManager(Service): self.settings = settings @classmethod - def load_settings_from_yaml(cls, file_path: str) -> Settings: + def load_settings_from_yaml(cls, file_path: str) -> "SettingsManager": # Check if a string is a valid path or a file name if "/" not in file_path: # Get current path diff --git a/src/backend/langflow/services/utils.py b/src/backend/langflow/services/utils.py index 07c67dfbe..049e82c0f 100644 --- a/src/backend/langflow/services/utils.py +++ b/src/backend/langflow/services/utils.py @@ -9,6 +9,10 @@ def get_settings_manager() -> "SettingsManager": return service_manager.get(ServiceType.SETTINGS_MANAGER) +def get_db_manager(): + return service_manager.get(ServiceType.DATABASE_MANAGER) + + def get_session(): db_manager = service_manager.get(ServiceType.DATABASE_MANAGER) yield from db_manager.get_session() From df51f7879ce2d759767dd80da0a7ddf4ddb906eb Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 6 Aug 2023 12:09:44 -0300 Subject: [PATCH 36/90] =?UTF-8?q?=F0=9F=94=A7=20chore(frontend):=20update?= 
=?UTF-8?q?=20.dockerignore=20to=20exclude=20the=20'build'=20directory=20f?= =?UTF-8?q?rom=20Docker=20build=20context=20=F0=9F=90=9B=20fix(frontend):?= =?UTF-8?q?=20add=20'build'=20directory=20to=20.dockerignore=20to=20preven?= =?UTF-8?q?t=20it=20from=20being=20included=20in=20Docker=20build=20contex?= =?UTF-8?q?t?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/.dockerignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/frontend/.dockerignore b/src/frontend/.dockerignore index 600e365ec..ca5762007 100644 --- a/src/frontend/.dockerignore +++ b/src/frontend/.dockerignore @@ -1 +1,2 @@ -**/node_modules \ No newline at end of file +**/node_modules +**/build \ No newline at end of file From a9db2da6bfb71225e835122414076278fccc7a90 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 6 Aug 2023 12:15:29 -0300 Subject: [PATCH 37/90] =?UTF-8?q?=F0=9F=90=9B=20fix(base.py):=20use=20db?= =?UTF-8?q?=5Fmanager.engine=20instead=20of=20DatabaseManager.engine=20to?= =?UTF-8?q?=20access=20the=20database=20engine=20=F0=9F=90=9B=20fix(confte?= =?UTF-8?q?st.py):=20add=20TYPE=5FCHECKING=20import=20to=20fix=20type=20hi?= =?UTF-8?q?nting=20error=20=F0=9F=90=9B=20fix(conftest.py):=20pass=20db=5F?= =?UTF-8?q?manager=20to=20blank=5Fsession=5Fgetter=20fixture=20to=20fix=20?= =?UTF-8?q?session=20creation=20error?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/services/database/base.py | 2 +- tests/conftest.py | 11 +++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/services/database/base.py b/src/backend/langflow/services/database/base.py index cfc434f25..9f92c6c25 100644 --- a/src/backend/langflow/services/database/base.py +++ b/src/backend/langflow/services/database/base.py @@ -70,7 +70,7 @@ class DatabaseManager(Service): @contextmanager def session_getter(db_manager: 
DatabaseManager): try: - session = Session(DatabaseManager.engine) + session = Session(db_manager.engine) yield session except Exception as e: print("Session rollback because of exception:", e) diff --git a/tests/conftest.py b/tests/conftest.py index e6cc2a855..a97270c7c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,7 @@ from contextlib import contextmanager import json from pathlib import Path -from typing import AsyncGenerator +from typing import AsyncGenerator, TYPE_CHECKING from langflow.api.v1.flows import get_session from langflow.graph.graph.base import Graph @@ -11,6 +11,9 @@ from httpx import AsyncClient from sqlmodel import SQLModel, Session, create_engine from sqlmodel.pool import StaticPool +if TYPE_CHECKING: + from langflow.services.database.base import DatabaseManager + def pytest_configure(): pytest.BASIC_EXAMPLE_PATH = ( @@ -134,15 +137,15 @@ def client_fixture(session: Session): # create a fixture for session_getter above @pytest.fixture(name="session_getter") -def session_getter_fixture(): +def session_getter_fixture(client): engine = create_engine( "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool ) SQLModel.metadata.create_all(engine) @contextmanager - def blank_session_getter(): - with Session(engine) as session: + def blank_session_getter(db_manager: "DatabaseManager"): + with Session(db_manager.engine) as session: yield session yield blank_session_getter From a0aa88f0391b437aeb379c291c47d9a9bb5dcf37 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Sun, 6 Aug 2023 12:28:05 -0300 Subject: [PATCH 38/90] =?UTF-8?q?=F0=9F=94=80=20chore(types.py):=20import?= =?UTF-8?q?=20`merge=5Fnested=5Fdicts=5Fwith=5Frenaming`=20from=20`langflo?= =?UTF-8?q?w.api.utils`=20to=20use=20it=20in=20`build=5Flangchain=5Fcustom?= =?UTF-8?q?=5Fcomponent=5Flist=5Ffrom=5Fpath`=20function=20=F0=9F=90=9B=20?= =?UTF-8?q?fix(types.py):=20replace=20`merge=5Fnested=5Fdicts`=20with=20`m?= 
=?UTF-8?q?erge=5Fnested=5Fdicts=5Fwith=5Frenaming`=20in=20`build=5Flangch?= =?UTF-8?q?ain=5Fcustom=5Fcomponent=5Flist=5Ffrom=5Fpath`=20function=20to?= =?UTF-8?q?=20fix=20a=20bug?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/interface/types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 668956e07..76dc144a0 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -1,6 +1,7 @@ import ast import contextlib from typing import Any +from langflow.api.utils import merge_nested_dicts_with_renaming from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES @@ -422,4 +423,4 @@ def build_langchain_custom_component_list_from_path(path: str): valid_menu = build_valid_menu(valid_components) invalid_menu = build_invalid_menu(invalid_components) - return merge_nested_dicts(valid_menu, invalid_menu) + return merge_nested_dicts_with_renaming(valid_menu, invalid_menu) From 7776977378de920205b8356d0143073b176edc70 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 09:43:37 -0300 Subject: [PATCH 39/90] =?UTF-8?q?=F0=9F=9A=A7=20chore(conftest.py):=20add?= =?UTF-8?q?=20runner=20fixture=20to=20enable=20CLI=20testing=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(conftest.py):=20import=20CliRunner=20from?= =?UTF-8?q?=20typer.testing=20to=20enable=20CLI=20testing=20=F0=9F=94=A7?= =?UTF-8?q?=20chore(conftest.py):=20remove=20unused=20imports=20and=20blan?= =?UTF-8?q?k=20lines=20for=20code=20cleanliness?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 
a97270c7c..2eae791cd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ from fastapi.testclient import TestClient from httpx import AsyncClient from sqlmodel import SQLModel, Session, create_engine from sqlmodel.pool import StaticPool +from typer.testing import CliRunner if TYPE_CHECKING: from langflow.services.database.base import DatabaseManager @@ -149,3 +150,8 @@ def session_getter_fixture(client): yield session yield blank_session_getter + + +@pytest.fixture +def runner(): + return CliRunner() From 709c4a17496d4f9843ae98454c6196396bbba07b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 09:44:11 -0300 Subject: [PATCH 40/90] =?UTF-8?q?=F0=9F=94=A7=20chore(=5F=5Fmain=5F=5F.py)?= =?UTF-8?q?:=20refactor=20serve=20function=20to=20improve=20readability=20?= =?UTF-8?q?and=20maintainability=20=E2=9C=A8=20feat(=5F=5Fmain=5F=5F.py):?= =?UTF-8?q?=20add=20support=20for=20custom=20components=20directory=20path?= =?UTF-8?q?=20as=20an=20environment=20variable=20=E2=9C=A8=20feat(=5F=5Fma?= =?UTF-8?q?in=5F=5F.py):=20set=20default=20value=20for=20config=20option?= =?UTF-8?q?=20to=20be=20the=20config.yaml=20file=20in=20the=20same=20direc?= =?UTF-8?q?tory=20as=20the=20script=20=E2=9C=A8=20feat(=5F=5Fmain=5F=5F.py?= =?UTF-8?q?):=20add=20support=20for=20specifying=20an=20.env=20file=20cont?= =?UTF-8?q?aining=20environment=20variables=20=E2=9C=A8=20feat(=5F=5Fmain?= =?UTF-8?q?=5F=5F.py):=20add=20backend=5Fonly=20option=20to=20run=20only?= =?UTF-8?q?=20the=20backend=20server=20without=20the=20frontend=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(=5F=5Fmain=5F=5F.py):=20refactor=20setup?= =?UTF-8?q?=5Fapp=20function=20to=20pass=20backend=5Fonly=20option=20to=20?= =?UTF-8?q?the=20app=20setup=20=E2=9C=A8=20feat(=5F=5Fmain=5F=5F.py):=20ad?= =?UTF-8?q?d=20check=20to=20skip=20server=20startup=20if=20running=20in=20?= =?UTF-8?q?pytest=20environment=20=F0=9F=94=A7=20chore(=5F=5Fmain=5F=5F.py?= 
=?UTF-8?q?):=20refactor=20serve=20function=20to=20improve=20readability?= =?UTF-8?q?=20and=20maintainability=20=E2=9C=A8=20feat(=5F=5Fmain=5F=5F.py?= =?UTF-8?q?):=20add=20support=20for=20running=20the=20server=20using=20uvi?= =?UTF-8?q?corn=20on=20Windows?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/__main__.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/__main__.py b/src/backend/langflow/__main__.py index 82d8bacb8..43247b10f 100644 --- a/src/backend/langflow/__main__.py +++ b/src/backend/langflow/__main__.py @@ -106,7 +106,9 @@ def serve( help="Path to the directory containing custom components.", envvar="LANGFLOW_COMPONENTS_PATH", ), - config: str = typer.Option("config.yaml", help="Path to the configuration file."), + config: str = typer.Option( + Path(__file__).parent / "config.yaml", help="Path to the configuration file." + ), # .env file param env_file: Path = typer.Option( None, help="Path to the .env file containing environment variables." @@ -146,6 +148,11 @@ def serve( help="Remove API keys from the projects saved in the database.", envvar="LANGFLOW_REMOVE_API_KEYS", ), + backend_only: bool = typer.Option( + False, + help="Run only the backend server without the frontend.", + envvar="LANGFLOW_BACKEND_ONLY", + ), ): """ Run the Langflow server. 
@@ -167,7 +174,7 @@ def serve( ) # create path object if path is provided static_files_dir: Optional[Path] = Path(path) if path else None - app = setup_app(static_files_dir=static_files_dir) + app = setup_app(static_files_dir=static_files_dir, backend_only=backend_only) # check if port is being used if is_port_in_use(port, host): port = get_free_port(port) @@ -179,6 +186,10 @@ def serve( "timeout": timeout, } + # Define an env variable to know if we are just testing the server + if "pytest" in sys.modules: + return + if platform.system() in ["Windows"]: # Run using uvicorn on MacOS and Windows # Windows doesn't support gunicorn From 76362da42bd395d39828506ab7300c94f3b3f605 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 09:44:45 -0300 Subject: [PATCH 41/90] =?UTF-8?q?=F0=9F=94=A7=20chore(main.py):=20refactor?= =?UTF-8?q?=20setup=5Fapp=20function=20to=20add=20support=20for=20backend?= =?UTF-8?q?=5Fonly=20flag=20=E2=9C=A8=20feat(main.py):=20add=20backend=5Fo?= =?UTF-8?q?nly=20flag=20to=20setup=5Fapp=20function=20to=20allow=20running?= =?UTF-8?q?=20the=20app=20without=20serving=20static=20files?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/main.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index a2da92c93..222873275 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -70,16 +70,19 @@ def get_static_files_dir(): return frontend_path / "frontend" -def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI: +def setup_app( + static_files_dir: Optional[Path] = None, backend_only: bool = False +) -> FastAPI: """Setup the FastAPI app.""" # get the directory of the current file if not static_files_dir: static_files_dir = get_static_files_dir() - if not static_files_dir or not static_files_dir.exists(): + if not backend_only and (not 
static_files_dir or not static_files_dir.exists()): raise RuntimeError(f"Static files directory {static_files_dir} does not exist.") app = create_app() - setup_static_files(app, static_files_dir) + if not backend_only: + setup_static_files(app, static_files_dir) return app From 36a7ba4ad5a589c75beec7cfa583b7cadea47545 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 09:45:09 -0300 Subject: [PATCH 42/90] =?UTF-8?q?=F0=9F=9A=80=20feat(test=5Fcli.py):=20add?= =?UTF-8?q?=20tests=20for=20server=20functionality=20and=20command=20line?= =?UTF-8?q?=20options?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🚀 feat(test_cli.py): add test for checking database URL option 🚀 feat(test_cli.py): add test for checking components path option --- tests/test_cli.py | 49 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 tests/test_cli.py diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 000000000..f1d5f193c --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,49 @@ +from pathlib import Path +from langflow.__main__ import app +import pytest + +import requests +import multiprocessing +import time +from langflow.services import utils + + +@pytest.fixture(scope="module") +def default_settings(): + return [ + "--backend-only", + "--no-open-browser", + ] + + +def test_server(default_settings): + p = multiprocessing.Process( + target=app, + args=(["--host", "localhost", "--port", "8982", *default_settings],), + ) + p.start() + time.sleep(5) # allow some time for the server to start + + response = requests.get( + "http://localhost:8982/health" + ) # assuming a /health endpoint exists + assert response.status_code == 200 + + p.terminate() + + +def test_database_url(runner): + result = runner.invoke(app, ["--database-url", "sqlite:///test.db"]) + assert result.exit_code == 2, result.stdout + assert "No such option: --database-url" in result.output 
+ + +def test_components_path(runner, client, default_settings): + result = runner.invoke( + app, + ["--components-path", "./", *default_settings], + ) + assert result.exit_code == 0, result.stdout + settings_manager = utils.get_settings_manager() + path = Path("./") + assert path in settings_manager.settings.COMPONENTS_PATH From 4547edef0f452255b1eabedf24b145aa8b8d0900 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 10:13:40 -0300 Subject: [PATCH 43/90] =?UTF-8?q?=F0=9F=94=A5=20refactor(api):=20remove=20?= =?UTF-8?q?unused=20flow=5Fstyles=20module=20and=20related=20code=20?= =?UTF-8?q?=F0=9F=94=A5=20refactor(api):=20remove=20unused=20flow=5Fstyles?= =?UTF-8?q?=20router=20and=20related=20endpoints=20=F0=9F=94=A5=20refactor?= =?UTF-8?q?(api):=20remove=20unused=20FlowStyle=20model=20and=20related=20?= =?UTF-8?q?code=20=F0=9F=94=A5=20refactor(api):=20remove=20unused=20FlowSt?= =?UTF-8?q?yleCreate,=20FlowStyleRead,=20and=20FlowStyleUpdate=20models=20?= =?UTF-8?q?=F0=9F=94=A5=20refactor(api):=20remove=20unused=20style=20relat?= =?UTF-8?q?ionship=20from=20Flow=20model=20=F0=9F=94=A5=20refactor(api):?= =?UTF-8?q?=20remove=20unused=20FlowReadWithStyle=20model=20=F0=9F=94=A5?= =?UTF-8?q?=20refactor(api):=20remove=20unused=20flow=5Fstyles=5Fid=20para?= =?UTF-8?q?meter=20from=20read=5Fflow=5Fstyle=20endpoint=20=F0=9F=94=A5=20?= =?UTF-8?q?refactor(api):=20remove=20unused=20update=5Fflow=5Fstyle=20endp?= =?UTF-8?q?oint=20=F0=9F=94=A5=20refactor(api):=20remove=20unused=20delete?= =?UTF-8?q?=5Fflow=5Fstyle=20endpoint=20=F0=9F=94=A5=20refactor(api):=20re?= =?UTF-8?q?move=20unused=20flow=5Fid=20parameter=20from=20delete=5Fflow=5F?= =?UTF-8?q?style=20endpoint=20=F0=9F=94=A5=20refactor(api):=20remove=20unu?= =?UTF-8?q?sed=20style=20parameter=20from=20create=5Fflow=5Fstyle=20endpoi?= =?UTF-8?q?nt=20=F0=9F=94=A5=20refactor(api):=20remove=20unused=20read=5Ff?= =?UTF-8?q?low=5Fstyles=20endpoint=20=F0=9F=94=A5=20refactor(api):=20remov?= 
=?UTF-8?q?e=20unused=20flow=5Fstyles=5Fid=20parameter=20from=20read=5Fflo?= =?UTF-8?q?w=5Fstyles=20endpoint=20=F0=9F=94=A5=20refactor(api):=20remove?= =?UTF-8?q?=20unused=20flow=5Fstyle=5Fid=20parameter=20from=20update=5Fflo?= =?UTF-8?q?w=5Fstyle=20endpoint=20=F0=9F=94=A5=20refactor(api):=20remove?= =?UTF-8?q?=20unused=20flow=5Fstyle=20parameter=20from=20update=5Fflow=5Fs?= =?UTF-8?q?tyle=20endpoint=20=F0=9F=94=A5=20refactor(api):=20remove=20unus?= =?UTF-8?q?ed=20flow=5Fstyle=20parameter=20from=20create=5Fflow=5Fstyle=20?= =?UTF-8?q?endpoint=20=F0=9F=94=A5=20refactor(api):=20remove=20unused=20fl?= =?UTF-8?q?ow=5Fstyles=5Frouter=20import=20=F0=9F=94=A5=20refactor(api):?= =?UTF-8?q?=20remove=20unused=20flow=5Fstyles=5Frouter=20variable=20?= =?UTF-8?q?=F0=9F=94=A5=20refactor(api):=20remove=20unused=20flow=5Fstyles?= =?UTF-8?q?=5Frouter=20prefix=20and=20tags?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/__init__.py | 2 - src/backend/langflow/api/v1/flow_styles.py | 83 ------------------- src/backend/langflow/api/v1/flows.py | 5 +- .../langflow/services/database/models/flow.py | 12 +-- .../services/database/models/flow_style.py | 33 -------- 5 files changed, 3 insertions(+), 132 deletions(-) delete mode 100644 src/backend/langflow/api/v1/flow_styles.py delete mode 100644 src/backend/langflow/services/database/models/flow_style.py diff --git a/src/backend/langflow/api/v1/__init__.py b/src/backend/langflow/api/v1/__init__.py index f001152a9..b6e7b36d8 100644 --- a/src/backend/langflow/api/v1/__init__.py +++ b/src/backend/langflow/api/v1/__init__.py @@ -2,7 +2,6 @@ from langflow.api.v1.endpoints import router as endpoints_router from langflow.api.v1.validate import router as validate_router from langflow.api.v1.chat import router as chat_router from langflow.api.v1.flows import router as flows_router -from langflow.api.v1.flow_styles import router as flow_styles_router from 
langflow.api.v1.components import router as component_router __all__ = [ @@ -11,5 +10,4 @@ __all__ = [ "component_router", "validate_router", "flows_router", - "flow_styles_router", ] diff --git a/src/backend/langflow/api/v1/flow_styles.py b/src/backend/langflow/api/v1/flow_styles.py deleted file mode 100644 index 6eacf8d86..000000000 --- a/src/backend/langflow/api/v1/flow_styles.py +++ /dev/null @@ -1,83 +0,0 @@ -from uuid import UUID -from langflow.services.database.models.flow_style import ( - FlowStyle, - FlowStyleCreate, - FlowStyleRead, - FlowStyleUpdate, -) -from langflow.services.utils import get_session -from sqlmodel import Session, select -from fastapi import APIRouter, Depends, HTTPException - - -# build router -router = APIRouter(prefix="/flow_styles", tags=["FlowStyles"]) - -# FlowStyleCreate: -# class FlowStyleBase(SQLModel): -# color: str = Field(index=True) -# emoji: str = Field(index=False) -# flow_id: UUID = Field(default=None, foreign_key="flow.id") - - -@router.post("/", response_model=FlowStyleRead) -def create_flow_style( - *, session: Session = Depends(get_session), flow_style: FlowStyleCreate -): - """Create a new flow_style.""" - db_flow_style = FlowStyle.from_orm(flow_style) - session.add(db_flow_style) - session.commit() - session.refresh(db_flow_style) - return db_flow_style - - -@router.get("/", response_model=list[FlowStyleRead]) -def read_flow_styles(*, session: Session = Depends(get_session)): - """Read all flows.""" - try: - flows = session.exec(select(FlowStyle)).all() - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) from e - return flows - - -@router.get("/{flow_styles_id}", response_model=FlowStyleRead) -def read_flow_style(*, session: Session = Depends(get_session), flow_styles_id: UUID): - """Read a flow_style.""" - if flow_style := session.get(FlowStyle, flow_styles_id): - return flow_style - else: - raise HTTPException(status_code=404, detail="FlowStyle not found") - - 
-@router.patch("/{flow_style_id}", response_model=FlowStyleRead) -def update_flow_style( - *, - session: Session = Depends(get_session), - flow_style_id: UUID, - flow_style: FlowStyleUpdate, -): - """Update a flow_style.""" - db_flow_style = session.get(FlowStyle, flow_style_id) - if not db_flow_style: - raise HTTPException(status_code=404, detail="FlowStyle not found") - flow_data = flow_style.dict(exclude_unset=True) - for key, value in flow_data.items(): - if hasattr(db_flow_style, key) and value is not None: - setattr(db_flow_style, key, value) - session.add(db_flow_style) - session.commit() - session.refresh(db_flow_style) - return db_flow_style - - -@router.delete("/{flow_id}") -def delete_flow_style(*, session: Session = Depends(get_session), flow_id: UUID): - """Delete a flow_style.""" - flow_style = session.get(FlowStyle, flow_id) - if not flow_style: - raise HTTPException(status_code=404, detail="FlowStyle not found") - session.delete(flow_style) - session.commit() - return {"message": "FlowStyle deleted successfully"} diff --git a/src/backend/langflow/api/v1/flows.py b/src/backend/langflow/api/v1/flows.py index 1ecbc85f4..3145ced3c 100644 --- a/src/backend/langflow/api/v1/flows.py +++ b/src/backend/langflow/api/v1/flows.py @@ -6,7 +6,6 @@ from langflow.services.database.models.flow import ( Flow, FlowCreate, FlowRead, - FlowReadWithStyle, FlowUpdate, ) from langflow.services.utils import get_session @@ -32,7 +31,7 @@ def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate): return db_flow -@router.get("/", response_model=list[FlowReadWithStyle], status_code=200) +@router.get("/", response_model=list[FlowRead], status_code=200) def read_flows(*, session: Session = Depends(get_session)): """Read all flows.""" try: @@ -42,7 +41,7 @@ def read_flows(*, session: Session = Depends(get_session)): return [jsonable_encoder(flow) for flow in flows] -@router.get("/{flow_id}", response_model=FlowReadWithStyle, status_code=200) 
+@router.get("/{flow_id}", response_model=FlowRead, status_code=200) def read_flow(*, session: Session = Depends(get_session), flow_id: UUID): """Read a flow.""" if flow := session.get(Flow, flow_id): diff --git a/src/backend/langflow/services/database/models/flow.py b/src/backend/langflow/services/database/models/flow.py index 2b6c6879c..2bc83f9dc 100644 --- a/src/backend/langflow/services/database/models/flow.py +++ b/src/backend/langflow/services/database/models/flow.py @@ -2,12 +2,11 @@ from langflow.services.database.models.base import SQLModelSerializable from pydantic import validator -from sqlmodel import Field, Relationship, JSON, Column +from sqlmodel import Field, JSON, Column from uuid import UUID, uuid4 from typing import Dict, Optional # if TYPE_CHECKING: -from langflow.services.database.models.flow_style import FlowStyle, FlowStyleRead class FlowBase(SQLModelSerializable): @@ -35,11 +34,6 @@ class FlowBase(SQLModelSerializable): class Flow(FlowBase, table=True): id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) - style: Optional["FlowStyle"] = Relationship( - back_populates="flow", - # use "uselist=False" to make it a one-to-one relationship - sa_relationship_kwargs={"uselist": False}, - ) class FlowCreate(FlowBase): @@ -50,10 +44,6 @@ class FlowRead(FlowBase): id: UUID -class FlowReadWithStyle(FlowRead): - style: Optional["FlowStyleRead"] = None - - class FlowUpdate(SQLModelSerializable): name: Optional[str] = None description: Optional[str] = None diff --git a/src/backend/langflow/services/database/models/flow_style.py b/src/backend/langflow/services/database/models/flow_style.py deleted file mode 100644 index 3810c7cea..000000000 --- a/src/backend/langflow/services/database/models/flow_style.py +++ /dev/null @@ -1,33 +0,0 @@ -# Path: src/backend/langflow/database/models/flowstyle.py - -from langflow.services.database.models.base import SQLModelSerializable 
-from sqlmodel import Field, Relationship -from uuid import UUID, uuid4 -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from langflow.services.database.models.flow import Flow - - -class FlowStyleBase(SQLModelSerializable): - color: str - emoji: str - flow_id: UUID = Field(default=None, foreign_key="flow.id") - - -class FlowStyle(FlowStyleBase, table=True): - id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - flow: "Flow" = Relationship(back_populates="style") - - -class FlowStyleUpdate(SQLModelSerializable): - color: Optional[str] = None - emoji: Optional[str] = None - - -class FlowStyleCreate(FlowStyleBase): - pass - - -class FlowStyleRead(FlowStyleBase): - id: UUID From e2e14d8c9355a9ce653c8d4d4a2259a326ead37f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 10:24:02 -0300 Subject: [PATCH 44/90] =?UTF-8?q?=F0=9F=94=A5=20refactor(router.py):=20rem?= =?UTF-8?q?ove=20unused=20flow=5Fstyles=5Frouter=20import=20to=20improve?= =?UTF-8?q?=20code=20cleanliness=20and=20reduce=20unused=20code=20?= =?UTF-8?q?=F0=9F=94=A5=20refactor(test=5Fdatabase.py):=20remove=20unused?= =?UTF-8?q?=20flow=5Fstyle=20related=20tests=20to=20improve=20code=20clean?= =?UTF-8?q?liness=20and=20reduce=20unused=20code?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/router.py | 2 - tests/test_database.py | 102 ----------------------------- 2 files changed, 104 deletions(-) diff --git a/src/backend/langflow/api/router.py b/src/backend/langflow/api/router.py index b9c51c11e..ea1938a75 100644 --- a/src/backend/langflow/api/router.py +++ b/src/backend/langflow/api/router.py @@ -5,7 +5,6 @@ from langflow.api.v1 import ( endpoints_router, validate_router, flows_router, - flow_styles_router, component_router, ) @@ -17,4 +16,3 @@ router.include_router(endpoints_router) router.include_router(validate_router) router.include_router(component_router) 
router.include_router(flows_router) -router.include_router(flow_styles_router) diff --git a/tests/test_database.py b/tests/test_database.py index 6ebae5396..52a5daa4c 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -5,17 +5,10 @@ from uuid import UUID, uuid4 from sqlalchemy.orm import Session from fastapi.testclient import TestClient -from fastapi.encoders import jsonable_encoder from langflow.api.v1.schemas import FlowListCreate from langflow.services.database.models.flow import Flow, FlowCreate, FlowUpdate -from langflow.services.database.models.flow_style import ( - FlowStyleCreate, - FlowStyleRead, - FlowStyleUpdate, -) - @pytest.fixture(scope="module") def json_style(): @@ -56,33 +49,12 @@ def test_read_flows(client: TestClient, json_flow: str): assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - flow_style = FlowStyleCreate(color="red", emoji="👍", flow_id=response.json()["id"]) - response = client.post( - "api/v1/flow_styles/", json=jsonable_encoder(flow_style.dict()) - ) - assert response.status_code == 200 - assert response.json()["color"] == flow_style.color - assert response.json()["emoji"] == flow_style.emoji - assert response.json()["flow_id"] == str(flow_style.flow_id) - flow = FlowCreate(name="Test Flow", description="description", data=data) response = client.post("api/v1/flows/", json=flow.dict()) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - # Now we need to create FlowStyle objects for each Flow - flow_style = FlowStyleCreate( - color="green", emoji="👍", flow_id=response.json()["id"] - ) - response = client.post( - "api/v1/flow_styles/", json=jsonable_encoder(flow_style.dict()) - ) - assert response.status_code == 200 - assert response.json()["color"] == flow_style.color - assert response.json()["emoji"] == flow_style.emoji - assert response.json()["flow_id"] == str(flow_style.flow_id) - response = 
client.get("api/v1/flows/") assert response.status_code == 200 assert len(response.json()) > 0 @@ -97,21 +69,10 @@ def test_read_flow(client: TestClient, json_flow: str): # turn it into a UUID flow_id = UUID(flow_id) - flow_style = FlowStyleCreate(color="green", emoji="👍", flow_id=flow_id) - response = client.post( - "api/v1/flow_styles/", json=jsonable_encoder(flow_style.dict()) - ) - assert response.status_code == 200 - response_json = response.json() - assert response_json["color"] == flow_style.color - assert response_json["emoji"] == flow_style.emoji - assert response_json["flow_id"] == str(flow_style.flow_id) - response = client.get(f"api/v1/flows/{flow_id}") assert response.status_code == 200 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - assert response.json()["style"]["color"] == flow_style.color def test_update_flow(client: TestClient, json_flow: str): @@ -275,66 +236,3 @@ def test_read_empty_flows(client: TestClient): response = client.get("api/v1/flows/") assert response.status_code == 200 assert len(response.json()) == 0 - - -def test_create_flow_style(client: TestClient): - flow_style = FlowStyleCreate(color="red", emoji="🔴") - response = client.post("api/v1/flow_styles/", json=flow_style.dict()) - assert response.status_code == 200 - created_flow_style = FlowStyleRead(**response.json()) - assert created_flow_style.color == flow_style.color - assert created_flow_style.emoji == flow_style.emoji - - -def test_read_flow_styles(client: TestClient): - response = client.get("api/v1/flow_styles/") - assert response.status_code == 200 - flow_styles = [FlowStyleRead(**flow_style) for flow_style in response.json()] - assert not flow_styles - # Create test data - flow_style = FlowStyleCreate(color="red", emoji="🔴") - response = client.post("api/v1/flow_styles/", json=flow_style.dict()) - assert response.status_code == 200 - # Check response data - response = client.get("api/v1/flow_styles/") - assert 
response.status_code == 200 - flow_styles = [FlowStyleRead(**flow_style) for flow_style in response.json()] - assert len(flow_styles) == 1 - assert flow_styles[0].color == flow_style.color - assert flow_styles[0].emoji == flow_style.emoji - - -def test_read_flow_style(client: TestClient): - flow_style = FlowStyleCreate(color="red", emoji="🔴") - response = client.post("api/v1/flow_styles/", json=flow_style.dict()) - created_flow_style = FlowStyleRead(**response.json()) - response = client.get(f"api/v1/flow_styles/{created_flow_style.id}") - assert response.status_code == 200 - read_flow_style = FlowStyleRead(**response.json()) - assert read_flow_style == created_flow_style - - -def test_update_flow_style(client: TestClient): - flow_style = FlowStyleCreate(color="red", emoji="🔴") - response = client.post("api/v1/flow_styles/", json=flow_style.dict()) - created_flow_style = FlowStyleRead(**response.json()) - to_update_flow_style = FlowStyleUpdate(color="blue") - response = client.patch( - f"api/v1/flow_styles/{created_flow_style.id}", json=to_update_flow_style.dict() - ) - assert response.status_code == 200 - updated_flow_style = FlowStyleRead(**response.json()) - assert updated_flow_style.color == "blue" - assert updated_flow_style.emoji == flow_style.emoji - - -def test_delete_flow_style(client: TestClient): - flow_style = FlowStyleCreate(color="red", emoji="🔴") - response = client.post("api/v1/flow_styles/", json=flow_style.dict()) - created_flow_style = FlowStyleRead(**response.json()) - response = client.delete(f"api/v1/flow_styles/{created_flow_style.id}") - assert response.status_code == 200 - assert response.json() == {"message": "FlowStyle deleted successfully"} - - response = client.get(f"api/v1/flow_styles/{created_flow_style.id}") - assert response.status_code == 404 From b7861ac77f3d1cfaae00c409791d998563bfe38f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 10:24:28 -0300 Subject: [PATCH 45/90] 
=?UTF-8?q?=F0=9F=94=A7=20chore(alembic.ini):=20upda?= =?UTF-8?q?te=20sqlalchemy.url=20to=20dynamically=20set=20the=20path=20to?= =?UTF-8?q?=20the=20database=20in=20the=20root=20of=20the=20project?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/alembic.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/alembic.ini b/src/backend/langflow/alembic.ini index 0227ea4f2..379661422 100644 --- a/src/backend/langflow/alembic.ini +++ b/src/backend/langflow/alembic.ini @@ -60,10 +60,10 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 -# This is a placeholder to run the first migration +# This is the path to the db in the root of the project. # When the user runs the Langflow the database url will -# be set dinamically -sqlalchemy.url = sqlite:///langflow.db +# be set dinamically. +sqlalchemy.url = sqlite:///../../../langflow.db [post_write_hooks] From d75fb49bf2adcf1cdab25a4b8281893ba713c00b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 10:25:46 -0300 Subject: [PATCH 46/90] =?UTF-8?q?=F0=9F=94=A5=20chore(alembic):=20remove?= =?UTF-8?q?=20FlowStyles=20table?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit removes the FlowStyles table from the database. The table was no longer needed and has been dropped. The corresponding indexes and foreign key constraints have also been removed. Downgrade functionality has been implemented to recreate the FlowStyles table and its associated indexes and foreign key constraints if needed. 
--- .../921920b95d3a_remove_flowstyles_table.py | 63 +++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py diff --git a/src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py b/src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py new file mode 100644 index 000000000..7bb550fdf --- /dev/null +++ b/src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py @@ -0,0 +1,63 @@ +"""Remove FlowStyles table + +Revision ID: 921920b95d3a +Revises: 4814b6f4abfd +Create Date: 2023-08-07 10:22:54.503716 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "921920b95d3a" +down_revision: Union[str, None] = "4814b6f4abfd" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index("ix_component_frontend_node_id", table_name="component") + op.drop_index("ix_component_name", table_name="component") + op.drop_table("component") + op.drop_table("flowstyle") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "flowstyle", + sa.Column("color", sa.VARCHAR(), nullable=False), + sa.Column("emoji", sa.VARCHAR(), nullable=False), + sa.Column("flow_id", sa.CHAR(length=32), nullable=True), + sa.Column("id", sa.CHAR(length=32), nullable=False), + sa.ForeignKeyConstraint( + ["flow_id"], + ["flow.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + op.create_table( + "component", + sa.Column("id", sa.CHAR(length=32), nullable=False), + sa.Column("frontend_node_id", sa.CHAR(length=32), nullable=False), + sa.Column("name", sa.VARCHAR(), nullable=False), + sa.Column("description", sa.VARCHAR(), nullable=True), + sa.Column("python_code", sa.VARCHAR(), nullable=True), + sa.Column("return_type", sa.VARCHAR(), nullable=True), + sa.Column("is_disabled", sa.BOOLEAN(), nullable=False), + sa.Column("is_read_only", sa.BOOLEAN(), nullable=False), + sa.Column("create_at", sa.DATETIME(), nullable=False), + sa.Column("update_at", sa.DATETIME(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index("ix_component_name", "component", ["name"], unique=False) + op.create_index( + "ix_component_frontend_node_id", "component", ["frontend_node_id"], unique=False + ) + # ### end Alembic commands ### From 41707c1eac506910e378563a16a92fef1e8b0b7c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 11:59:23 -0300 Subject: [PATCH 47/90] =?UTF-8?q?=F0=9F=94=A7=20chore(test=5Fcli.py):=20re?= =?UTF-8?q?factor=20test=5Fcomponents=5Fpath=20to=20use=20a=20temporary=20?= =?UTF-8?q?directory=20for=20components=20path=20=F0=9F=94=A7=20chore(test?= =?UTF-8?q?=5Fcli.py):=20refactor=20test=5Fcomponents=5Fpath=20to=20use=20?= =?UTF-8?q?a=20temporary=20directory=20for=20components=20path=20to=20impr?= =?UTF-8?q?ove=20test=20isolation=20and=20avoid=20side=20effects?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/test_cli.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 
3 deletions(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index f1d5f193c..cd2dcdbb7 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,4 +1,5 @@ from pathlib import Path +from tempfile import tempdir from langflow.__main__ import app import pytest @@ -39,11 +40,16 @@ def test_database_url(runner): def test_components_path(runner, client, default_settings): + # Create a foldr in the tmp directory + temp_dir = Path(tempdir) + # create a "components" folder + temp_dir = temp_dir / "components" + temp_dir.mkdir(exist_ok=True) + result = runner.invoke( app, - ["--components-path", "./", *default_settings], + ["--components-path", str(temp_dir), *default_settings], ) assert result.exit_code == 0, result.stdout settings_manager = utils.get_settings_manager() - path = Path("./") - assert path in settings_manager.settings.COMPONENTS_PATH + assert temp_dir in settings_manager.settings.COMPONENTS_PATH From 8036f74d8b4f66eae435fe07abd02a261ca702f4 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 11:59:57 -0300 Subject: [PATCH 48/90] =?UTF-8?q?=F0=9F=94=80=20chore(conftest.py):=20upda?= =?UTF-8?q?te=20import=20statement=20for=20DatabaseManager=20to=20reflect?= =?UTF-8?q?=20file=20name=20change?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2eae791cd..e90d03d0a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,7 +13,7 @@ from sqlmodel.pool import StaticPool from typer.testing import CliRunner if TYPE_CHECKING: - from langflow.services.database.base import DatabaseManager + from langflow.services.database.manager import DatabaseManager def pytest_configure(): From 8436c66aa767eab3ac78f7080c78a0dab35a0feb Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 12:00:27 -0300 Subject: [PATCH 49/90] 
=?UTF-8?q?=F0=9F=93=A6=20chore(manager.py):=20add?= =?UTF-8?q?=20DatabaseManager=20class=20to=20handle=20database=20operation?= =?UTF-8?q?s=20=F0=9F=93=A6=20chore(utils.py):=20add=20initialize=5Fdataba?= =?UTF-8?q?se=20function=20and=20session=5Fgetter=20context=20manager=20to?= =?UTF-8?q?=20handle=20database=20initialization=20and=20session=20managem?= =?UTF-8?q?ent?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../services/database/{base.py => manager.py} | 26 +--------------- .../langflow/services/database/utils.py | 31 +++++++++++++++++++ 2 files changed, 32 insertions(+), 25 deletions(-) rename src/backend/langflow/services/database/{base.py => manager.py} (77%) create mode 100644 src/backend/langflow/services/database/utils.py diff --git a/src/backend/langflow/services/database/base.py b/src/backend/langflow/services/database/manager.py similarity index 77% rename from src/backend/langflow/services/database/base.py rename to src/backend/langflow/services/database/manager.py index 9f92c6c25..20db90f66 100644 --- a/src/backend/langflow/services/database/base.py +++ b/src/backend/langflow/services/database/manager.py @@ -1,4 +1,3 @@ -from contextlib import contextmanager from pathlib import Path from langflow.services.base import Service from sqlmodel import SQLModel, Session, create_engine @@ -12,7 +11,7 @@ class DatabaseManager(Service): def __init__(self, database_url: str): self.database_url = database_url - # This file is in langflow.services.database.base.py + # This file is in langflow.services.database.manager.py # the ini is in langflow langflow_dir = Path(__file__).parent.parent.parent self.script_location = langflow_dir / "alembic" @@ -65,26 +64,3 @@ class DatabaseManager(Service): raise RuntimeError("Something went wrong creating the database and tables.") else: logger.debug("Database and tables created successfully") - - -@contextmanager -def session_getter(db_manager: DatabaseManager): - try: - 
session = Session(db_manager.engine) - yield session - except Exception as e: - print("Session rollback because of exception:", e) - session.rollback() - raise - finally: - session.close() - - -def initialize_database(): - logger.debug("Initializing database") - from langflow.services import service_manager, ServiceType - - database_manager = service_manager.get(ServiceType.DATABASE_MANAGER) - database_manager.run_migrations() - database_manager.create_db_and_tables() - logger.debug("Database initialized") diff --git a/src/backend/langflow/services/database/utils.py b/src/backend/langflow/services/database/utils.py new file mode 100644 index 000000000..20b2bbbb4 --- /dev/null +++ b/src/backend/langflow/services/database/utils.py @@ -0,0 +1,31 @@ +from typing import TYPE_CHECKING +from langflow.utils.logger import logger +from contextlib import contextmanager + +from sqlmodel import Session + +if TYPE_CHECKING: + from langflow.services.database.manager import DatabaseManager + + +def initialize_database(): + logger.debug("Initializing database") + from langflow.services import service_manager, ServiceType + + database_manager = service_manager.get(ServiceType.DATABASE_MANAGER) + database_manager.run_migrations() + database_manager.create_db_and_tables() + logger.debug("Database initialized") + + +@contextmanager +def session_getter(db_manager: "DatabaseManager"): + try: + session = Session(db_manager.engine) + yield session + except Exception as e: + print("Session rollback because of exception:", e) + session.rollback() + raise + finally: + session.close() From 1601745ed909789a6360c68489c0c6008212dd68 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 12:00:58 -0300 Subject: [PATCH 50/90] =?UTF-8?q?=F0=9F=94=80=20refactor(alembic/env.py):?= =?UTF-8?q?=20update=20import=20statement=20for=20SQLModel=20in=20env.py?= =?UTF-8?q?=20to=20reflect=20new=20location=20in=20manager=20module?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit 🔀 refactor(interface/custom/custom_component.py): update import statement for session_getter in custom_component.py to reflect new location in utils module 🔀 refactor(main.py): update import statement for initialize_database in main.py to reflect new location in utils module 🔀 refactor(database/factory.py): update import statement for DatabaseManager in factory.py to reflect new location in manager module --- src/backend/langflow/alembic/env.py | 2 +- src/backend/langflow/interface/custom/custom_component.py | 2 +- src/backend/langflow/main.py | 2 +- src/backend/langflow/services/database/factory.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/backend/langflow/alembic/env.py b/src/backend/langflow/alembic/env.py index a3babba6d..310894431 100644 --- a/src/backend/langflow/alembic/env.py +++ b/src/backend/langflow/alembic/env.py @@ -5,7 +5,7 @@ from sqlalchemy import pool from alembic import context -from langflow.services.database.base import SQLModel +from langflow.services.database.manager import SQLModel # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index fdfef52f8..5388f40d8 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -7,7 +7,7 @@ from langflow.services.utils import get_db_manager from langflow.utils import validate -from langflow.services.database.base import session_getter +from langflow.services.database.utils import session_getter from langflow.services.database.models.flow import Flow from pydantic import Extra import yaml diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 222873275..734483317 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -7,7 +7,7 @@ from fastapi.staticfiles import StaticFiles from langflow.api import router from langflow.interface.utils import setup_llm_caching -from langflow.services.database.base import initialize_database +from langflow.services.database.utils import initialize_database from langflow.services.manager import initialize_services from langflow.utils.logger import configure diff --git a/src/backend/langflow/services/database/factory.py b/src/backend/langflow/services/database/factory.py index d98414382..fecf24543 100644 --- a/src/backend/langflow/services/database/factory.py +++ b/src/backend/langflow/services/database/factory.py @@ -1,5 +1,5 @@ from typing import TYPE_CHECKING -from langflow.services.database.base import DatabaseManager +from langflow.services.database.manager import DatabaseManager from langflow.services.factory import ServiceFactory if TYPE_CHECKING: From 9dba69cffc8ce0ea9ed76eb42bbd753dc25a49d9 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 12:07:45 -0300 Subject: [PATCH 51/90] =?UTF-8?q?=F0=9F=93=9D=20docs(CONTRIBUTING.md):=20a?= =?UTF-8?q?dd=20branch=20structure=20information=20to=20CONTRIBUTING.md=20?= 
=?UTF-8?q?file=20for=20better=20understanding=20of=20the=20repository=20s?= =?UTF-8?q?tructure=20=F0=9F=93=9D=20docs(README.md):=20add=20a=20section?= =?UTF-8?q?=20about=20joining=20the=20Discord=20server=20to=20encourage=20?= =?UTF-8?q?community=20engagement=20and=20collaboration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CONTRIBUTING.md | 5 +++++ README.md | 2 ++ 2 files changed, 7 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index da7ec1977..c58bb92f1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -7,6 +7,11 @@ to contributions, whether it be in the form of a new feature, improved infra, or To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow. Please do not try to push directly to this repo unless you are a maintainer. +The branch structure is as follows: + +- `main`: The stable version of Langflow +- `dev`: The development version of Langflow. This branch is used to test new features before they are merged into `main` and, as such, may be unstable. + ## 🗺️Contributing Guidelines ## 🚩GitHub Issues diff --git a/README.md b/README.md index 3d795015d..9137ea714 100644 --- a/README.md +++ b/README.md @@ -275,6 +275,8 @@ flow("Hey, have you heard of Langflow?") We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible. +--- + Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions and showcase your projects! 🦾

From 96dfb9b324bb77bd5ff69e7b4f8f6c0af381ec5c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 12:08:16 -0300 Subject: [PATCH 52/90] =?UTF-8?q?=F0=9F=90=9B=20fix(main.py):=20fix=20cond?= =?UTF-8?q?ition=20to=20setup=20static=20files=20only=20if=20static=5Ffile?= =?UTF-8?q?s=5Fdir=20is=20not=20None?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 734483317..1702fb8f9 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -81,7 +81,7 @@ def setup_app( if not backend_only and (not static_files_dir or not static_files_dir.exists()): raise RuntimeError(f"Static files directory {static_files_dir} does not exist.") app = create_app() - if not backend_only: + if not backend_only and static_files_dir is not None: setup_static_files(app, static_files_dir) return app From 71012ac47b3c936291a1784640534fdfcce4e8be Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 14:09:27 -0300 Subject: [PATCH 53/90] =?UTF-8?q?=F0=9F=94=A5=20chore(alembic):=20remove?= =?UTF-8?q?=20flowstyles=20table?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✨ feat(alembic): add migration to remove flowstyles table 🔥 chore(alembic): remove old migration file for removing flowstyles table 🐛 fix(database): import Flow model to avoid unused import warning --- .../0a534bdfd84b_remove_flowstyles_table.py | 42 +++++++++++++ .../921920b95d3a_remove_flowstyles_table.py | 63 ------------------- .../langflow/services/database/manager.py | 1 + 3 files changed, 43 insertions(+), 63 deletions(-) create mode 100644 src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py delete mode 100644 
src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py diff --git a/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py b/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py new file mode 100644 index 000000000..0100df44d --- /dev/null +++ b/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py @@ -0,0 +1,42 @@ +"""Remove FlowStyles table + +Revision ID: 0a534bdfd84b +Revises: 4814b6f4abfd +Create Date: 2023-08-07 14:09:06.844104 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "0a534bdfd84b" +down_revision: Union[str, None] = "4814b6f4abfd" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("flowstyle") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "flowstyle", + sa.Column("color", sa.VARCHAR(), nullable=False), + sa.Column("emoji", sa.VARCHAR(), nullable=False), + sa.Column("flow_id", sa.CHAR(length=32), nullable=True), + sa.Column("id", sa.CHAR(length=32), nullable=False), + sa.ForeignKeyConstraint( + ["flow_id"], + ["flow.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py b/src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py deleted file mode 100644 index 7bb550fdf..000000000 --- a/src/backend/langflow/alembic/versions/921920b95d3a_remove_flowstyles_table.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Remove FlowStyles table - -Revision ID: 921920b95d3a -Revises: 4814b6f4abfd -Create Date: 2023-08-07 10:22:54.503716 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = "921920b95d3a" -down_revision: Union[str, None] = "4814b6f4abfd" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index("ix_component_frontend_node_id", table_name="component") - op.drop_index("ix_component_name", table_name="component") - op.drop_table("component") - op.drop_table("flowstyle") - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "flowstyle", - sa.Column("color", sa.VARCHAR(), nullable=False), - sa.Column("emoji", sa.VARCHAR(), nullable=False), - sa.Column("flow_id", sa.CHAR(length=32), nullable=True), - sa.Column("id", sa.CHAR(length=32), nullable=False), - sa.ForeignKeyConstraint( - ["flow_id"], - ["flow.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("id"), - ) - op.create_table( - "component", - sa.Column("id", sa.CHAR(length=32), nullable=False), - sa.Column("frontend_node_id", sa.CHAR(length=32), nullable=False), - sa.Column("name", sa.VARCHAR(), nullable=False), - sa.Column("description", sa.VARCHAR(), nullable=True), - sa.Column("python_code", sa.VARCHAR(), nullable=True), - sa.Column("return_type", sa.VARCHAR(), nullable=True), - sa.Column("is_disabled", sa.BOOLEAN(), nullable=False), - sa.Column("is_read_only", sa.BOOLEAN(), nullable=False), - sa.Column("create_at", sa.DATETIME(), nullable=False), - sa.Column("update_at", sa.DATETIME(), nullable=False), - sa.PrimaryKeyConstraint("id"), - ) - op.create_index("ix_component_name", "component", ["name"], unique=False) - op.create_index( - "ix_component_frontend_node_id", "component", ["frontend_node_id"], unique=False - ) - # ### end Alembic commands ### diff --git a/src/backend/langflow/services/database/manager.py b/src/backend/langflow/services/database/manager.py index 20db90f66..60a4de74d 100644 --- a/src/backend/langflow/services/database/manager.py +++ b/src/backend/langflow/services/database/manager.py @@ -4,6 +4,7 @@ from sqlmodel import SQLModel, Session, create_engine from langflow.utils.logger import logger from alembic.config import Config from alembic import command +from .models import Flow # noqa: F401 class DatabaseManager(Service): From 4e1fc2202ab97a6113117cb123012a223984e609 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 14:16:37 -0300 Subject: [PATCH 54/90] =?UTF-8?q?=F0=9F=94=A5=20refactor(test=5Fcli.py):?= 
=?UTF-8?q?=20remove=20unused=20imports=20and=20test=20cases?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The test_cli.py file had some unused imports and test cases that were not being used. This commit removes those unused imports and test cases to improve code cleanliness and maintainability. --- tests/test_cli.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index cd2dcdbb7..408500d7a 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -3,9 +3,6 @@ from tempfile import tempdir from langflow.__main__ import app import pytest -import requests -import multiprocessing -import time from langflow.services import utils @@ -17,28 +14,6 @@ def default_settings(): ] -def test_server(default_settings): - p = multiprocessing.Process( - target=app, - args=(["--host", "localhost", "--port", "8982", *default_settings],), - ) - p.start() - time.sleep(5) # allow some time for the server to start - - response = requests.get( - "http://localhost:8982/health" - ) # assuming a /health endpoint exists - assert response.status_code == 200 - - p.terminate() - - -def test_database_url(runner): - result = runner.invoke(app, ["--database-url", "sqlite:///test.db"]) - assert result.exit_code == 2, result.stdout - assert "No such option: --database-url" in result.output - - def test_components_path(runner, client, default_settings): # Create a foldr in the tmp directory temp_dir = Path(tempdir) From 9fcc96e76760813b4d86ff43c8983f313ff624e7 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 14:18:07 -0300 Subject: [PATCH 55/90] =?UTF-8?q?=F0=9F=94=80=20refactor(manager.py):=20up?= =?UTF-8?q?date=20import=20statement=20for=20models=20in=20database=20mana?= =?UTF-8?q?ger=20to=20improve=20readability=20and=20maintainability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
src/backend/langflow/services/database/manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/services/database/manager.py b/src/backend/langflow/services/database/manager.py index 60a4de74d..92385a457 100644 --- a/src/backend/langflow/services/database/manager.py +++ b/src/backend/langflow/services/database/manager.py @@ -4,7 +4,7 @@ from sqlmodel import SQLModel, Session, create_engine from langflow.utils.logger import logger from alembic.config import Config from alembic import command -from .models import Flow # noqa: F401 +from langflow.services.database import models # noqa class DatabaseManager(Service): From ac0ff01d619be8a737ba76bcc2e91b7fcc4fdcf3 Mon Sep 17 00:00:00 2001 From: Igor Carvalho Date: Mon, 7 Aug 2023 14:46:33 -0300 Subject: [PATCH 56/90] Refactor: rename codeTabsComponent variables --- .../components/codeTabsComponent/index.tsx | 170 +++++++++--------- 1 file changed, 85 insertions(+), 85 deletions(-) diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index 27653fbd7..9fd36633a 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -205,13 +205,13 @@ export default function CodeTabsComponent({ : "overflow-hidden" )} > - {data.map((t: any, index) => ( + {data.map((node: any, index) => (

- {tweaks.tweaksList.current.includes(t["data"]["id"]) && ( + {tweaks.tweaksList.current.includes(node["data"]["id"]) && (
@@ -226,23 +226,23 @@ export default function CodeTabsComponent({ - {Object.keys(t["data"]["node"]["template"]) + {Object.keys(node["data"]["node"]["template"]) .filter( (n) => n.charAt(0) !== "_" && - t.data.node.template[n].show && - (t.data.node.template[n].type === "str" || - t.data.node.template[n].type === + node.data.node.template[n].show && + (node.data.node.template[n].type === "str" || + node.data.node.template[n].type === "bool" || - t.data.node.template[n].type === + node.data.node.template[n].type === "float" || - t.data.node.template[n].type === + node.data.node.template[n].type === "code" || - t.data.node.template[n].type === + node.data.node.template[n].type === "prompt" || - t.data.node.template[n].type === + node.data.node.template[n].type === "file" || - t.data.node.template[n].type === "int") + node.data.node.template[n].type === "int") ) .map((n, i) => { return ( @@ -255,22 +255,22 @@ export default function CodeTabsComponent({
- {t.data.node.template[n].type === + {node.data.node.template[n].type === "str" && - !t.data.node.template[n].options ? ( + !node.data.node.template[n].options ? (
- {t.data.node.template[n] + {node.data.node.template[n] .list ? ( - ) : t.data.node.template[n] + ) : node.data.node.template[n] .multiline ? ( @@ -305,14 +305,14 @@ export default function CodeTabsComponent({ disabled={false} editNode={true} value={ - !t.data.node.template[ + !node.data.node.template[ n ].value || - t.data.node.template[ + node.data.node.template[ n ].value === "" ? "" - : t.data.node + : node.data.node .template[n] .value } @@ -328,9 +328,9 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], k, - t.data.node + node.data.node .template[n] ); }} @@ -342,16 +342,16 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} password={ - t.data.node.template[n] + node.data.node.template[n] .password ?? false } value={ - !t.data.node.template[n] + !node.data.node.template[n] .value || - t.data.node.template[n] + node.data.node.template[n] .value === "" ? "" - : t.data.node.template[ + : node.data.node.template[ n ].value } @@ -367,21 +367,21 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], k, - t.data.node.template[n] + node.data.node.template[n] ); }} /> )}
- ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "bool" ? (
{" "} { @@ -396,25 +396,25 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], e, - t.data.node.template[n] + node.data.node.template[n] ); }} size="small" disabled={false} />
- ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "file" ? ( @@ -423,41 +423,41 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} value={ - t.data.node.template[n] + node.data.node.template[n] .value ?? "" } onChange={(k: any) => {}} fileTypes={ - t.data.node.template[n] + node.data.node.template[n] .fileTypes } suffixes={ - t.data.node.template[n] + node.data.node.template[n] .suffixes } onFileChange={( value: any ) => { - t.data.node.template[ + node.data.node.template[ n ].file_path = value; }} >
- ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "float" ? (
{ @@ -472,23 +472,23 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], k, - t.data.node.template[n] + node.data.node.template[n] ); }} />
- ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "str" && - t.data.node.template[n] + node.data.node.template[n] .options ? (
{ @@ -503,35 +503,35 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], k, - t.data.node.template[n] + node.data.node.template[n] ); }} value={ - !t.data.node.template[n] + !node.data.node.template[n] .value || - t.data.node.template[n] + node.data.node.template[n] .value === "" ? "" - : t.data.node.template[n] + : node.data.node.template[n] .value } >
- ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "int" ? (
{ @@ -546,23 +546,23 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], k, - t.data.node.template[n] + node.data.node.template[n] ); }} />
- ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "prompt" ? ( @@ -571,12 +571,12 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} value={ - !t.data.node.template[n] + !node.data.node.template[n] .value || - t.data.node.template[n] + node.data.node.template[n] .value === "" ? "" - : t.data.node.template[ + : node.data.node.template[ n ].value } @@ -592,23 +592,23 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], k, - t.data.node.template[n] + node.data.node.template[n] ); }} /> - ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "code" ? ( @@ -617,12 +617,12 @@ export default function CodeTabsComponent({ disabled={false} editNode={true} value={ - !t.data.node.template[n] + !node.data.node.template[n] .value || - t.data.node.template[n] + node.data.node.template[n] .value === "" ? "" - : t.data.node.template[ + : node.data.node.template[ n ].value } @@ -638,15 +638,15 @@ export default function CodeTabsComponent({ return newInputList; }); tweaks.buildTweakObject( - t["data"]["id"], + node["data"]["id"], k, - t.data.node.template[n] + node.data.node.template[n] ); }} /> - ) : t.data.node.template[n].type === + ) : node.data.node.template[n].type === "Any" ? 
( "-" ) : ( From 68c4799ab0b648fbe95f58b21683d00a56934b4e Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 14:47:04 -0300 Subject: [PATCH 57/90] =?UTF-8?q?=F0=9F=94=A7=20chore(launch.json):=20upda?= =?UTF-8?q?te=20launch.json=20to=20include=20a=20new=20configuration=20for?= =?UTF-8?q?=20debugging=20Python=20tests=20=E2=9C=A8=20feat(launch.json):?= =?UTF-8?q?=20add=20a=20new=20configuration=20for=20debugging=20Python=20t?= =?UTF-8?q?ests=20with=20the=20purpose=20of=20"debug-test"=20and=20console?= =?UTF-8?q?=20set=20to=20"integratedTerminal"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .vscode/launch.json | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.vscode/launch.json b/.vscode/launch.json index e09e76cc8..bb61b0b9e 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,4 +1,5 @@ { + "version": "0.2.0", "configurations": [ { "name": "Debug Backend", @@ -38,6 +39,15 @@ "request": "launch", "url": "http://localhost:3000/", "webRoot": "${workspaceRoot}/src/frontend" + }, + { + "name": "Python: Debug Tests", + "type": "python", + "request": "launch", + "program": "${file}", + "purpose": ["debug-test"], + "console": "integratedTerminal", + "justMyCode": false } ] } From cc917311a6ef651612419fa8478e6352282362e3 Mon Sep 17 00:00:00 2001 From: Igor Carvalho Date: Mon, 7 Aug 2023 15:02:12 -0300 Subject: [PATCH 58/90] Refactor: Change more variables names on codeTabsComponent --- .../components/codeTabsComponent/index.tsx | 218 +++++++++--------- 1 file changed, 109 insertions(+), 109 deletions(-) diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index 9fd36633a..e92d80b20 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -228,75 +228,75 @@ export default function CodeTabsComponent({ 
{Object.keys(node["data"]["node"]["template"]) .filter( - (n) => - n.charAt(0) !== "_" && - node.data.node.template[n].show && - (node.data.node.template[n].type === "str" || - node.data.node.template[n].type === + (templateName) => + templateName.charAt(0) !== "_" && + node.data.node.template[templateName].show && + (node.data.node.template[templateName].type === "str" || + node.data.node.template[templateName].type === "bool" || - node.data.node.template[n].type === + node.data.node.template[templateName].type === "float" || - node.data.node.template[n].type === + node.data.node.template[templateName].type === "code" || - node.data.node.template[n].type === + node.data.node.template[templateName].type === "prompt" || - node.data.node.template[n].type === + node.data.node.template[templateName].type === "file" || - node.data.node.template[n].type === "int") + node.data.node.template[templateName].type === "int") ) - .map((n, i) => { + .map((templateName, index) => { return ( - {n} + {templateName}
- {node.data.node.template[n].type === + {node.data.node.template[templateName].type === "str" && - !node.data.node.template[n].options ? ( + !node.data.node.template[templateName].options ? (
- {node.data.node.template[n] + {node.data.node.template[templateName] .list ? ( { + onChange={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, - node.data.node.template[n] + target, + node.data.node.template[templateName] ); }} /> - ) : node.data.node.template[n] + ) : node.data.node.template[templateName] .multiline ? ( @@ -306,32 +306,32 @@ export default function CodeTabsComponent({ editNode={true} value={ !node.data.node.template[ - n + templateName ].value || node.data.node.template[ - n + templateName ].value === "" ? "" : node.data.node - .template[n] + .template[templateName] .value } - onChange={(k) => { + onChange={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, + target, node.data.node - .template[n] + .template[templateName] ); }} /> @@ -342,46 +342,46 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} password={ - node.data.node.template[n] + node.data.node.template[templateName] .password ?? false } value={ - !node.data.node.template[n] + !node.data.node.template[templateName] .value || - node.data.node.template[n] + node.data.node.template[templateName] .value === "" ? "" : node.data.node.template[ - n + templateName ].value } - onChange={(k) => { + onChange={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, - node.data.node.template[n] + target, + node.data.node.template[templateName] ); }} /> )}
- ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "bool" ? (
{" "} { @@ -391,30 +391,30 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - n + templateName ].value = e; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], e, - node.data.node.template[n] + node.data.node.template[templateName] ); }} size="small" disabled={false} />
- ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "file" ? ( @@ -423,146 +423,146 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} value={ - node.data.node.template[n] + node.data.node.template[templateName] .value ?? "" } - onChange={(k: any) => {}} + onChange={(target: any) => {}} fileTypes={ - node.data.node.template[n] + node.data.node.template[templateName] .fileTypes } suffixes={ - node.data.node.template[n] + node.data.node.template[templateName] .suffixes } onFileChange={( value: any ) => { node.data.node.template[ - n + templateName ].file_path = value; }} >
- ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "float" ? (
{ + onChange={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, - node.data.node.template[n] + target, + node.data.node.template[templateName] ); }} />
- ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "str" && - node.data.node.template[n] + node.data.node.template[templateName] .options ? (
{ + onSelect={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, - node.data.node.template[n] + target, + node.data.node.template[templateName] ); }} value={ - !node.data.node.template[n] + !node.data.node.template[templateName] .value || - node.data.node.template[n] + node.data.node.template[templateName] .value === "" ? "" - : node.data.node.template[n] + : node.data.node.template[templateName] .value } >
- ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "int" ? (
{ + onChange={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, - node.data.node.template[n] + target, + node.data.node.template[templateName] ); }} />
- ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "prompt" ? ( @@ -571,44 +571,44 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} value={ - !node.data.node.template[n] + !node.data.node.template[templateName] .value || - node.data.node.template[n] + node.data.node.template[templateName] .value === "" ? "" : node.data.node.template[ - n + templateName ].value } - onChange={(k) => { + onChange={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, - node.data.node.template[n] + target, + node.data.node.template[templateName] ); }} /> - ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "code" ? ( @@ -617,36 +617,36 @@ export default function CodeTabsComponent({ disabled={false} editNode={true} value={ - !node.data.node.template[n] + !node.data.node.template[templateName] .value || - node.data.node.template[n] + node.data.node.template[templateName] .value === "" ? "" : node.data.node.template[ - n + templateName ].value } - onChange={(k) => { + onChange={(target) => { setData((old) => { let newInputList = cloneDeep(old); newInputList[ index ].data.node.template[ - n - ].value = k; + templateName + ].value = target; return newInputList; }); tweaks.buildTweakObject( node["data"]["id"], - k, - node.data.node.template[n] + target, + node.data.node.template[templateName] ); }} /> - ) : node.data.node.template[n].type === + ) : node.data.node.template[templateName].type === "Any" ? 
( "-" ) : ( From 1f93cb551303226ee91f89375dc98f4178e58b48 Mon Sep 17 00:00:00 2001 From: Igor Carvalho Date: Mon, 7 Aug 2023 17:14:54 -0300 Subject: [PATCH 59/90] Refactor: Change more variables names --- .../components/parameterComponent/index.tsx | 91 ++++++----- .../src/CustomNodes/GenericNode/index.tsx | 48 +++--- .../src/alerts/alertDropDown/index.tsx | 6 +- .../EditFlowSettingsComponent/index.tsx | 6 +- .../components/codeAreaComponent/index.tsx | 6 +- .../src/components/floatComponent/index.tsx | 14 +- .../src/components/headerComponent/index.tsx | 2 +- .../src/components/inputComponent/index.tsx | 4 +- .../components/inputFileComponent/index.tsx | 4 +- .../components/inputListComponent/index.tsx | 8 +- .../src/components/intComponent/index.tsx | 10 +- .../src/components/promptComponent/index.tsx | 4 +- .../components/textAreaComponent/index.tsx | 8 +- .../src/components/toggleComponent/index.tsx | 4 +- .../components/toggleShadComponent/index.tsx | 4 +- .../src/components/ui/rename-label.tsx | 8 +- src/frontend/src/contexts/tabsContext.tsx | 42 ++--- src/frontend/src/contexts/typesContext.tsx | 4 +- src/frontend/src/contexts/undoRedoContext.tsx | 8 +- src/frontend/src/modals/ApiModal/index.tsx | 26 ++-- .../src/modals/EditNodeModal/index.tsx | 144 +++++++++--------- src/frontend/src/modals/exportModal/index.tsx | 8 +- .../src/modals/flowSettingsModal/index.tsx | 6 +- .../src/modals/formModal/chatInput/index.tsx | 4 +- src/frontend/src/modals/formModal/index.tsx | 56 +++---- .../src/modals/genericModal/index.tsx | 10 +- .../components/DisclosureComponent/index.tsx | 6 +- .../components/PageComponent/index.tsx | 26 ++-- .../extraSidebarComponent/index.tsx | 42 ++--- .../components/nodeToolbarComponent/index.tsx | 22 +-- src/frontend/src/utils/reactflowUtils.ts | 60 ++++---- src/frontend/src/utils/utils.ts | 11 +- 32 files changed, 357 insertions(+), 345 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx 
b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 72c2a8e4d..42942a360 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -1,5 +1,11 @@ import { cloneDeep } from "lodash"; -import React, { useContext, useEffect, useRef, useState } from "react"; +import React, { + ReactNode, + useContext, + useEffect, + useRef, + useState, +} from "react"; import { Handle, Position, useUpdateNodeInternals } from "reactflow"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import CodeAreaComponent from "../../../../components/codeAreaComponent"; @@ -17,6 +23,7 @@ import { TOOLTIP_EMPTY } from "../../../../constants/constants"; import { TabsContext } from "../../../../contexts/tabsContext"; import { typesContext } from "../../../../contexts/typesContext"; import { ParameterComponentType } from "../../../../types/components"; +import { TabsState } from "../../../../types/tabs"; import { isValidConnection } from "../../../../utils/reactflowUtils"; import { nodeColors, @@ -38,15 +45,15 @@ export default function ParameterComponent({ required = false, optionalHandle = null, info = "", -}: ParameterComponentType) { - const ref = useRef(null); - const refHtml = useRef(null); - const infoHtml = useRef(null); +}: ParameterComponentType): JSX.Element { + const ref = useRef(null); + const refHtml = useRef(null); + const infoHtml = useRef(null); const updateNodeInternals = useUpdateNodeInternals(); const [position, setPosition] = useState(0); const { setTabsState, tabId, save, flows } = useContext(TabsContext); - const flow = flows.find((f) => f.id === tabId).data?.nodes ?? null; + const flow = flows.find((flow) => flow.id === tabId)?.data?.nodes ?? 
null; // Update component position useEffect(() => { @@ -62,16 +69,17 @@ export default function ParameterComponent({ const { reactFlowInstance } = useContext(typesContext); let disabled = - reactFlowInstance?.getEdges().some((e) => e.targetHandle === id) ?? false; + reactFlowInstance?.getEdges().some((edge) => edge.targetHandle === id) ?? false; const { data: myData } = useContext(typesContext); - const handleOnNewValue = (newValue: any) => { + const handleOnNewValue = (newValue: string | string[] | boolean): void => { let newData = cloneDeep(data); - newData.node.template[name].value = newValue; + newData.node!.template[name].value = newValue; setData(newData); // Set state to pending - setTabsState((prev) => { + //@ts-ignore + setTabsState((prev: TabsState) => { return { ...prev, [tabId]: { @@ -86,10 +94,11 @@ export default function ParameterComponent({ useEffect(() => { if (name === "openai_api_base") console.log(info); + // @ts-ignore infoHtml.current = ( -
- {info.split("\n").map((line, i) => ( -

+

+ {info.split("\n").map((line, index) => ( +

{line}

))} @@ -98,18 +107,19 @@ export default function ParameterComponent({ }, [info]); function renderTooltips() { - let groupedObj = groupByFamily(myData, tooltipTitle, left, flow); + let groupedObj = groupByFamily(myData, tooltipTitle!, left, flow!); if (groupedObj && groupedObj.length > 0) { - refHtml.current = groupedObj.map((item, i) => { + //@ts-ignore + refHtml.current = groupedObj.map((item, index) => { const Icon: any = nodeIconsLucide[item.family] ?? nodeIconsLucide["unknown"]; return ( 0 ? "mt-2 flex items-center" : "flex items-center" + index > 0 ? "mt-2 flex items-center" : "flex items-center" )} >
2 - ? item.type.split(", ").map((el, i) => ( - + ? item.type.split(", ").map((el, index) => ( + - {i === item.type.split(", ").length - 1 + {index === item.type.split(", ").length - 1 ? el : (el += `, `)} @@ -148,6 +158,7 @@ export default function ParameterComponent({ ); }); } else { + //@ts-ignore refHtml.current = {TOOLTIP_EMPTY}; } } @@ -207,7 +218,7 @@ export default function ParameterComponent({ position={left ? Position.Left : Position.Right} id={id} isValidConnection={(connection) => - isValidConnection(connection, reactFlowInstance) + isValidConnection(connection, reactFlowInstance!) } className={classNames( left ? "-ml-0.5 " : "-mr-0.5 ", @@ -223,9 +234,9 @@ export default function ParameterComponent({ {left === true && type === "str" && - !data.node.template[name].options ? ( + !data.node?.template[name].options ? (
- {data.node.template[name].list ? ( + {data.node?.template[name].list ? ( - ) : data.node.template[name].multiline ? ( + ) : data.node?.template[name].multiline ? ( )} @@ -255,9 +266,9 @@ export default function ParameterComponent({
{ - handleOnNewValue(t); + enabled={data.node?.template[name].value ?? false} + setEnabled={(isEnabled) => { + handleOnNewValue(isEnabled); }} size="large" /> @@ -266,13 +277,13 @@ export default function ParameterComponent({
) : left === true && type === "str" && - data.node.template[name].options ? ( + data.node?.template[name].options ? (
{ data.node = nodeClass; }} nodeClass={data.node} disabled={disabled} - value={data.node.template[name].value ?? ""} + value={data.node?.template[name].value ?? ""} onChange={handleOnNewValue} />
@@ -297,12 +308,12 @@ export default function ParameterComponent({
{ - data.node.template[name].file_path = t; + fileTypes={data.node?.template[name].fileTypes} + suffixes={data.node?.template[name].suffixes} + onFileChange={(filePath: string) => { + data.node!.template[name].file_path = filePath; save(); }} > @@ -311,7 +322,7 @@ export default function ParameterComponent({
@@ -324,7 +335,7 @@ export default function ParameterComponent({ }} nodeClass={data.node} disabled={disabled} - value={data.node.template[name].value ?? ""} + value={data.node?.template[name].value ?? ""} onChange={handleOnNewValue} />
diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index d72e46d23..61717ad1d 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -159,52 +159,52 @@ export default function GenericNode({ <> {Object.keys(data.node.template) - .filter((t) => t.charAt(0) !== "_") - .map((t: string, idx) => ( + .filter((templateName) => templateName.charAt(0) !== "_") + .map((templateName: string, idx) => (
- {data.node.template[t].show && - !data.node.template[t].advanced ? ( + {data.node.template[templateName].show && + !data.node.template[templateName].advanced ? ( ) : ( <> diff --git a/src/frontend/src/alerts/alertDropDown/index.tsx b/src/frontend/src/alerts/alertDropDown/index.tsx index 90838d693..59f42d3ec 100644 --- a/src/frontend/src/alerts/alertDropDown/index.tsx +++ b/src/frontend/src/alerts/alertDropDown/index.tsx @@ -22,9 +22,9 @@ export default function AlertDropdown({ children }: AlertDropdownType) { return ( { - setOpen(k); - if (k) setNotificationCenter(false); + onOpenChange={(target) => { + setOpen(target); + if (target) setNotificationCenter(false); }} > {children} diff --git a/src/frontend/src/components/EditFlowSettingsComponent/index.tsx b/src/frontend/src/components/EditFlowSettingsComponent/index.tsx index 6504c2b4e..caf2fa5b0 100644 --- a/src/frontend/src/components/EditFlowSettingsComponent/index.tsx +++ b/src/frontend/src/components/EditFlowSettingsComponent/index.tsx @@ -55,12 +55,12 @@ export const EditFlowSettings: React.FC = ({ }; const [desc, setDesc] = useState( - flows.find((f) => f.id === tabId).description + flows.find((flow) => flow.id === tabId).description ); const handleDescriptionChange = (event: ChangeEvent) => { - flows.find((f) => f.id === tabId).description = event.target.value; - setDesc(flows.find((f) => f.id === tabId).description); + flows.find((flow) => flow.id === tabId).description = event.target.value; + setDesc(flows.find((flow) => flow.id === tabId).description); setDescription(event.target.value); }; diff --git a/src/frontend/src/components/codeAreaComponent/index.tsx b/src/frontend/src/components/codeAreaComponent/index.tsx index 05e8127da..853fff1e8 100644 --- a/src/frontend/src/components/codeAreaComponent/index.tsx +++ b/src/frontend/src/components/codeAreaComponent/index.tsx @@ -34,9 +34,9 @@ export default function CodeAreaComponent({ value={myValue} nodeClass={nodeClass} setNodeClass={setNodeClass} - 
setValue={(t: string) => { - setMyValue(t); - onChange(t); + setValue={(value: string) => { + setMyValue(value); + onChange(value); }} >
diff --git a/src/frontend/src/components/floatComponent/index.tsx b/src/frontend/src/components/floatComponent/index.tsx index 40d6fc3fe..ae3719959 100644 --- a/src/frontend/src/components/floatComponent/index.tsx +++ b/src/frontend/src/components/floatComponent/index.tsx @@ -25,12 +25,12 @@ export default function FloatComponent({ type="number" step={step} min={min} - onInput={(e: React.ChangeEvent) => { - if (e.target.value < min.toString()) { - e.target.value = min.toString(); + onInput={(event: React.ChangeEvent) => { + if (event.target.value < min.toString()) { + event.target.value = min.toString(); } - if (e.target.value > max.toString()) { - e.target.value = max.toString(); + if (event.target.value > max.toString()) { + event.target.value = max.toString(); } }} max={max} @@ -40,8 +40,8 @@ export default function FloatComponent({ placeholder={ editNode ? "Number 0 to 1" : "Type a number from zero to one" } - onChange={(e) => { - onChange(e.target.value); + onChange={(event) => { + onChange(event.target.value); }} />
diff --git a/src/frontend/src/components/headerComponent/index.tsx b/src/frontend/src/components/headerComponent/index.tsx index 91e9bbd8a..545a49593 100644 --- a/src/frontend/src/components/headerComponent/index.tsx +++ b/src/frontend/src/components/headerComponent/index.tsx @@ -34,7 +34,7 @@ export default function Header() { ⛓️ - {flows.findIndex((f) => tabId === f.id) !== -1 && tabId !== "" && ( + {flows.findIndex((flow) => tabId === flow.id) !== -1 && tabId !== "" && ( )}
diff --git a/src/frontend/src/components/inputComponent/index.tsx b/src/frontend/src/components/inputComponent/index.tsx index 78b06c411..5345239ef 100644 --- a/src/frontend/src/components/inputComponent/index.tsx +++ b/src/frontend/src/components/inputComponent/index.tsx @@ -31,8 +31,8 @@ export default function InputComponent({ password && !editNode ? "pr-10" : "" )} placeholder={password && editNode ? "Key" : "Type something..."} - onChange={(e) => { - onChange(e.target.value); + onChange={(event) => { + onChange(event.target.value); }} /> {password && ( diff --git a/src/frontend/src/components/inputFileComponent/index.tsx b/src/frontend/src/components/inputFileComponent/index.tsx index 8c9693d8c..53d1ad4de 100644 --- a/src/frontend/src/components/inputFileComponent/index.tsx +++ b/src/frontend/src/components/inputFileComponent/index.tsx @@ -49,11 +49,11 @@ export default function InputFileComponent({ input.style.display = "none"; // Hidden from view input.multiple = false; // Allow only one file selection - input.onchange = (e: Event) => { + input.onchange = (event: Event) => { setLoading(true); // Get the selected file - const file = (e.target as HTMLInputElement).files?.[0]; + const file = (event.target as HTMLInputElement).files?.[0]; // Check if the file type is correct if (file && checkFileType(file.name)) { diff --git a/src/frontend/src/components/inputListComponent/index.tsx b/src/frontend/src/components/inputListComponent/index.tsx index 13f5f7cca..c66e93c8f 100644 --- a/src/frontend/src/components/inputListComponent/index.tsx +++ b/src/frontend/src/components/inputListComponent/index.tsx @@ -25,18 +25,18 @@ export default function InputListComponent({ "flex flex-col gap-3" )} > - {value.map((i, idx) => { + {value.map((singleValue, idx) => { return (
{ + onChange={(event) => { let newInputList = _.cloneDeep(value); - newInputList[idx] = e.target.value; + newInputList[idx] = event.target.value; onChange(newInputList); }} /> diff --git a/src/frontend/src/components/intComponent/index.tsx b/src/frontend/src/components/intComponent/index.tsx index c43055b47..504f3b816 100644 --- a/src/frontend/src/components/intComponent/index.tsx +++ b/src/frontend/src/components/intComponent/index.tsx @@ -41,17 +41,17 @@ export default function IntComponent({ type="number" step="1" min={min} - onInput={(e: React.ChangeEvent) => { - if (e.target.value < min.toString()) { - e.target.value = min.toString(); + onInput={(event: React.ChangeEvent) => { + if (event.target.value < min.toString()) { + event.target.value = min.toString(); } }} value={value ?? ""} className={editNode ? "input-edit-node" : ""} disabled={disabled} placeholder={editNode ? "Integer number" : "Type an integer number"} - onChange={(e) => { - onChange(e.target.value); + onChange={(event) => { + onChange(event.target.value); }} />
diff --git a/src/frontend/src/components/promptComponent/index.tsx b/src/frontend/src/components/promptComponent/index.tsx index 82196658f..e3393512b 100644 --- a/src/frontend/src/components/promptComponent/index.tsx +++ b/src/frontend/src/components/promptComponent/index.tsx @@ -39,8 +39,8 @@ export default function PromptAreaComponent({ value={value} buttonText="Check & Save" modalTitle="Edit Prompt" - setValue={(t: string) => { - onChange(t); + setValue={(value: string) => { + onChange(value); }} nodeClass={nodeClass} setNodeClass={setNodeClass} diff --git a/src/frontend/src/components/textAreaComponent/index.tsx b/src/frontend/src/components/textAreaComponent/index.tsx index cd598f01e..7c5af6da9 100644 --- a/src/frontend/src/components/textAreaComponent/index.tsx +++ b/src/frontend/src/components/textAreaComponent/index.tsx @@ -25,8 +25,8 @@ export default function TextAreaComponent({ disabled={disabled} className={editNode ? "input-edit-node" : ""} placeholder={"Type something..."} - onChange={(e) => { - onChange(e.target.value); + onChange={(event) => { + onChange(event.target.value); }} />
@@ -35,8 +35,8 @@ export default function TextAreaComponent({ buttonText="Finishing Editing" modalTitle="Edit Text" value={value} - setValue={(t: string) => { - onChange(t); + setValue={(value: string) => { + onChange(value); }} > {!editNode && ( diff --git a/src/frontend/src/components/toggleComponent/index.tsx b/src/frontend/src/components/toggleComponent/index.tsx index 210c9223a..23acccee7 100644 --- a/src/frontend/src/components/toggleComponent/index.tsx +++ b/src/frontend/src/components/toggleComponent/index.tsx @@ -18,8 +18,8 @@ export default function ToggleComponent({
{ - setEnabled(x); + onChange={(isEnabled: boolean) => { + setEnabled(isEnabled); }} className={classNames( enabled ? "bg-primary" : "bg-input", diff --git a/src/frontend/src/components/toggleShadComponent/index.tsx b/src/frontend/src/components/toggleShadComponent/index.tsx index 95ef6d062..5671b6465 100644 --- a/src/frontend/src/components/toggleShadComponent/index.tsx +++ b/src/frontend/src/components/toggleShadComponent/index.tsx @@ -35,8 +35,8 @@ export default function ToggleShadComponent({ disabled={disabled} className="" checked={enabled} - onCheckedChange={(x: boolean) => { - setEnabled(x); + onCheckedChange={(isEnabled: boolean) => { + setEnabled(isEnabled); }} >
diff --git a/src/frontend/src/components/ui/rename-label.tsx b/src/frontend/src/components/ui/rename-label.tsx index e1cef4996..8b12fdfb0 100644 --- a/src/frontend/src/components/ui/rename-label.tsx +++ b/src/frontend/src/components/ui/rename-label.tsx @@ -15,8 +15,8 @@ export default function RenameLabel(props) { useEffect(() => { if (isRename) { setMyValue(props.value); - document.addEventListener("keydown", (e) => { - if (e.key === "Escape") { + document.addEventListener("keydown", (event) => { + if (event.key === "Escape") { setIsRename(false); props.setValue(""); } @@ -67,8 +67,8 @@ export default function RenameLabel(props) { } }} value={myValue} - onChange={(e) => { - setMyValue(e.target.value); + onChange={(event) => { + setMyValue(event.target.value); }} /> ) : ( diff --git a/src/frontend/src/contexts/tabsContext.tsx b/src/frontend/src/contexts/tabsContext.tsx index 5ea8c11c0..2cc4adc2b 100644 --- a/src/frontend/src/contexts/tabsContext.tsx +++ b/src/frontend/src/contexts/tabsContext.tsx @@ -316,11 +316,11 @@ export function TabsProvider({ children }: { children: ReactNode }) { const input = document.createElement("input"); input.type = "file"; // add a change event listener to the file input - input.onchange = (e: Event) => { + input.onchange = (event: Event) => { // check if the file type is application/json - if ((e.target as HTMLInputElement).files[0].type === "application/json") { + if ((event.target as HTMLInputElement).files[0].type === "application/json") { // get the file from the file input - const file = (e.target as HTMLInputElement).files[0]; + const file = (event.target as HTMLInputElement).files[0]; // read the file as text const formData = new FormData(); formData.append("file", file); @@ -359,12 +359,12 @@ export function TabsProvider({ children }: { children: ReactNode }) { let idsMap = {}; let nodes = reactFlowInstance.getNodes(); let edges = reactFlowInstance.getEdges(); - selectionInstance.nodes.forEach((n) => { - if (n.position.y < 
minimumY) { - minimumY = n.position.y; + selectionInstance.nodes.forEach((node) => { + if (node.position.y < minimumY) { + minimumY = node.position.y; } - if (n.position.x < minimumX) { - minimumX = n.position.x; + if (node.position.x < minimumX) { + minimumX = node.position.x; } }); @@ -372,43 +372,43 @@ export function TabsProvider({ children }: { children: ReactNode }) { ? { x: position.paneX + position.x, y: position.paneY + position.y } : reactFlowInstance.project({ x: position.x, y: position.y }); - selectionInstance.nodes.forEach((n: NodeType) => { + selectionInstance.nodes.forEach((node: NodeType) => { // Generate a unique node ID - let newId = getNodeId(n.data.type); - idsMap[n.id] = newId; + let newId = getNodeId(node.data.type); + idsMap[node.id] = newId; // Create a new node object const newNode: NodeType = { id: newId, type: "genericNode", position: { - x: insidePosition.x + n.position.x - minimumX, - y: insidePosition.y + n.position.y - minimumY, + x: insidePosition.x + node.position.x - minimumX, + y: insidePosition.y + node.position.y - minimumY, }, data: { - ..._.cloneDeep(n.data), + ..._.cloneDeep(node.data), id: newId, }, }; // Add the new node to the list of nodes in state nodes = nodes - .map((e) => ({ ...e, selected: false })) + .map((node) => ({ ...node, selected: false })) .concat({ ...newNode, selected: false }); }); reactFlowInstance.setNodes(nodes); - selectionInstance.edges.forEach((e) => { - let source = idsMap[e.source]; - let target = idsMap[e.target]; - let sourceHandleSplitted = e.sourceHandle.split("|"); + selectionInstance.edges.forEach((edge) => { + let source = idsMap[edge.source]; + let target = idsMap[edge.target]; + let sourceHandleSplitted = edge.sourceHandle.split("|"); let sourceHandle = sourceHandleSplitted[0] + "|" + source + "|" + sourceHandleSplitted.slice(2).join("|"); - let targetHandleSplitted = e.targetHandle.split("|"); + let targetHandleSplitted = edge.targetHandle.split("|"); let targetHandle = 
targetHandleSplitted.slice(0, -1).join("|") + "|" + target; let id = @@ -433,7 +433,7 @@ export function TabsProvider({ children }: { children: ReactNode }) { animated: targetHandle.split("|")[0] === "Text", selected: false, }, - edges.map((e) => ({ ...e, selected: false })) + edges.map((edge) => ({ ...edge, selected: false })) ); }); reactFlowInstance.setEdges(edges); diff --git a/src/frontend/src/contexts/typesContext.tsx b/src/frontend/src/contexts/typesContext.tsx index d4523bf60..c87a2a19a 100644 --- a/src/frontend/src/contexts/typesContext.tsx +++ b/src/frontend/src/contexts/typesContext.tsx @@ -88,12 +88,12 @@ export function TypesProvider({ children }: { children: ReactNode }) { function deleteNode(idx: string) { reactFlowInstance.setNodes( - reactFlowInstance.getNodes().filter((n: Node) => n.id !== idx) + reactFlowInstance.getNodes().filter((node: Node) => node.id !== idx) ); reactFlowInstance.setEdges( reactFlowInstance .getEdges() - .filter((ns) => ns.source !== idx && ns.target !== idx) + .filter((edge) => edge.source !== idx && edge.target !== idx) ); } return ( diff --git a/src/frontend/src/contexts/undoRedoContext.tsx b/src/frontend/src/contexts/undoRedoContext.tsx index dbe0baa45..218a5ec8a 100644 --- a/src/frontend/src/contexts/undoRedoContext.tsx +++ b/src/frontend/src/contexts/undoRedoContext.tsx @@ -45,14 +45,14 @@ export function UndoRedoProvider({ children }) { const [past, setPast] = useState(flows.map(() => [])); const [future, setFuture] = useState(flows.map(() => [])); const [tabIndex, setTabIndex] = useState( - flows.findIndex((f) => f.id === tabId) + flows.findIndex((flow) => flow.id === tabId) ); useEffect(() => { // whenever the flows variable changes, we need to add one array to the past and future states - setPast((old) => flows.map((f, i) => (old[i] ? old[i] : []))); - setFuture((old) => flows.map((f, i) => (old[i] ? 
old[i] : []))); - setTabIndex(flows.findIndex((f) => f.id === tabId)); + setPast((old) => flows.map((flow, index) => (old[index] ? old[index] : []))); + setFuture((old) => flows.map((flow, index) => (old[index] ? old[index] : []))); + setTabIndex(flows.findIndex((flow) => flow.id === tabId)); }, [flows, tabId]); const { setNodes, setEdges, getNodes, getEdges } = useReactFlow(); diff --git a/src/frontend/src/modals/ApiModal/index.tsx b/src/frontend/src/modals/ApiModal/index.tsx index 236a03d65..7af77a205 100644 --- a/src/frontend/src/modals/ApiModal/index.tsx +++ b/src/frontend/src/modals/ApiModal/index.tsx @@ -181,22 +181,22 @@ const ApiModal = forwardRef( function filterNodes() { let arrNodesWithValues = []; - flow["data"]["nodes"].forEach((t) => { - Object.keys(t["data"]["node"]["template"]) + flow["data"]["nodes"].forEach((node) => { + Object.keys(node["data"]["node"]["template"]) .filter( - (n) => - n.charAt(0) !== "_" && - t.data.node.template[n].show && - (t.data.node.template[n].type === "str" || - t.data.node.template[n].type === "bool" || - t.data.node.template[n].type === "float" || - t.data.node.template[n].type === "code" || - t.data.node.template[n].type === "prompt" || - t.data.node.template[n].type === "file" || - t.data.node.template[n].type === "int") + (templateName) => + templateName.charAt(0) !== "_" && + node.data.node.template[templateName].show && + (node.data.node.template[templateName].type === "str" || + node.data.node.template[templateName].type === "bool" || + node.data.node.template[templateName].type === "float" || + node.data.node.template[templateName].type === "code" || + node.data.node.template[templateName].type === "prompt" || + node.data.node.template[templateName].type === "file" || + node.data.node.template[templateName].type === "int") ) .map((n, i) => { - arrNodesWithValues.push(t["id"]); + arrNodesWithValues.push(node["id"]); }); }); diff --git a/src/frontend/src/modals/EditNodeModal/index.tsx 
b/src/frontend/src/modals/EditNodeModal/index.tsx index 4b4b55568..9447f21ac 100644 --- a/src/frontend/src/modals/EditNodeModal/index.tsx +++ b/src/frontend/src/modals/EditNodeModal/index.tsx @@ -49,13 +49,13 @@ const EditNodeModal = forwardRef( const { reactFlowInstance } = useContext(typesContext); let disabled = - reactFlowInstance?.getEdges().some((e) => e.targetHandle === data.id) ?? + reactFlowInstance?.getEdges().some((edge) => edge.targetHandle === data.id) ?? false; - function changeAdvanced(n) { + function changeAdvanced(templateParam) { setMyData((old) => { let newData = cloneDeep(old); - newData.node.template[n].advanced = !newData.node.template[n].advanced; + newData.node.template[templateParam].advanced = !newData.node.template[templateParam].advanced; return newData; }); } @@ -112,51 +112,51 @@ const EditNodeModal = forwardRef( {Object.keys(myData.node.template) .filter( - (t) => - t.charAt(0) !== "_" && - myData.node.template[t].show && - (myData.node.template[t].type === "str" || - myData.node.template[t].type === "bool" || - myData.node.template[t].type === "float" || - myData.node.template[t].type === "code" || - myData.node.template[t].type === "prompt" || - myData.node.template[t].type === "file" || - myData.node.template[t].type === "int") + (templateParam) => + templateParam.charAt(0) !== "_" && + myData.node.template[templateParam].show && + (myData.node.template[templateParam].type === "str" || + myData.node.template[templateParam].type === "bool" || + myData.node.template[templateParam].type === "float" || + myData.node.template[templateParam].type === "code" || + myData.node.template[templateParam].type === "prompt" || + myData.node.template[templateParam].type === "file" || + myData.node.template[templateParam].type === "int") ) - .map((n, i) => ( - + .map((templateParam, index) => ( + - {myData.node.template[n].name - ? myData.node.template[n].name - : myData.node.template[n].display_name} + {myData.node.template[templateParam].name + ? 
myData.node.template[templateParam].name + : myData.node.template[templateParam].display_name} - {myData.node.template[n].type === "str" && - !myData.node.template[n].options ? ( + {myData.node.template[templateParam].type === "str" && + !myData.node.template[templateParam].options ? (
- {myData.node.template[n].list ? ( + {myData.node.template[templateParam].list ? ( { - handleOnNewValue(t, n); + onChange={(value: string[]) => { + handleOnNewValue(value, templateParam); }} /> - ) : myData.node.template[n].multiline ? ( + ) : myData.node.template[templateParam].multiline ? ( { - handleOnNewValue(t, n); + onChange={(value: string) => { + handleOnNewValue(value, templateParam); }} /> ) : ( @@ -164,105 +164,105 @@ const EditNodeModal = forwardRef( editNode={true} disabled={disabled} password={ - myData.node.template[n].password ?? + myData.node.template[templateParam].password ?? false } value={ - myData.node.template[n].value ?? "" + myData.node.template[templateParam].value ?? "" } - onChange={(t) => { - handleOnNewValue(t, n); + onChange={(value) => { + handleOnNewValue(value, templateParam); }} /> )}
- ) : myData.node.template[n].type === "bool" ? ( + ) : myData.node.template[templateParam].type === "bool" ? (
{" "} { - handleOnNewValue(t, n); + enabled={myData.node.template[templateParam].value} + setEnabled={(isEnabled) => { + handleOnNewValue(isEnabled, templateParam); }} size="small" />
- ) : myData.node.template[n].type === "float" ? ( + ) : myData.node.template[templateParam].type === "float" ? (
{ - handleOnNewValue(t, n); + value={myData.node.template[templateParam].value ?? ""} + onChange={(value) => { + handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[n].type === "str" && - myData.node.template[n].options ? ( + ) : myData.node.template[templateParam].type === "str" && + myData.node.template[templateParam].options ? (
handleOnNewValue(t, n)} + options={myData.node.template[templateParam].options} + onSelect={(value) => handleOnNewValue(value, templateParam)} value={ - myData.node.template[n].value ?? + myData.node.template[templateParam].value ?? "Choose an option" } >
- ) : myData.node.template[n].type === "int" ? ( + ) : myData.node.template[templateParam].type === "int" ? (
{ - handleOnNewValue(t, n); + value={myData.node.template[templateParam].value ?? ""} + onChange={(value) => { + handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[n].type === "file" ? ( + ) : myData.node.template[templateParam].type === "file" ? (
{ - handleOnNewValue(t, n); + value={myData.node.template[templateParam].value ?? ""} + onChange={(value: string) => { + handleOnNewValue(value, templateParam); }} fileTypes={ - myData.node.template[n].fileTypes + myData.node.template[templateParam].fileTypes } - suffixes={myData.node.template[n].suffixes} - onFileChange={(t: string) => { - data.node.template[n].file_path = t; + suffixes={myData.node.template[templateParam].suffixes} + onFileChange={(filePath: string) => { + data.node.template[templateParam].file_path = filePath; }} >
- ) : myData.node.template[n].type === "prompt" ? ( + ) : myData.node.template[templateParam].type === "prompt" ? (
{ myData.node = nodeClass; }} - value={myData.node.template[n].value ?? ""} - onChange={(t: string) => { - handleOnNewValue(t, n); + value={myData.node.template[templateParam].value ?? ""} + onChange={(value: string) => { + handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[n].type === "code" ? ( + ) : myData.node.template[templateParam].type === "code" ? (
{ data.node = nodeClass; @@ -270,13 +270,13 @@ const EditNodeModal = forwardRef( nodeClass={data.node} disabled={disabled} editNode={true} - value={myData.node.template[n].value ?? ""} - onChange={(t: string) => { - handleOnNewValue(t, n); + value={myData.node.template[templateParam].value ?? ""} + onChange={(value: string) => { + handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[n].type === "Any" ? ( + ) : myData.node.template[templateParam].type === "Any" ? ( "-" ) : (
@@ -285,8 +285,8 @@ const EditNodeModal = forwardRef(
changeAdvanced(n)} + enabled={!myData.node.template[templateParam].advanced} + setEnabled={(e) => changeAdvanced(templateParam)} disabled={disabled} size="small" /> diff --git a/src/frontend/src/modals/exportModal/index.tsx b/src/frontend/src/modals/exportModal/index.tsx index c45723e04..d3bbbaa64 100644 --- a/src/frontend/src/modals/exportModal/index.tsx +++ b/src/frontend/src/modals/exportModal/index.tsx @@ -12,10 +12,10 @@ const ExportModal = forwardRef((props: { children: ReactNode }, ref) => { const { flows, tabId, updateFlow, downloadFlow, saveFlow } = useContext(TabsContext); const [checked, setChecked] = useState(false); - const [name, setName] = useState(flows.find((f) => f.id === tabId).name); + const [name, setName] = useState(flows.find((flow) => flow.id === tabId).name); const [invalidName, setInvalidName] = useState(false); const [description, setDescription] = useState( - flows.find((f) => f.id === tabId).description + flows.find((flow) => flow.id === tabId).description ); const [open, setOpen] = useState(false); return ( @@ -59,13 +59,13 @@ const ExportModal = forwardRef((props: { children: ReactNode }, ref) => { onClick={() => { if (checked) downloadFlow( - flows.find((f) => f.id === tabId), + flows.find((flow) => flow.id === tabId), name, description ); else downloadFlow( - removeApiKeys(flows.find((f) => f.id === tabId)), + removeApiKeys(flows.find((flow) => flow.id === tabId)), name, description ); diff --git a/src/frontend/src/modals/flowSettingsModal/index.tsx b/src/frontend/src/modals/flowSettingsModal/index.tsx index 7f9e2e6e9..12db76931 100644 --- a/src/frontend/src/modals/flowSettingsModal/index.tsx +++ b/src/frontend/src/modals/flowSettingsModal/index.tsx @@ -19,14 +19,14 @@ export default function FlowSettingsModal({ const { flows, tabId, updateFlow, setTabsState, saveFlow } = useContext(TabsContext); const maxLength = 50; - const [name, setName] = useState(flows.find((f) => f.id === tabId).name); + const [name, setName] = 
useState(flows.find((flow) => flow.id === tabId).name); const [description, setDescription] = useState( - flows.find((f) => f.id === tabId).description + flows.find((flow) => flow.id === tabId).description ); const [invalidName, setInvalidName] = useState(false); function handleClick() { - let savedFlow = flows.find((f) => f.id === tabId); + let savedFlow = flows.find((flow) => flow.id === tabId); savedFlow.name = name; savedFlow.description = description; saveFlow(savedFlow); diff --git a/src/frontend/src/modals/formModal/chatInput/index.tsx b/src/frontend/src/modals/formModal/chatInput/index.tsx index e8be2f292..be1e5a45a 100644 --- a/src/frontend/src/modals/formModal/chatInput/index.tsx +++ b/src/frontend/src/modals/formModal/chatInput/index.tsx @@ -46,8 +46,8 @@ export default function ChatInput({ }`, }} value={lockChat ? "Thinking..." : chatValue} - onChange={(e) => { - setChatValue(e.target.value); + onChange={(event) => { + setChatValue(event.target.value); }} className={classNames( lockChat diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index 751eacea1..d0fa348d1 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -46,7 +46,7 @@ export default function FormModal({ const handleKeys = formKeysData.handle_keys; const keyToUse = Object.keys(inputKeys).find( - (k) => !handleKeys.some((j) => j === k) && inputKeys[k] === "" + (key) => !handleKeys.some((j) => j === key) && inputKeys[key] === "" ); return inputKeys[keyToUse]; @@ -69,9 +69,9 @@ export default function FormModal({ const tabsStateFlowIdFormKeysData = tabsStateFlowId.formKeysData; const [chatKey, setChatKey] = useState( Object.keys(tabsState[flow.id].formKeysData.input_keys).find( - (k) => - !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && - tabsState[flow.id].formKeysData.input_keys[k] === "" + (key) => + !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === 
key) && + tabsState[flow.id].formKeysData.input_keys[key] === "" ) ); @@ -420,13 +420,13 @@ export default function FormModal({
{Object.keys(tabsState[id.current].formKeysData.input_keys).map( - (i, k) => ( -
+ (key, index) => ( +
- {i} + {key}
- handleOnCheckedChange(value, i) + handleOnCheckedChange(value, key) } size="small" disabled={tabsState[ id.current - ].formKeysData.handle_keys.some((t) => t === i)} + ].formKeysData.handle_keys.some((t) => t === key)} />
} - key={k} - keyValue={i} + key={index} + keyValue={key} >
{tabsState[id.current].formKeysData.handle_keys.some( - (t) => t === i + (t) => t === key ) && (
Source: Component @@ -462,18 +462,18 @@ export default function FormModal({
@@ -481,17 +481,17 @@ export default function FormModal({
) )} - {tabsState[id.current].formKeysData.memory_keys.map((i, k) => ( -
+ {tabsState[id.current].formKeysData.memory_keys.map((key, index) => ( +
- {i} + {key}
{}} size="small" disabled={true} @@ -499,8 +499,8 @@ export default function FormModal({
} - key={k} - keyValue={i} + key={index} + keyValue={key} >
@@ -529,14 +529,14 @@ export default function FormModal({
{chatHistory.length > 0 ? ( - chatHistory.map((c, i) => ( + chatHistory.map((chat, index) => ( )) ) : ( diff --git a/src/frontend/src/modals/genericModal/index.tsx b/src/frontend/src/modals/genericModal/index.tsx index df9d32f6d..c3d26700f 100644 --- a/src/frontend/src/modals/genericModal/index.tsx +++ b/src/frontend/src/modals/genericModal/index.tsx @@ -208,9 +208,9 @@ export default function GenericModal({ setIsEdit(false); }} autoFocus - onChange={(e) => { - setInputValue(e.target.value); - checkVariables(e.target.value); + onChange={(event) => { + setInputValue(event.target.value); + checkVariables(event.target.value); }} placeholder="Type message here." /> @@ -221,8 +221,8 @@ export default function GenericModal({ ref={ref} className="form-input h-full w-full rounded-lg focus-visible:ring-1" value={inputValue} - onChange={(e) => { - setInputValue(e.target.value); + onChange={(event) => { + setInputValue(event.target.value); }} placeholder="Type message here." /> diff --git a/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx index d48f0d812..33287f60a 100644 --- a/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/DisclosureComponent/index.tsx @@ -18,9 +18,9 @@ export default function DisclosureComponent({ {title}
- {buttons.map((x, index) => ( - ))}
diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index ac9aa1d01..63e89e883 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -141,10 +141,10 @@ export default function Page({ flow }: { flow: FlowType }) { }, [setExtraComponent, setExtraNavigation]); const onEdgesChangeMod = useCallback( - (s: EdgeChange[]) => { - onEdgesChange(s); - setNodes((x) => { - let newX = _.cloneDeep(x); + (change: EdgeChange[]) => { + onEdgesChange(change); + setNodes((node) => { + let newX = _.cloneDeep(node); return newX; }); setTabsState((prev) => { @@ -161,8 +161,8 @@ export default function Page({ flow }: { flow: FlowType }) { ); const onNodesChangeMod = useCallback( - (s: NodeChange[]) => { - onNodesChange(s); + (change: NodeChange[]) => { + onNodesChange(change); setTabsState((prev) => { return { ...prev, @@ -193,8 +193,8 @@ export default function Page({ flow }: { flow: FlowType }) { eds ) ); - setNodes((x) => { - let newX = _.cloneDeep(x); + setNodes((node) => { + let newX = _.cloneDeep(node); return newX; }); }, @@ -219,7 +219,7 @@ export default function Page({ flow }: { flow: FlowType }) { const onDragOver = useCallback((event: React.DragEvent) => { event.preventDefault(); - if (event.dataTransfer.types.some((t) => t === "nodedata")) { + if (event.dataTransfer.types.some((types) => types === "nodedata")) { event.dataTransfer.dropEffect = "move"; } else { event.dataTransfer.dropEffect = "copy"; @@ -229,7 +229,7 @@ export default function Page({ flow }: { flow: FlowType }) { const onDrop = useCallback( (event: React.DragEvent) => { event.preventDefault(); - if (event.dataTransfer.types.some((t) => t === "nodedata")) { + if (event.dataTransfer.types.some((types) => types === "nodedata")) { takeSnapshot(); // Get the current bounds of the ReactFlow wrapper element @@ -281,7 
+281,7 @@ export default function Page({ flow }: { flow: FlowType }) { // Add the new node to the list of nodes in state } setNodes((nds) => nds.concat(newNode)); - } else if (event.dataTransfer.types.some((t) => t === "Files")) { + } else if (event.dataTransfer.types.some((types) => types === "Files")) { takeSnapshot(); uploadFlow(false, event.dataTransfer.files.item(0)); } @@ -303,7 +303,7 @@ export default function Page({ flow }: { flow: FlowType }) { takeSnapshot(); setEdges( edges.filter( - (ns) => !mynodes.some((n) => ns.source === n.id || ns.target === n.id) + (edge) => !mynodes.some((node) => edge.source === node.id || edge.target === node.id) ) ); }, @@ -326,7 +326,7 @@ export default function Page({ flow }: { flow: FlowType }) { const onEdgeUpdateEnd = useCallback((_, edge) => { if (!edgeUpdateSuccessful.current) { - setEdges((eds) => eds.filter((e) => e.id !== edge.id)); + setEdges((eds) => eds.filter((edg) => edg.id !== edge.id)); } edgeUpdateSuccessful.current = true; diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index 5413d6ec9..924168f9d 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -56,7 +56,7 @@ export default function ExtraSidebar() { return ret; }); } - const flow = flows.find((f) => f.id === tabId); + const flow = flows.find((flow) => flow.id === tabId); useEffect(() => { // show components with error on load let errors = []; @@ -143,10 +143,10 @@ export default function ExtraSidebar() { id="search" placeholder="Search" className="nopan nodrag noundo nocopy input-search" - onChange={(e) => { - handleSearchInput(e.target.value); + onChange={(event) => { + handleSearchInput(event.target.value); // Set search input state - setSearch(e.target.value); + setSearch(event.target.value); }} />
@@ -161,42 +161,42 @@ export default function ExtraSidebar() {
{Object.keys(dataFilter) .sort() - .map((d: keyof APIObjectType, i) => - Object.keys(dataFilter[d]).length > 0 ? ( + .map((SBSectionName: keyof APIObjectType, index) => + Object.keys(dataFilter[SBSectionName]).length > 0 ? (
- {Object.keys(dataFilter[d]) + {Object.keys(dataFilter[SBSectionName]) .sort() - .map((t: string, k) => ( + .map((SBItemName: string, index) => ( -
+
onDragStart(event, { - type: t, - node: data[d][t], + type: SBItemName, + node: data[SBSectionName][SBItemName], }) } onDragEnd={() => { @@ -209,7 +209,7 @@ export default function ExtraSidebar() { >
- {data[d][t].display_name} + {data[SBSectionName][SBItemName].display_name} ) : ( -
+
) )}
diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx index 06d8ea06d..9bd6db214 100644 --- a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx @@ -9,17 +9,17 @@ import { classNames } from "../../../../utils/utils"; export default function NodeToolbarComponent({ data, setData, deleteNode }) { const [nodeLength, setNodeLength] = useState( Object.keys(data.node.template).filter( - (t) => - t.charAt(0) !== "_" && - data.node.template[t].show && - (data.node.template[t].type === "str" || - data.node.template[t].type === "bool" || - data.node.template[t].type === "float" || - data.node.template[t].type === "code" || - data.node.template[t].type === "prompt" || - data.node.template[t].type === "file" || - data.node.template[t].type === "Any" || - data.node.template[t].type === "int") + (templateName) => + templateName.charAt(0) !== "_" && + data.node.template[templateName].show && + (data.node.template[templateName].type === "str" || + data.node.template[templateName].type === "bool" || + data.node.template[templateName].type === "float" || + data.node.template[templateName].type === "code" || + data.node.template[templateName].type === "prompt" || + data.node.template[templateName].type === "file" || + data.node.template[templateName].type === "Any" || + data.node.template[templateName].type === "int") ).length ); diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts index ff486ebb2..416ba145f 100644 --- a/src/frontend/src/utils/reactflowUtils.ts +++ b/src/frontend/src/utils/reactflowUtils.ts @@ -15,7 +15,7 @@ export function cleanEdges({ const sourceNode = nodes.find((node) => node.id === edge.source); const targetNode = nodes.find((node) => node.id === edge.target); if (!sourceNode || !targetNode) { - newEdges = 
newEdges.filter((e) => e.id !== edge.id); + newEdges = newEdges.filter((edg) => edg.id !== edge.id); } // check if the source and target handle still exists if (sourceNode && targetNode) { @@ -41,7 +41,7 @@ export function cleanEdges({ ...sourceNode.data.node.base_classes, ].join("|"); if (id !== sourceHandle) { - newEdges = newEdges.filter((e) => e.id !== edge.id); + newEdges = newEdges.filter((edg) => edg.id !== edge.id); } } } @@ -57,15 +57,15 @@ export function isValidConnection( targetHandle .split("|")[0] .split(";") - .some((n) => n === sourceHandle.split("|")[0]) || + .some((target) => target === sourceHandle.split("|")[0]) || sourceHandle .split("|") .slice(2) - .some((t) => + .some((target) => targetHandle .split("|")[0] .split(";") - .some((n) => n === t) + .some((n) => n === target) ) || targetHandle.split("|")[0] === "str" ) { @@ -129,35 +129,35 @@ export function updateTemplate( export function updateIds(newFlow, getNodeId) { let idsMap = {}; - newFlow.nodes.forEach((n: NodeType) => { + newFlow.nodes.forEach((node: NodeType) => { // Generate a unique node ID - let newId = getNodeId(n.data.type); - idsMap[n.id] = newId; - n.id = newId; - n.data.id = newId; + let newId = getNodeId(node.data.type); + idsMap[node.id] = newId; + node.id = newId; + node.data.id = newId; // Add the new node to the list of nodes in state }); - newFlow.edges.forEach((e) => { - e.source = idsMap[e.source]; - e.target = idsMap[e.target]; - let sourceHandleSplitted = e.sourceHandle.split("|"); - e.sourceHandle = + newFlow.edges.forEach((edge) => { + edge.source = idsMap[edge.source]; + edge.target = idsMap[edge.target]; + let sourceHandleSplitted = edge.sourceHandle.split("|"); + edge.sourceHandle = sourceHandleSplitted[0] + "|" + - e.source + + edge.source + "|" + sourceHandleSplitted.slice(2).join("|"); - let targetHandleSplitted = e.targetHandle.split("|"); - e.targetHandle = - targetHandleSplitted.slice(0, -1).join("|") + "|" + e.target; - e.id = + let targetHandleSplitted = 
edge.targetHandle.split("|"); + edge.targetHandle = + targetHandleSplitted.slice(0, -1).join("|") + "|" + edge.target; + edge.id = "reactflow__edge-" + - e.source + - e.sourceHandle + + edge.source + + edge.sourceHandle + "-" + - e.target + - e.targetHandle; + edge.target + + edge.targetHandle; }); } @@ -169,10 +169,10 @@ export function buildTweaks(flow) { } export function validateNode( - n: NodeType, + node: NodeType, reactFlowInstance: ReactFlowInstance ): Array { - if (!n.data?.node?.template || !Object.keys(n.data.node.template)) { + if (!node.data?.node?.template || !Object.keys(node.data.node.template)) { return [ "We've noticed a potential issue with a node in the flow. Please review it and, if necessary, submit a bug report with your exported flow file. Thank you for your help!", ]; @@ -181,7 +181,7 @@ export function validateNode( const { type, node: { template }, - } = n.data; + } = node.data; return Object.keys(template).reduce( (errors: Array, t) => @@ -194,9 +194,9 @@ export function validateNode( !reactFlowInstance .getEdges() .some( - (e) => - e.targetHandle.split("|")[1] === t && - e.targetHandle.split("|")[2] === n.id + (edge) => + edge.targetHandle.split("|")[1] === t && + edge.targetHandle.split("|")[2] === node.id ) ? 
[ `${type} is missing ${ diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts index 462e72084..dc387635c 100644 --- a/src/frontend/src/utils/utils.ts +++ b/src/frontend/src/utils/utils.ts @@ -103,14 +103,15 @@ export function groupByFamily(data, baseClasses, left, flow?: NodeType[]) { "int", ]); - const checkBaseClass = (t: any) => - t.type && - t.show && - ((!excludeTypes.has(t.type) && baseClassesSet.has(t.type)) || - (t.input_types && t.input_types.some((x) => baseClassesSet.has(x)))); + const checkBaseClass = (template: any) => + template.type && + template.show && + ((!excludeTypes.has(template.type) && baseClassesSet.has(template.type)) || + (template.input_types && template.input_types.some((inputType) => baseClassesSet.has(inputType)))); if (flow) { for (const node of flow) { + console.log(node) const nodeData = node.data; const foundNode = checkedNodes.get(nodeData.type); checkedNodes.set(nodeData.type, { From 71018dbd43b903b559f966b4498043cff5891d91 Mon Sep 17 00:00:00 2001 From: Igor Carvalho Date: Mon, 7 Aug 2023 17:20:26 -0300 Subject: [PATCH 60/90] change more names --- src/frontend/src/utils/utils.ts | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts index dc387635c..0c7ac5b6b 100644 --- a/src/frontend/src/utils/utils.ts +++ b/src/frontend/src/utils/utils.ts @@ -111,7 +111,6 @@ export function groupByFamily(data, baseClasses, left, flow?: NodeType[]) { if (flow) { for (const node of flow) { - console.log(node) const nodeData = node.data; const foundNode = checkedNodes.get(nodeData.type); checkedNodes.set(nodeData.type, { @@ -120,7 +119,7 @@ export function groupByFamily(data, baseClasses, left, flow?: NodeType[]) { Object.values(nodeData.node.template).some(checkBaseClass), hasBaseClassInBaseClasses: foundNode?.hasBaseClassInBaseClasses || - nodeData.node.base_classes.some((t) => baseClassesSet.has(t)), + 
nodeData.node.base_classes.some((baseClass) => baseClassesSet.has(baseClass)), }); } } @@ -136,8 +135,8 @@ export function groupByFamily(data, baseClasses, left, flow?: NodeType[]) { hasBaseClassInTemplate: Object.values(node.template).some( checkBaseClass ), - hasBaseClassInBaseClasses: node.base_classes.some((t) => - baseClassesSet.has(t) + hasBaseClassInBaseClasses: node.base_classes.some((baseClass) => + baseClassesSet.has(baseClass) ), }; checkedNodes.set(n, foundNode); @@ -163,13 +162,13 @@ export function groupByFamily(data, baseClasses, left, flow?: NodeType[]) { } return left - ? arrOfPossibleOutputs.map((t) => ({ - family: t.category, - type: t.full ? "" : t.nodes.join(", "), + ? arrOfPossibleOutputs.map((output) => ({ + family: output.category, + type: output.full ? "" : output.nodes.join(", "), })) - : arrOfPossibleInputs.map((t) => ({ - family: t.category, - type: t.full ? "" : t.nodes.join(", "), + : arrOfPossibleInputs.map((input) => ({ + family: input.category, + type: input.full ? 
"" : input.nodes.join(", "), })); } From e56fa4c7d5ee913fd91db8a4157e4dbb12d77c61 Mon Sep 17 00:00:00 2001 From: Igor Carvalho Date: Mon, 7 Aug 2023 17:40:24 -0300 Subject: [PATCH 61/90] Add initial api modal constant structure --- src/frontend/src/constants/constants.ts | 85 +++++++++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/src/frontend/src/constants/constants.ts b/src/frontend/src/constants/constants.ts index 0ff77cf45..2ac8df4fd 100644 --- a/src/frontend/src/constants/constants.ts +++ b/src/frontend/src/constants/constants.ts @@ -508,3 +508,88 @@ export const URL_EXCLUDED_FROM_ERROR_RETRIES = [ "/api/v1/custom_component", "/api/v1/validate/prompt", ]; + +export const tabsCode = [] + +export function tabsArray(codes: string[], method: number) { + if (!method) return + if (method === 0) { + return ( + [ + { + name: "cURL", + mode: "bash", + image: "https://curl.se/logo/curl-symbol-transparent.png", + language: "sh", + code: codes[0], + }, + { + name: "Python API", + mode: "python", + image: + "https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w", + language: "py", + code: codes[1], + }, + { + name: "Python Code", + mode: "python", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + language: "py", + code: codes[2], + }, + { + name: "Chat Widget HTML", + description: + "Insert this code anywhere in your <body> tag. 
To use with react and other libs, check our documentation.", + mode: "html", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + language: "py", + code: codes[3], + }, + ] + ); + } + return ( + [ + { + name: "cURL", + mode: "bash", + image: "https://curl.se/logo/curl-symbol-transparent.png", + language: "sh", + code: codes[0], + }, + { + name: "Python API", + mode: "python", + image: + "https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w", + language: "py", + code: codes[1], + }, + { + name: "Python Code", + mode: "python", + language: "py", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + code: codes[2], + }, + { + name: "Chat Widget HTML", + description: + "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", + mode: "html", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + language: "py", + code: codes[3], + }, + { + name: "Tweaks", + mode: "python", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + language: "py", + code: codes[4], + }, + ] + ); +}; From 7f31f7f519e4ac8ae1da5282a967bfe447cab89a Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Mon, 7 Aug 2023 18:35:14 -0300 Subject: [PATCH 62/90] refactor(chatComponent): simplify condition for setting canOpen state in Chat component The condition for setting the `canOpen` state in the Chat component was simplified by removing unnecessary checks for `formKeysData.input_keys` and `Object.keys(tabsState[flow.id].formKeysData.input_keys).length > 0`. The new condition only checks for the existence of `tabsState[flow.id].formKeysData`. This change improves code readability and reduces unnecessary complexity. 
--- src/frontend/src/components/chatComponent/index.tsx | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx index 3cbe5bbf1..ac551f6de 100644 --- a/src/frontend/src/components/chatComponent/index.tsx +++ b/src/frontend/src/components/chatComponent/index.tsx @@ -58,13 +58,7 @@ export default function Chat({ flow }: ChatType) { ) { setIsBuilt(false); } - if ( - tabsState && - tabsState[flow.id] && - tabsState[flow.id].formKeysData && - tabsState[flow.id].formKeysData.input_keys && - Object.keys(tabsState[flow.id].formKeysData.input_keys).length > 0 - ) { + if (tabsState && tabsState[flow.id] && tabsState[flow.id].formKeysData) { setCanOpen(true); } else { setCanOpen(false); From 9ee76fd18f9a1051aaedfa4fcc429aadb02c1f48 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 18:37:49 -0300 Subject: [PATCH 63/90] =?UTF-8?q?=F0=9F=90=9B=20fix(manager.py):=20change?= =?UTF-8?q?=20default=20value=20of=20chat=5Finputs=20from=20empty=20string?= =?UTF-8?q?=20to=20empty=20dictionary=20to=20improve=20data=20consistency?= =?UTF-8?q?=20and=20prevent=20potential=20errors=20=F0=9F=90=9B=20fix(util?= =?UTF-8?q?s.py):=20handle=20case=20when=20chat=5Finputs.message=20is=20No?= =?UTF-8?q?ne=20by=20assigning=20an=20empty=20dictionary=20to=20it=20to=20?= =?UTF-8?q?prevent=20potential=20errors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/chat/manager.py | 2 +- src/backend/langflow/chat/utils.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py index 1e93174e2..2c3427a12 100644 --- a/src/backend/langflow/chat/manager.py +++ b/src/backend/langflow/chat/manager.py @@ -117,7 +117,7 @@ class ChatManager: self, client_id: str, payload: Dict, langchain_object: Any ): # Process 
the graph data and chat message - chat_inputs = payload.pop("inputs", "") + chat_inputs = payload.pop("inputs", {}) chat_inputs = ChatMessage(message=chat_inputs) self.chat_history.add_message(client_id, chat_inputs) diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/chat/utils.py index 7db65b8e3..17c976eb9 100644 --- a/src/backend/langflow/chat/utils.py +++ b/src/backend/langflow/chat/utils.py @@ -21,9 +21,9 @@ async def process_graph( # Generate result and thought try: - if not chat_inputs.message: + if chat_inputs.message is None: logger.debug("No message provided") - raise ValueError("No message provided") + chat_inputs.message = {} logger.debug("Generating result and thought") result, intermediate_steps = await get_result_and_steps( From 239811dbff9c31616922b860a9d1ff222f1c51d0 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 18:46:13 -0300 Subject: [PATCH 64/90] =?UTF-8?q?=F0=9F=90=9B=20fix(chat.py):=20change=20"?= =?UTF-8?q?input=5Fkeys"=20value=20from=20an=20empty=20dictionary=20to=20N?= =?UTF-8?q?one=20to=20improve=20readability=20and=20reduce=20redundancy?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/chat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index dd3407d1b..06a2fdda0 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -155,7 +155,7 @@ async def stream_build(flow_id: str): ) else: input_keys_response = { - "input_keys": {}, + "input_keys": None, "memory_keys": [], "handle_keys": [], } From f61d265e77824dd448e773cbe492463455f200e9 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 18:52:19 -0300 Subject: [PATCH 65/90] =?UTF-8?q?=F0=9F=94=A7=20chore(vector=5Fstore.py):?= =?UTF-8?q?=20refactor=20initialize=5Fchroma=20function=20to=20remove=20un?= 
=?UTF-8?q?necessary=20if=20conditions=20and=20improve=20code=20readabilit?= =?UTF-8?q?y=20=F0=9F=94=A7=20chore(vector=5Fstore.py):=20remove=20chroma?= =?UTF-8?q?=5Fserver=5F=20keys=20from=20params=20dictionary=20if=20not=20n?= =?UTF-8?q?eeded=20to=20avoid=20potential=20conflicts?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/interface/initialize/vector_store.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index 12cf054a5..8330d7611 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -171,11 +171,7 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict): def initialize_chroma(class_object: Type[Chroma], params: dict): """Initialize a ChromaDB object from the params""" if ( # type: ignore - "chroma_server_host" in params - or "chroma_server_http_port" in params - or "chroma_server_ssl_enabled" in params - or "chroma_server_grpc_port" in params - or "chroma_server_cors_allow_origins" in params + "chroma_server_host" in params or "chroma_server_http_port" in params ): import chromadb # type: ignore @@ -186,6 +182,14 @@ def initialize_chroma(class_object: Type[Chroma], params: dict): } chroma_settings = chromadb.config.Settings(**settings_params) params["client_settings"] = chroma_settings + else: + # remove all chroma_server_ keys from params + params = { + key: value + for key, value in params.items() + if not key.startswith("chroma_server_") + } + persist = params.pop("persist", False) if not docs_in_params(params): params.pop("documents", None) From aff3d5302123078ddf49c4d352853bf1306a094e Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 20:06:32 -0300 Subject: [PATCH 66/90] 
=?UTF-8?q?=F0=9F=94=96=20chore(pyproject.toml):=20b?= =?UTF-8?q?ump=20package=20version=20from=200.4.1=20to=200.4.2=20?= =?UTF-8?q?=F0=9F=94=96=20chore(pyproject.toml):=20bump=20langchain=20vers?= =?UTF-8?q?ion=20from=200.0.250=20to=200.0.256?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 446 +++++++++++++++++++++++-------------------------- pyproject.toml | 4 +- 2 files changed, 210 insertions(+), 240 deletions(-) diff --git a/poetry.lock b/poetry.lock index 809dab71e..47c471ddc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -925,13 +925,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "ctransformers" -version = "0.2.18" +version = "0.2.21" description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." optional = true python-versions = "*" files = [ - {file = "ctransformers-0.2.18-py3-none-any.whl", hash = "sha256:6d693e269d797f2485c86c9e8a322acb1464e782bf22547ec57e09d4ccbb4523"}, - {file = "ctransformers-0.2.18.tar.gz", hash = "sha256:fc09c399baf16d99dd44aa6671d49aa4060d8904324c2527aa58e8831476c370"}, + {file = "ctransformers-0.2.21-py3-none-any.whl", hash = "sha256:18a0555d02f55a3935f5544b885038562f80e497a6197d8e871941a087dba546"}, + {file = "ctransformers-0.2.21.tar.gz", hash = "sha256:58e7a699050a106688b967faa59f377886e22a581fde6cd36821dfa541995677"}, ] [package.dependencies] @@ -939,6 +939,7 @@ huggingface-hub = "*" py-cpuinfo = ">=9.0.0,<10.0.0" [package.extras] +cuda = ["nvidia-cublas-cu12", "nvidia-cuda-runtime-cu12"] gptq = ["exllama (==0.1.0)"] tests = ["pytest"] @@ -2433,13 +2434,13 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-resources" -version = "6.0.0" +version = "6.0.1" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = 
"sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"}, - {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"}, + {file = "importlib_resources-6.0.1-py3-none-any.whl", hash = "sha256:134832a506243891221b88b4ae1213327eea96ceb4e407a00d790bb0626f45cf"}, + {file = "importlib_resources-6.0.1.tar.gz", hash = "sha256:4359457e42708462b9626a04657c6208ad799ceb41e5c58c57ffa0e6a098a5d4"}, ] [package.dependencies] @@ -2473,13 +2474,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.25.0" +version = "6.25.1" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.25.0-py3-none-any.whl", hash = "sha256:f0042e867ac3f6bca1679e6a88cbd6a58ed93a44f9d0866aecde6efe8de76659"}, - {file = "ipykernel-6.25.0.tar.gz", hash = "sha256:e342ce84712861be4b248c4a73472be4702c1b0dd77448bfd6bcfb3af9d5ddf9"}, + {file = "ipykernel-6.25.1-py3-none-any.whl", hash = "sha256:c8a2430b357073b37c76c21c52184db42f6b4b0e438e1eb7df3c4440d120497c"}, + {file = "ipykernel-6.25.1.tar.gz", hash = "sha256:050391364c0977e768e354bdb60cbbfbee7cbb943b1af1618382021136ffd42f"}, ] [package.dependencies] @@ -2855,13 +2856,13 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", [[package]] name = "langchain" -version = "0.0.250" +version = "0.0.256" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-0.0.250-py3-none-any.whl", hash = "sha256:65b3520f507e848edd88a35a70700971bbbf822fda65f621ccf44a3bb36ad03a"}, - {file = "langchain-0.0.250.tar.gz", hash = "sha256:1b5775d6a472f633bb06e794f58cb6ff5d1eeb2da603b64a6a15013f8f61ee3f"}, + {file = "langchain-0.0.256-py3-none-any.whl", hash = "sha256:3389fcb85d8d4fb16bae5ca9995d3ce634a3330f8ac1f458afc6171e4ca52de5"}, + {file = "langchain-0.0.256.tar.gz", hash = 
"sha256:b80115e19f86199c49bca8ef18c09d2d87548332a0144a1c5ce6a2f82e4f5f9c"}, ] [package.dependencies] @@ -2873,7 +2874,7 @@ numexpr = ">=2.8.4,<3.0.0" numpy = ">=1,<2" openapi-schema-pydantic = ">=1.2,<2.0" pydantic = ">=1,<2" -PyYAML = ">=5.4.1" +PyYAML = ">=5.3" requests = ">=2,<3" SQLAlchemy = ">=1.4,<3" tenacity = ">=8.1.0,<9.0.0" @@ -2885,7 +2886,7 @@ clarifai = ["clarifai (>=9.1.0)"] cohere = ["cohere (>=4,<5)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xinference (>=0.0.6,<0.0.7)", "zep-python (>=0.32)"] +extended-testing = ["amazon-textract-caller (<2)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", 
"lxml (>=4.9.2,<5.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xinference (>=0.0.6,<0.0.7)", "zep-python (>=0.32)"] javascript = ["esprima (>=4.0.1,<5.0.0)"] llms = ["anthropic (>=0.3,<0.4)", "clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.19)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)", "xinference (>=0.0.6,<0.0.7)"] openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"] @@ -2908,17 +2909,17 @@ langchain = ">=0.0.239" [[package]] name = "langchain-serve" -version = "0.0.58" +version = "0.0.59" description = "Langchain Serve - serve your langchain apps on Jina AI Cloud." optional = true python-versions = "*" files = [ - {file = "langchain-serve-0.0.58.tar.gz", hash = "sha256:9ec65c8eeda9f777fb8b79117d92839873b10030fd832184ba891d4d3a065fff"}, + {file = "langchain-serve-0.0.59.tar.gz", hash = "sha256:28ff4ba1b640223158413296ac5b9198eac7cbd206d3386855ad00066a9f91d6"}, ] [package.dependencies] click = "*" -jcloud = ">=0.2.13" +jcloud = ">=0.2.16" jina = "3.15.2" jina-hubble-sdk = "*" langchain = "*" @@ -2933,13 +2934,13 @@ test = ["psutil", "pytest", "pytest-asyncio"] [[package]] name = "langsmith" -version = "0.0.18" +version = "0.0.19" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.18-py3-none-any.whl", hash = "sha256:58f3eb029495e3696c3c94c95adfb2dd15043d82928ef932a7829fe020ad5522"}, - {file = "langsmith-0.0.18.tar.gz", hash = "sha256:d8d97542f99352c5470bff74a4fbc9bd322dea850217a8ab71badcefa041566b"}, + {file = "langsmith-0.0.19-py3-none-any.whl", hash = "sha256:ae240030fd0b98e9467fbf19ac6d58a0a4ffcc1db8462625141dae6178e62c68"}, + {file = "langsmith-0.0.19.tar.gz", hash = "sha256:e91a2cd101456e2f8d6015c9ea371d6556eb6072a1b20d4793479855163ae28f"}, ] [package.dependencies] @@ -3594,41 +3595,41 @@ twitter = ["twython"] [[package]] name = "numexpr" -version = "2.8.4" +version = "2.8.5" description = "Fast numerical expression evaluator for NumPy" optional = false python-versions = ">=3.7" files = [ - {file = "numexpr-2.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a75967d46b6bd56455dd32da6285e5ffabe155d0ee61eef685bbfb8dafb2e484"}, - {file = "numexpr-2.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db93cf1842f068247de631bfc8af20118bf1f9447cd929b531595a5e0efc9346"}, - {file = "numexpr-2.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bca95f4473b444428061d4cda8e59ac564dc7dc6a1dea3015af9805c6bc2946"}, - {file = "numexpr-2.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e34931089a6bafc77aaae21f37ad6594b98aa1085bb8b45d5b3cd038c3c17d9"}, - {file = "numexpr-2.8.4-cp310-cp310-win32.whl", hash = "sha256:f3a920bfac2645017110b87ddbe364c9c7a742870a4d2f6120b8786c25dc6db3"}, - {file = "numexpr-2.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:6931b1e9d4f629f43c14b21d44f3f77997298bea43790cfcdb4dd98804f90783"}, - {file = "numexpr-2.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9400781553541f414f82eac056f2b4c965373650df9694286b9bd7e8d413f8d8"}, - {file = "numexpr-2.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ee9db7598dd4001138b482342b96d78110dd77cefc051ec75af3295604dde6a"}, 
- {file = "numexpr-2.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff5835e8af9a212e8480003d731aad1727aaea909926fd009e8ae6a1cba7f141"}, - {file = "numexpr-2.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:655d84eb09adfee3c09ecf4a89a512225da153fdb7de13c447404b7d0523a9a7"}, - {file = "numexpr-2.8.4-cp311-cp311-win32.whl", hash = "sha256:5538b30199bfc68886d2be18fcef3abd11d9271767a7a69ff3688defe782800a"}, - {file = "numexpr-2.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:3f039321d1c17962c33079987b675fb251b273dbec0f51aac0934e932446ccc3"}, - {file = "numexpr-2.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c867cc36cf815a3ec9122029874e00d8fbcef65035c4a5901e9b120dd5d626a2"}, - {file = "numexpr-2.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:059546e8f6283ccdb47c683101a890844f667fa6d56258d48ae2ecf1b3875957"}, - {file = "numexpr-2.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:845a6aa0ed3e2a53239b89c1ebfa8cf052d3cc6e053c72805e8153300078c0b1"}, - {file = "numexpr-2.8.4-cp37-cp37m-win32.whl", hash = "sha256:a38664e699526cb1687aefd9069e2b5b9387da7feac4545de446141f1ef86f46"}, - {file = "numexpr-2.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eaec59e9bf70ff05615c34a8b8d6c7bd042bd9f55465d7b495ea5436f45319d0"}, - {file = "numexpr-2.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b318541bf3d8326682ebada087ba0050549a16d8b3fa260dd2585d73a83d20a7"}, - {file = "numexpr-2.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b076db98ca65eeaf9bd224576e3ac84c05e451c0bd85b13664b7e5f7b62e2c70"}, - {file = "numexpr-2.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90f12cc851240f7911a47c91aaf223dba753e98e46dff3017282e633602e76a7"}, - {file = "numexpr-2.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c368aa35ae9b18840e78b05f929d3a7b3abccdba9630a878c7db74ca2368339"}, - {file = 
"numexpr-2.8.4-cp38-cp38-win32.whl", hash = "sha256:b96334fc1748e9ec4f93d5fadb1044089d73fb08208fdb8382ed77c893f0be01"}, - {file = "numexpr-2.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:a6d2d7740ae83ba5f3531e83afc4b626daa71df1ef903970947903345c37bd03"}, - {file = "numexpr-2.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:77898fdf3da6bb96aa8a4759a8231d763a75d848b2f2e5c5279dad0b243c8dfe"}, - {file = "numexpr-2.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df35324666b693f13a016bc7957de7cc4d8801b746b81060b671bf78a52b9037"}, - {file = "numexpr-2.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ac9cfe6d0078c5fc06ba1c1bbd20b8783f28c6f475bbabd3cad53683075cab"}, - {file = "numexpr-2.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3a1f6b24214a1ab826e9c1c99edf1686c8e307547a9aef33910d586f626d01"}, - {file = "numexpr-2.8.4-cp39-cp39-win32.whl", hash = "sha256:7d71add384adc9119568d7e9ffa8a35b195decae81e0abf54a2b7779852f0637"}, - {file = "numexpr-2.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:9f096d707290a6a00b6ffdaf581ee37331109fb7b6c8744e9ded7c779a48e517"}, - {file = "numexpr-2.8.4.tar.gz", hash = "sha256:d5432537418d18691b9115d615d6daa17ee8275baef3edf1afbbf8bc69806147"}, + {file = "numexpr-2.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51f3ab160c3847ebcca93cd88f935a7802b54a01ab63fe93152994a64d7a6cf2"}, + {file = "numexpr-2.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:de29c77f674e4eb8f0846525a475cab64008c227c8bc4ba5153ab3f72441cc63"}, + {file = "numexpr-2.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf85ba1327eb87ec82ae7936f13c8850fb969a0ca34f3ba9fa3897c09d5c80d7"}, + {file = "numexpr-2.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c00be69f747f44a631830215cab482f0f77f75af2925695adff57c1cc0f9a68"}, + {file = "numexpr-2.8.5-cp310-cp310-win32.whl", hash = 
"sha256:c46350dcdb93e32f033eea5a21269514ffcaf501d9abd6036992d37e48a308b0"}, + {file = "numexpr-2.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:894b027438b8ec88dea32a19193716c79f4ff8ddb92302dcc9731b51ba3565a8"}, + {file = "numexpr-2.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6df184d40d4cf9f21c71f429962f39332f7398147762588c9f3a5c77065d0c06"}, + {file = "numexpr-2.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:178b85ad373c6903e55d75787d61b92380439b70d94b001cb055a501b0821335"}, + {file = "numexpr-2.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:578fe4008e4d5d6ff01bbeb2d7b7ba1ec658a5cda9c720cd26a9a8325f8ef438"}, + {file = "numexpr-2.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef621b4ee366a5c6a484f6678c9259f5b826569f8bfa0b89ba2306d5055468bb"}, + {file = "numexpr-2.8.5-cp311-cp311-win32.whl", hash = "sha256:dd57ab1a3d3aaa9274aff1cefbf93b8ddacc7973afef5b125905f6bf18fabab0"}, + {file = "numexpr-2.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:783324ba40eb804ecfc9ebae86120a1e339ab112d0ab8a1f0d48a26354d5bf9b"}, + {file = "numexpr-2.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:183d5430db76826e54465c69db93a3c6ecbf03cda5aa1bb96eaad0147e9b68dc"}, + {file = "numexpr-2.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ce106f92ccea5b07b1d6f2f3c4370f05edf27691dc720a63903484a2137e48"}, + {file = "numexpr-2.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b594dc9e2d6291a0bc5c065e6d9caf3eee743b5663897832e9b17753c002947a"}, + {file = "numexpr-2.8.5-cp37-cp37m-win32.whl", hash = "sha256:62b4faf8e0627673b0210a837792bddd23050ecebc98069ab23eb0633ff1ef5f"}, + {file = "numexpr-2.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:db5c65417d69414f1ab31302ea01d3548303ef31209c38b4849d145be4e1d1ba"}, + {file = "numexpr-2.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb36ffcfa1606e41aa08d559b4277bcad0e16b83941d1a4fee8d2bd5a34f8e0e"}, 
+ {file = "numexpr-2.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34af2a0e857d02a4bc5758bc037a777d50dacb13bcd57c7905268a3e44994ed6"}, + {file = "numexpr-2.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a8dad2bfaad5a5c34a2e8bbf62b9df1dfab266d345fda1feb20ff4e264b347a"}, + {file = "numexpr-2.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93f5a866cd13a808bc3d3a9c487d94cd02eec408b275ff0aa150f2e8e5191f8"}, + {file = "numexpr-2.8.5-cp38-cp38-win32.whl", hash = "sha256:558390fea6370003ac749ed9d0f38d708aa096f5dcb707ddb6e0ca5a0dd37da1"}, + {file = "numexpr-2.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:55983806815035eb63c5039520688c49536bb7f3cc3fc1d7d64c6a00cf3f353e"}, + {file = "numexpr-2.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1510da20e6f5f45333610b1ded44c566e2690c6c437c84f2a212ca09627c7e01"}, + {file = "numexpr-2.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e8b5bf7bcb4e8dcd66522d8fc96e1db7278f901cb4fd2e155efbe62a41dde08"}, + {file = "numexpr-2.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ed0e1c1ef5f34381448539f1fe9015906d21c9cfa2797c06194d4207dadb465"}, + {file = "numexpr-2.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aea6ab45c87c0a7041183c08a798f0ad4d7c5eccbce20cfe79ce6f1a45ef3702"}, + {file = "numexpr-2.8.5-cp39-cp39-win32.whl", hash = "sha256:cbfd833ee5fdb0efb862e152aee7e6ccea9c596d5c11d22604c2e6307bff7cad"}, + {file = "numexpr-2.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:283ce8609a7ccbadf91a68f3484558b3e36d27c93c98a41ec205efb0ab43c872"}, + {file = "numexpr-2.8.5.tar.gz", hash = "sha256:45ed41e55a0abcecf3d711481e12a5fb7a904fe99d42bc282a17cc5f8ea510be"}, ] [package.dependencies] @@ -4035,68 +4036,39 @@ files = [ [[package]] name = "orjson" -version = "3.9.2" +version = "3.9.4" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = 
">=3.7" files = [ - {file = "orjson-3.9.2-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7323e4ca8322b1ecb87562f1ec2491831c086d9faa9a6c6503f489dadbed37d7"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1272688ea1865f711b01ba479dea2d53e037ea00892fd04196b5875f7021d9d3"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b9a26f1d1427a9101a1e8910f2e2df1f44d3d18ad5480ba031b15d5c1cb282e"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a5ca55b0d8f25f18b471e34abaee4b175924b6cd62f59992945b25963443141"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:877872db2c0f41fbe21f852ff642ca842a43bc34895b70f71c9d575df31fffb4"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a39c2529d75373b7167bf84c814ef9b8f3737a339c225ed6c0df40736df8748"}, - {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84ebd6fdf138eb0eb4280045442331ee71c0aab5e16397ba6645f32f911bfb37"}, - {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a60a1cfcfe310547a1946506dd4f1ed0a7d5bd5b02c8697d9d5dcd8d2e9245e"}, - {file = "orjson-3.9.2-cp310-none-win_amd64.whl", hash = "sha256:c290c4f81e8fd0c1683638802c11610b2f722b540f8e5e858b6914b495cf90c8"}, - {file = "orjson-3.9.2-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:02ef014f9a605e84b675060785e37ec9c0d2347a04f1307a9d6840ab8ecd6f55"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:992af54265ada1c1579500d6594ed73fe333e726de70d64919cf37f93defdd06"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a40958f7af7c6d992ee67b2da4098dca8b770fc3b4b3834d540477788bfa76d3"}, 
- {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93864dec3e3dd058a2dbe488d11ac0345214a6a12697f53a63e34de7d28d4257"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16fdf5a82df80c544c3c91516ab3882cd1ac4f1f84eefeafa642e05cef5f6699"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275b5a18fd9ed60b2720543d3ddac170051c43d680e47d04ff5203d2c6d8ebf1"}, - {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b9aea6dcb99fcbc9f6d1dd84fca92322fda261da7fb014514bb4689c7c2097a8"}, - {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d74ae0e101d17c22ef67b741ba356ab896fc0fa64b301c2bf2bb0a4d874b190"}, - {file = "orjson-3.9.2-cp311-none-win_amd64.whl", hash = "sha256:6320b28e7bdb58c3a3a5efffe04b9edad3318d82409e84670a9b24e8035a249d"}, - {file = "orjson-3.9.2-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:368e9cc91ecb7ac21f2aa475e1901204110cf3e714e98649c2502227d248f947"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58e9e70f0dcd6a802c35887f306b555ff7a214840aad7de24901fc8bd9cf5dde"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00c983896c2e01c94c0ef72fd7373b2aa06d0c0eed0342c4884559f812a6835b"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ee743e8890b16c87a2f89733f983370672272b61ee77429c0a5899b2c98c1a7"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7b065942d362aad4818ff599d2f104c35a565c2cbcbab8c09ec49edba91da75"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e46e9c5b404bb9e41d5555762fd410d5466b7eb1ec170ad1b1609cbebe71df21"}, - {file = 
"orjson-3.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8170157288714678ffd64f5de33039e1164a73fd8b6be40a8a273f80093f5c4f"}, - {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e3e2f087161947dafe8319ea2cfcb9cea4bb9d2172ecc60ac3c9738f72ef2909"}, - {file = "orjson-3.9.2-cp37-none-win_amd64.whl", hash = "sha256:d7de3dbbe74109ae598692113cec327fd30c5a30ebca819b21dfa4052f7b08ef"}, - {file = "orjson-3.9.2-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8cd4385c59bbc1433cad4a80aca65d2d9039646a9c57f8084897549b55913b17"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74036aab1a80c361039290cdbc51aa7adc7ea13f56e5ef94e9be536abd227bd"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1aaa46d7d4ae55335f635eadc9be0bd9bcf742e6757209fc6dc697e390010adc"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e52c67ed6bb368083aa2078ea3ccbd9721920b93d4b06c43eb4e20c4c860046"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a6cdfcf9c7dd4026b2b01fdff56986251dc0cc1e980c690c79eec3ae07b36e7"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1882a70bb69595b9ec5aac0040a819e94d2833fe54901e2b32f5e734bc259a8b"}, - {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc05e060d452145ab3c0b5420769e7356050ea311fc03cb9d79c481982917cca"}, - {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f8bc2c40d9bb26efefb10949d261a47ca196772c308babc538dd9f4b73e8d386"}, - {file = "orjson-3.9.2-cp38-none-win_amd64.whl", hash = "sha256:3164fc20a585ec30a9aff33ad5de3b20ce85702b2b2a456852c413e3f0d7ab09"}, - {file = "orjson-3.9.2-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:7a6ccadf788531595ed4728aa746bc271955448d2460ff0ef8e21eb3f2a281ba"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3245d230370f571c945f69aab823c279a868dc877352817e22e551de155cb06c"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:205925b179550a4ee39b8418dd4c94ad6b777d165d7d22614771c771d44f57bd"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0325fe2d69512187761f7368c8cda1959bcb75fc56b8e7a884e9569112320e57"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:806704cd58708acc66a064a9a58e3be25cf1c3f9f159e8757bd3f515bfabdfa1"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03fb36f187a0c19ff38f6289418863df8b9b7880cdbe279e920bef3a09d8dab1"}, - {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20925d07a97c49c6305bff1635318d9fc1804aa4ccacb5fb0deb8a910e57d97a"}, - {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eebfed53bec5674e981ebe8ed2cf00b3f7bcda62d634733ff779c264307ea505"}, - {file = "orjson-3.9.2-cp39-none-win_amd64.whl", hash = "sha256:869b961df5fcedf6c79f4096119b35679b63272362e9b745e668f0391a892d39"}, - {file = "orjson-3.9.2.tar.gz", hash = "sha256:24257c8f641979bf25ecd3e27251b5cc194cdd3a6e96004aac8446f5e63d9664"}, + {file = "orjson-3.9.4-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2e83ec1ee66d83b558a6d273d8a01b86563daa60bea9bc040e2c1cb8008de61f"}, + {file = "orjson-3.9.4-cp310-none-win32.whl", hash = "sha256:04cd7f4a4f4cd2fe43d104eb70e7435c6fcbdde7aa0cde4230e444fbc66924d3"}, + {file = "orjson-3.9.4-cp310-none-win_amd64.whl", hash = "sha256:4fdb59cfa00e10c82e09d1c32a9ce08a38bd29496ba20a73cd7f498e3a0a5024"}, + {file = 
"orjson-3.9.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:daeed2502ddf1f2b29ec8da2fe2ea82807a5c4acf869608ce6c476db8171d070"}, + {file = "orjson-3.9.4-cp311-none-win32.whl", hash = "sha256:e12492ce65cb10f385e70a88badc6046bc720fa7d468db27b7429d85d41beaeb"}, + {file = "orjson-3.9.4-cp311-none-win_amd64.whl", hash = "sha256:3b9f8bf43a5367d5522f80e7d533c98d880868cd0b640b9088c9237306eca6e8"}, + {file = "orjson-3.9.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:0b400cf89c15958cd829c8a4ade8f5dd73588e63d2fb71a00483e7a74e9f92da"}, + {file = "orjson-3.9.4-cp312-none-win_amd64.whl", hash = "sha256:a533e664a0e3904307d662c5d45775544dc2b38df6e39e213ff6a86ceaa3d53c"}, + {file = "orjson-3.9.4-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:149d1b7630771222f73ecb024ab5dd8e7f41502402b02015494d429bacc4d5c1"}, + {file = "orjson-3.9.4-cp37-none-win32.whl", hash = "sha256:bcda6179eb863c295eb5ea832676d33ef12c04d227b4c98267876c8322e5a96e"}, + {file = "orjson-3.9.4-cp37-none-win_amd64.whl", hash = "sha256:3d947366127abef192419257eb7db7fcee0841ced2b49ccceba43b65e9ce5e3f"}, + {file = "orjson-3.9.4-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a7d029fc34a516f7eae29b778b30371fcb621134b2acfe4c51c785102aefc6cf"}, + {file = "orjson-3.9.4-cp38-none-win32.whl", hash = "sha256:94d15ee45c2aaed334688e511aa73b4681f7c08a0810884c6b3ae5824dea1222"}, + {file = "orjson-3.9.4-cp38-none-win_amd64.whl", hash = "sha256:336ec8471102851f0699198031924617b7a77baadea889df3ffda6000bd59f4c"}, + {file = "orjson-3.9.4-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2f57ccb50e9e123709e9f2d7b1a9e09e694e49d1fa5c5585e34b8e3f01929dc3"}, + {file = "orjson-3.9.4-cp39-none-win32.whl", hash = "sha256:b5b5038187b74e2d33e5caee8a7e83ddeb6a21da86837fa2aac95c69aeb366e6"}, + {file = 
"orjson-3.9.4-cp39-none-win_amd64.whl", hash = "sha256:915da36bc93ef0c659fa50fe7939d4f208804ad252fc4fc8d55adbbb82293c48"}, ] [[package]] name = "overrides" -version = "7.3.1" +version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"}, - {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"}, + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, ] [[package]] @@ -4575,13 +4547,13 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg" -version = "3.1.9" +version = "3.1.10" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg-3.1.9-py3-none-any.whl", hash = "sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc"}, - {file = "psycopg-3.1.9.tar.gz", hash = "sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9"}, + {file = "psycopg-3.1.10-py3-none-any.whl", hash = "sha256:8bbeddae5075c7890b2fa3e3553440376d3c5e28418335dee3c3656b06fa2b52"}, + {file = "psycopg-3.1.10.tar.gz", hash = "sha256:15b25741494344c24066dc2479b0f383dd1b82fa5e75612fa4fa5bb30726e9b6"}, ] [package.dependencies] @@ -4589,145 +4561,143 @@ typing-extensions = ">=4.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.1.9)"] -c = ["psycopg-c (==3.1.9)"] -dev = ["black (>=23.1.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.2)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.1.10)"] +c = 
["psycopg-c (==3.1.10)"] +dev = ["black (>=23.1.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] -test = ["anyio (>=3.6.2)", "mypy (>=1.2)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=3.6.2)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.1.9" +version = "3.1.10" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg_binary-3.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:284038cbe3f5a0f3de417af9b5eaa2a9524a3a06211523cf245111c71b566506"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2cea4bb0b19245c83486868d7c66f73238c4caa266b5b3c3d664d10dab2ab56"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe5c5c31f59ccb1d1f473466baa93d800138186286e80e251f930e49c80d208"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82704a899d57c29beba5399d41eab5ef5c238b810d7e25e2d1916d2b34c4b1a3"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eab449e39db1c429cac79b7aa27e6827aad4995f32137e922db7254f43fed7b5"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87e0c97733b11eeca3d24e56df70f3f9d792b2abd46f48be2fb2348ffc3e7e39"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81e34d6df54329424944d5ca91b1cc77df6b8a9130cb5480680d56f53d4e485c"}, - {file = 
"psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e2f463079d99568a343ed0b766150b30627e9ed41de99fd82e945e7e2bec764a"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f2cbdef6568da21c39dfd45c2074e85eabbd00e1b721832ba94980f01f582dd4"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53afb0cc2ebe74651f339e22d05ec082a0f44939715d9138d357852f074fcf55"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-win_amd64.whl", hash = "sha256:09167f106e7685591b4cdf58eff0191fb7435d586f384133a0dd30df646cf409"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8aaa47c1791fc05c0229ec1003dd49e13238fba9434e1fc3b879632f749c3c4"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d91ee0d33ac7b42d0488a9be2516efa2ec00901b81d69566ff34a7a94b66c0b"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e36504373e5bcdc954b1da1c6fe66379007fe1e329790e8fb72b879a01e097"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c1def6c2d28e257325b3b208cf1966343b498282a0f4d390fda7b7e0577da64"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055537a9c20efe9bf17cb72bd879602eda71de6f737ebafa1953e017c6a37fbe"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b164355d023a91b23dcc4bb3112bc7d6e9b9c938fb5abcb6e54457d2da1f317"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03b08545ce1c627f4d5e6384eda2946660c4ba6ceb0a09ae47de07419f725669"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1e31bac3d2d41e6446b20b591f638943328c958f4d1ce13d6f1c5db97c3a8dee"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a274c63c8fb9d419509bed2ef72befc1fd04243972e17e7f5afc5725cb13a560"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98d9d156b9ada08c271a79662fc5fcc1731b4d7c1f651ef5843d818d35f15ba0"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-win_amd64.whl", hash = "sha256:c3a13aa022853891cadbc7256a9804e5989def760115c82334bddf0d19783b0b"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1a321ef3579a8de0545ade6ff1edfde0c88b8847d58c5615c03751c76054796"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5833bda4c14f24c6a8ac08d3c5712acaa4f35aab31f9ccd2265e9e9a7d0151c8"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a207d5a7f4212443b7452851c9ccd88df9c6d4d58fa2cea2ead4dd9cb328e578"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:07414daa86662f7657e9fabe49af85a32a975e92e6568337887d9c9ffedc224f"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17c5d4936c746f5125c6ef9eb43655e27d4d0c9ffe34c3073878b43c3192511d"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5cdc13c8ec1437240801e43d07e27ff6479ac9dd8583ecf647345bfd2e8390e4"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3836bdaf030a5648bd5f5b452e4b068b265e28f9199060c5b70dbf4a218cde6e"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:96725d9691a84a21eb3e81c884a2e043054e33e176801a57a05e9ac38d142c6e"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dade344aa90bb0b57d1cfc13304ed83ab9a36614b8ddd671381b2de72fe1483d"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-win_amd64.whl", hash = "sha256:db866cc557d9761036771d666d17fa4176c537af7e6098f42a6bf8f64217935f"}, - {file = 
"psycopg_binary-3.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b62545cc64dd69ea0ae5ffe18d7c97e03660ab8244aa8c5172668a21c41daa0"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:058ab0d79be0b229338f0e61fec6f475077518cba63c22c593645a69f01c3e23"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2340ca2531f69e5ebd9d18987362ba57ed6ab6a271511d8026814a46a2a87b59"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b816ce0e27a2a8786d34b61d3e36e01029245025879d64b88554326b794a4f0"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b36fe4314a784fbe45c9fd71c902b9bf57341aff9b97c0cbd22f8409a271e2f"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b246fed629482b06f938b23e9281c4af592329daa3ec2cd4a6841ccbfdeb4d68"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:90787ac05b932c0fc678cbf470ccea9c385b8077583f0490136b4569ed3fb652"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9c114f678e8f4a96530fa79cfd84f65f26358ecfc6cca70cfa2d5e3ae5ef217a"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3a82e77400d1ef6c5bbcf3e600e8bdfacf1a554512f96c090c43ceca3d1ce3b6"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7d990f14a37345ca05a5192cd5ac938c9cbedca9c929872af6ae311158feb0e"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-win_amd64.whl", hash = "sha256:e0ca74fd85718723bb9f08e0c6898e901a0c365aef20b3c3a4ef8709125d6210"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce8f4dea5934aa6c4933e559c74bef4beb3413f51fbcf17f306ce890216ac33a"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:f41a9e0de4db194c053bcc7c00c35422a4d19d92a8187e8065b1c560626efe35"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f94a7985135e084e122b143956c6f589d17aef743ecd0a434a3d3a222631d5a"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb86d58b90faefdc0bbedf08fdea4cc2afcb1cfa4340f027d458bfd01d8b812"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c696dc84f9ff155761df15779181d8e4af7746b98908e130add8259912e4bb7"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4213953da44324850c8f789301cf665f46fb94301ba403301e7af58546c3a428"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:25e3ce947aaaa1bd9f1920fca76d7281660646304f9ea5bc036b201dd8790655"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9c75be2a9b986139e3ff6bc0a2852081ac00811040f9b82d3aa539821311122e"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:63e8d1dbe253657c70dbfa9c59423f4654d82698fc5ed6868b8dc0765abe20b6"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f4da4ca9b2365fc1d3fc741c3bbd3efccd892ce813444b884c8911a1acf1c932"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-win_amd64.whl", hash = "sha256:c0b8d6bbeff1dba760a208d8bc205a05b745e6cee02b839f969f72cf56a8b80d"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a529c203f6e0f4c67ba27cf8f9739eb3bc880ad70d6ad6c0e56c2230a66b5a09"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bd6e14d1aeb12754a43446c77a5ce819b68875cc25ae6538089ef90d7f6dd6f7"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1583ced5948cf88124212c4503dfe5b01ac3e2dd1a2833c083917f4c4aabe8b4"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2098721c486478987be700723b28ec7a48f134eba339de36af0e745f37dfe461"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e61f7b412fca7b15dd043a0b22fd528d2ed8276e76b3764c3889e29fa65082b"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0f33e33a072e3d5af51ee4d4a439e10dbe623fe87ef295d5d688180d529f13f"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f6f7738c59262d8d19154164d99c881ed58ed377fb6f1d685eb0dc43bbcd8022"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:511d38b1e1961d179d47d5103ba9634ecfc7ead431d19a9337ef82f3a2bca807"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:666e7acf2ffdb5e8a58e8b0c1759facdb9688c7e90ee8ca7aed675803b57404d"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:57b93c756fee5f7c7bd580c34cd5d244f7d5638f8b2cf25333f97b9b8b2ebfd1"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-win_amd64.whl", hash = "sha256:a1d61b7724c7215a8ea4495a5c6b704656f4b7bb6165f4cb9989b685886ebc48"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36fff836a7823c9d71fa7faa333c74b2b081af216cebdbb0f481dce55ee2d974"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:32caf98cb00881bfcbbbae39a15f2a4e08b79ff983f1c0f13b60a888ef6e8431"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5565a6a86fee8d74f30de89e07f399567cdf59367aeb09624eb690d524339076"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9fb0d64520b29bd80a6731476ad8e1c20348dfdee00ab098899d23247b641675"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfc05ed4e74fa8615d7cc2bd57f00f97662f4e865a731dbd43da9a527e289c8c"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b59c8cff887757ddf438ff9489d79c5e6b717112c96f5c68e16f367ff8724e"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbaf12361136afefc5faab21a174a437e71c803b083f410e5140c7605bc66b"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ff72576061c774bcce5f5440b93e63d4c430032dd056d30f6cb1988e549dd92c"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a4e91e1a8d61c60f592a1dfcebdf55e52a29fe4fdb650c5bd5414c848e77d029"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f7187269d825e84c945be7d93dd5088a4e0b6481a4bdaba3bf7069d4ac13703d"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-win_amd64.whl", hash = "sha256:ba7812a593c16d9d661844dc8dd4d81548fd1c2a0ee676f3e3d8638369f4c5e4"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88caa5859740507b3596c6c2e00ceaccee2c6ab5317bc535887801ad3cc7f3e1"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a3a7e99ba10c2e83a48d79431560e0d5ca7865f68f2bac3a462dc2b151e9926"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:848f4f4707dc73f4b4e844c92f3de795b2ddb728f75132602bda5e6ba55084fc"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:415961e839bb49cfd75cd961503fb8846c0768f247db1fa7171c1ac61d38711b"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0471869e658d0c6b8c3ed53153794739c18d7dad2dd5b8e6ff023a364c20f7df"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4290060ee0d856caa979ecf675c0e6959325f508272ccf27f64c3801c7bcbde7"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:abf04bc06c8f6a1ac3dc2106d3b79c8661352e9d8a57ca2934ffa6aae8fe600a"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:51fe70708243b83bf16710d8c11b61bd46562e6a24a6300d5434380b35911059"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b658f7f8b49fb60a1c52e3f6692f690a85bdf1ad30aafe0f3f1fd74f6958cf8"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-win_amd64.whl", hash = "sha256:ffc8c796194f23b9b07f6d25f927ec4df84a194bbc7a1f9e73316734eef512f9"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74ce92122be34cf0e5f06d79869e1001c8421a68fa7ddf6fe38a717155cf3a64"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:75608a900984061c8898be68fbddc6f3da5eefdffce6e0624f5371645740d172"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6670d160d054466e8fdedfbc749ef8bf7dfdf69296048954d24645dd4d3d3c01"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d32026cfab7ba7ac687a42c33345026a2fb6fc5608a6144077f767af4386be0b"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:908fa388a5b75dfd17a937acb24708bd272e21edefca9a495004c6f70ec2636a"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e46b97073bd4de114f475249d681eaf054e950699c5d7af554d3684db39b82d"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9cf56bb4b115def3a18157f3b3b7d8322ee94a8dea30028db602c8f9ae34ad1e"}, + {file = 
"psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3b6c6f90241c4c5a6ca3f0d8827e37ef90fdc4deb9d8cfa5678baa0ea374b391"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:747176a6aeb058079f56c5397bd90339581ab7b3cc0d62e7445654e6a484c7e1"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41a415e78c457b06497fa0084e4ea7245ca1a377b55756dd757034210b64da7e"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-win_amd64.whl", hash = "sha256:a7bbe9017edd898d7b3a8747700ed045dda96a907dff87f45e642e28d8584481"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f062f20256708929a58c41d44f350efced4c00a603323d1413f6dc0b84d95a5"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dea30f2704337ca2d0322fccfe1fa30f61ce9185de3937eb986321063114a51f"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9d88ac72531034ebf7ec09114e732b066a9078f4ce213cf65cc5e42eb538d30"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2bea0940d69c3e24a72530730952687912893b34c53aa39e79045e7b446174d"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a691dc8e2436d9c1e5cf93902d63e9501688fccc957eb22f952d37886257470"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa92661f99351765673835a4d936d79bd24dfbb358b29b084d83be38229a90e4"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:30eb731ed5525d8df892db6532cc8ffd8a163b73bc355127dee9c49334e16eee"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50bf7a59d3a85a82d466fed341d352b44d09d6adc18656101d163a7cfc6509a0"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:f48665947c55f8d6eb3f0be98de80411508e1ec329f354685329b57fced82c7f"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:caa771569da01fc0389ca34920c331a284425a68f92d1ba0a80cc08935f8356e"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-win_amd64.whl", hash = "sha256:b30887e631fd67affaed98f6cd2135b44f2d1a6d9bca353a69c3889c78bd7aa8"}, ] [[package]] name = "psycopg2-binary" -version = "2.9.6" +version = "2.9.7" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.6" files = [ - {file = "psycopg2-binary-2.9.6.tar.gz", hash = "sha256:1f64dcfb8f6e0c014c7f55e51c9759f024f70ea572fbdef123f85318c297947c"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d26e0342183c762de3276cca7a530d574d4e25121ca7d6e4a98e4f05cb8e4df7"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c48d8f2db17f27d41fb0e2ecd703ea41984ee19362cbce52c097963b3a1b4365"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffe9dc0a884a8848075e576c1de0290d85a533a9f6e9c4e564f19adf8f6e54a7"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a76e027f87753f9bd1ab5f7c9cb8c7628d1077ef927f5e2446477153a602f2c"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6460c7a99fc939b849431f1e73e013d54aa54293f30f1109019c56a0b2b2ec2f"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae102a98c547ee2288637af07393dd33f440c25e5cd79556b04e3fca13325e5f"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9972aad21f965599ed0106f65334230ce826e5ae69fda7cbd688d24fa922415e"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:7a40c00dbe17c0af5bdd55aafd6ff6679f94a9be9513a4c7e071baf3d7d22a70"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cacbdc5839bdff804dfebc058fe25684cae322987f7a38b0168bc1b2df703fb1"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f0438fa20fb6c7e202863e0d5ab02c246d35efb1d164e052f2f3bfe2b152bd0"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-win32.whl", hash = "sha256:b6c8288bb8a84b47e07013bb4850f50538aa913d487579e1921724631d02ea1b"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:61b047a0537bbc3afae10f134dc6393823882eb263088c271331602b672e52e9"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:964b4dfb7c1c1965ac4c1978b0f755cc4bd698e8aa2b7667c575fb5f04ebe06b"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afe64e9b8ea66866a771996f6ff14447e8082ea26e675a295ad3bdbffdd72afb"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e2ee79e7cf29582ef770de7dab3d286431b01c3bb598f8e05e09601b890081"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfa74c903a3c1f0d9b1c7e7b53ed2d929a4910e272add6700c38f365a6002820"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83456c2d4979e08ff56180a76429263ea254c3f6552cd14ada95cff1dec9bb8"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0645376d399bfd64da57148694d78e1f431b1e1ee1054872a5713125681cf1be"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e99e34c82309dd78959ba3c1590975b5d3c862d6f279f843d47d26ff89d7d7e1"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ea29fc3ad9d91162c52b578f211ff1c931d8a38e1f58e684c45aa470adf19e2"}, - 
{file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4ac30da8b4f57187dbf449294d23b808f8f53cad6b1fc3623fa8a6c11d176dd0"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e78e6e2a00c223e164c417628572a90093c031ed724492c763721c2e0bc2a8df"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-win32.whl", hash = "sha256:1876843d8e31c89c399e31b97d4b9725a3575bb9c2af92038464231ec40f9edb"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b4b24f75d16a89cc6b4cdff0eb6a910a966ecd476d1e73f7ce5985ff1328e9a6"}, - {file = "psycopg2_binary-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:498807b927ca2510baea1b05cc91d7da4718a0f53cb766c154c417a39f1820a0"}, - {file = "psycopg2_binary-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0d236c2825fa656a2d98bbb0e52370a2e852e5a0ec45fc4f402977313329174d"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:34b9ccdf210cbbb1303c7c4db2905fa0319391bd5904d32689e6dd5c963d2ea8"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d2222e61f313c4848ff05353653bf5f5cf6ce34df540e4274516880d9c3763"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30637a20623e2a2eacc420059be11527f4458ef54352d870b8181a4c3020ae6b"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8122cfc7cae0da9a3077216528b8bb3629c43b25053284cc868744bfe71eb141"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38601cbbfe600362c43714482f43b7c110b20cb0f8172422c616b09b85a750c5"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c7e62ab8b332147a7593a385d4f368874d5fe4ad4e341770d4983442d89603e3"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:2ab652e729ff4ad76d400df2624d223d6e265ef81bb8aa17fbd63607878ecbee"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c83a74b68270028dc8ee74d38ecfaf9c90eed23c8959fca95bd703d25b82c88e"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d4e6036decf4b72d6425d5b29bbd3e8f0ff1059cda7ac7b96d6ac5ed34ffbacd"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:a8c28fd40a4226b4a84bdf2d2b5b37d2c7bd49486b5adcc200e8c7ec991dfa7e"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:51537e3d299be0db9137b321dfb6a5022caaab275775680e0c3d281feefaca6b"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4499e0a83b7b7edcb8dabecbd8501d0d3a5ef66457200f77bde3d210d5debb"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e13a5a2c01151f1208d5207e42f33ba86d561b7a89fca67c700b9486a06d0e2"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e0f754d27fddcfd74006455b6e04e6705d6c31a612ec69ddc040a5468e44b4e"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d57c3fd55d9058645d26ae37d76e61156a27722097229d32a9e73ed54819982a"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71f14375d6f73b62800530b581aed3ada394039877818b2d5f7fc77e3bb6894d"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:441cc2f8869a4f0f4bb408475e5ae0ee1f3b55b33f350406150277f7f35384fc"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65bee1e49fa6f9cf327ce0e01c4c10f39165ee76d35c846ade7cb0ec6683e303"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af335bac6b666cc6aea16f11d486c3b794029d9df029967f9938a4bed59b6a19"}, - {file = 
"psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cfec476887aa231b8548ece2e06d28edc87c1397ebd83922299af2e051cf2827"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65c07febd1936d63bfde78948b76cd4c2a411572a44ac50719ead41947d0f26b"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-win32.whl", hash = "sha256:4dfb4be774c4436a4526d0c554af0cc2e02082c38303852a36f6456ece7b3503"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:02c6e3cf3439e213e4ee930308dc122d6fb4d4bea9aef4a12535fbd605d1a2fe"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e9182eb20f41417ea1dd8e8f7888c4d7c6e805f8a7c98c1081778a3da2bee3e4"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a6979cf527e2603d349a91060f428bcb135aea2be3201dff794813256c274f1"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8338a271cb71d8da40b023a35d9c1e919eba6cbd8fa20a54b748a332c355d896"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ed340d2b858d6e6fb5083f87c09996506af483227735de6964a6100b4e6a54"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f81e65376e52f03422e1fb475c9514185669943798ed019ac50410fb4c4df232"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb13af3c5dd3a9588000910178de17010ebcccd37b4f9794b00595e3a8ddad3"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4c727b597c6444a16e9119386b59388f8a424223302d0c06c676ec8b4bc1f963"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d67fbdaf177da06374473ef6f7ed8cc0a9dc640b01abfe9e8a2ccb1b1402c1f"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:0892ef645c2fabb0c75ec32d79f4252542d0caec1d5d949630e7d242ca4681a3"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02c0f3757a4300cf379eb49f543fb7ac527fb00144d39246ee40e1df684ab514"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-win32.whl", hash = "sha256:c3dba7dab16709a33a847e5cd756767271697041fbe3fe97c215b1fc1f5c9848"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, + {file = "psycopg2-binary-2.9.7.tar.gz", hash = "sha256:1b918f64a51ffe19cd2e230b3240ba481330ce1d4b7875ae67305bd1d37b041c"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea5f8ee87f1eddc818fc04649d952c526db4426d26bab16efbe5a0c52b27d6ab"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2993ccb2b7e80844d534e55e0f12534c2871952f78e0da33c35e648bf002bbff"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbbc3c5d15ed76b0d9db7753c0db40899136ecfe97d50cbde918f630c5eb857a"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:692df8763b71d42eb8343f54091368f6f6c9cfc56dc391858cdb3c3ef1e3e584"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcfd5d37e027ec393a303cc0a216be564b96c80ba532f3d1e0d2b5e5e4b1e6e"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17cc17a70dfb295a240db7f65b6d8153c3d81efb145d76da1e4a096e9c5c0e63"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e5666632ba2b0d9757b38fc17337d84bdf932d38563c5234f5f8c54fd01349c9"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7db7b9b701974c96a88997d458b38ccb110eba8f805d4b4f74944aac48639b42"}, + {file = 
"psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c82986635a16fb1fa15cd5436035c88bc65c3d5ced1cfaac7f357ee9e9deddd4"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4fe13712357d802080cfccbf8c6266a3121dc0e27e2144819029095ccf708372"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-win32.whl", hash = "sha256:122641b7fab18ef76b18860dd0c772290566b6fb30cc08e923ad73d17461dc63"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:f8651cf1f144f9ee0fa7d1a1df61a9184ab72962531ca99f077bbdcba3947c58"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ecc15666f16f97709106d87284c136cdc82647e1c3f8392a672616aed3c7151"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fbb1184c7e9d28d67671992970718c05af5f77fc88e26fd7136613c4ece1f89"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7968fd20bd550431837656872c19575b687f3f6f98120046228e451e4064df"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094af2e77a1976efd4956a031028774b827029729725e136514aae3cdf49b87b"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26484e913d472ecb6b45937ea55ce29c57c662066d222fb0fbdc1fab457f18c5"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f309b77a7c716e6ed9891b9b42953c3ff7d533dc548c1e33fddc73d2f5e21f9"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d92e139ca388ccfe8c04aacc163756e55ba4c623c6ba13d5d1595ed97523e4b"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2df562bb2e4e00ee064779902d721223cfa9f8f58e7e52318c97d139cf7f012d"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:4eec5d36dbcfc076caab61a2114c12094c0b7027d57e9e4387b634e8ab36fd44"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1011eeb0c51e5b9ea1016f0f45fa23aca63966a4c0afcf0340ccabe85a9f65bd"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-win32.whl", hash = "sha256:ded8e15f7550db9e75c60b3d9fcbc7737fea258a0f10032cdb7edc26c2a671fd"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:8a136c8aaf6615653450817a7abe0fc01e4ea720ae41dfb2823eccae4b9062a3"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2dec5a75a3a5d42b120e88e6ed3e3b37b46459202bb8e36cd67591b6e5feebc1"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc10da7e7df3380426521e8c1ed975d22df678639da2ed0ec3244c3dc2ab54c8"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee919b676da28f78f91b464fb3e12238bd7474483352a59c8a16c39dfc59f0c5"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb1c0e682138f9067a58fc3c9a9bf1c83d8e08cfbee380d858e63196466d5c86"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00d8db270afb76f48a499f7bb8fa70297e66da67288471ca873db88382850bf4"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b0c2b466b2f4d89ccc33784c4ebb1627989bd84a39b79092e560e937a11d4ac"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:51d1b42d44f4ffb93188f9b39e6d1c82aa758fdb8d9de65e1ddfe7a7d250d7ad"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:11abdbfc6f7f7dea4a524b5f4117369b0d757725798f1593796be6ece20266cb"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f02f4a72cc3ab2565c6d9720f0343cb840fb2dc01a2e9ecb8bc58ccf95dc5c06"}, + {file = 
"psycopg2_binary-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:81d5dd2dd9ab78d31a451e357315f201d976c131ca7d43870a0e8063b6b7a1ec"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:62cb6de84d7767164a87ca97e22e5e0a134856ebcb08f21b621c6125baf61f16"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:59f7e9109a59dfa31efa022e94a244736ae401526682de504e87bd11ce870c22"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:95a7a747bdc3b010bb6a980f053233e7610276d55f3ca506afff4ad7749ab58a"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c721ee464e45ecf609ff8c0a555018764974114f671815a0a7152aedb9f3343"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4f37bbc6588d402980ffbd1f3338c871368fb4b1cfa091debe13c68bb3852b3"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac83ab05e25354dad798401babaa6daa9577462136ba215694865394840e31f8"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:024eaeb2a08c9a65cd5f94b31ace1ee3bb3f978cd4d079406aef85169ba01f08"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1c31c2606ac500dbd26381145684d87730a2fac9a62ebcfbaa2b119f8d6c19f4"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:42a62ef0e5abb55bf6ffb050eb2b0fcd767261fa3faf943a4267539168807522"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7952807f95c8eba6a8ccb14e00bf170bb700cafcec3924d565235dffc7dc4ae8"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e02bc4f2966475a7393bd0f098e1165d470d3fa816264054359ed4f10f6914ea"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-win32.whl", hash = 
"sha256:fdca0511458d26cf39b827a663d7d87db6f32b93efc22442a742035728603d5f"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:d0b16e5bb0ab78583f0ed7ab16378a0f8a89a27256bb5560402749dbe8a164d7"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6822c9c63308d650db201ba22fe6648bd6786ca6d14fdaf273b17e15608d0852"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f94cb12150d57ea433e3e02aabd072205648e86f1d5a0a692d60242f7809b15"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5ee89587696d808c9a00876065d725d4ae606f5f7853b961cdbc348b0f7c9a1"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad5ec10b53cbb57e9a2e77b67e4e4368df56b54d6b00cc86398578f1c635f329"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:642df77484b2dcaf87d4237792246d8068653f9e0f5c025e2c692fc56b0dda70"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6a8b575ac45af1eaccbbcdcf710ab984fd50af048fe130672377f78aaff6fc1"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f955aa50d7d5220fcb6e38f69ea126eafecd812d96aeed5d5f3597f33fad43bb"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad26d4eeaa0d722b25814cce97335ecf1b707630258f14ac4d2ed3d1d8415265"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ced63c054bdaf0298f62681d5dcae3afe60cbae332390bfb1acf0e23dcd25fc8"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b04da24cbde33292ad34a40db9832a80ad12de26486ffeda883413c9e1b1d5e"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-win32.whl", hash = "sha256:18f12632ab516c47c1ac4841a78fddea6508a8284c7cf0f292cb1a523f2e2379"}, + {file = 
"psycopg2_binary-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb3b8d55924a6058a26db69fb1d3e7e32695ff8b491835ba9f479537e14dcf9f"}, ] [[package]] @@ -4940,13 +4910,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -5073,23 +5043,23 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypdf" -version = "3.14.0" +version = "3.15.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false python-versions = ">=3.6" files = [ - {file = "pypdf-3.14.0-py3-none-any.whl", hash = "sha256:55a5943d9a598ff6b9d301acf8fa33303656a1ea86fd3d754c6d20d417636c6f"}, - {file = "pypdf-3.14.0.tar.gz", hash = "sha256:1fb4edffa5d3a448f964d0ad2a31cd8e408ea5d76d45efac042a8c3448c83b0a"}, + {file = "pypdf-3.15.0-py3-none-any.whl", hash = "sha256:2e29ddb62561ec91157c784783714703ddd3ce08f070ecbc57404fb86cd9fc97"}, + {file = "pypdf-3.15.0.tar.gz", hash = "sha256:8a6264e1c47c63dc2484e29bdfa76b121435896a84e94b7c5ae82c6ae96354bb"}, ] [package.dependencies] typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] -crypto = ["PyCryptodome"] +crypto = ["PyCryptodome", "cryptography"] dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", 
"wheel"] docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] -full = ["Pillow (>=8.0.0)", "PyCryptodome"] +full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] image = ["Pillow (>=8.0.0)"] [[package]] @@ -7261,13 +7231,13 @@ files = [ [[package]] name = "wheel" -version = "0.41.0" +version = "0.41.1" description = "A built-package format for Python" optional = false python-versions = ">=3.7" files = [ - {file = "wheel-0.41.0-py3-none-any.whl", hash = "sha256:7e9be3bbd0078f6147d82ed9ed957e323e7708f57e134743d2edef3a7b7972a9"}, - {file = "wheel-0.41.0.tar.gz", hash = "sha256:55a0f0a5a84869bce5ba775abfd9c462e3a6b1b7b7ec69d72c0b83d673a5114d"}, + {file = "wheel-0.41.1-py3-none-any.whl", hash = "sha256:473219bd4cbedc62cea0cb309089b593e47c15c4a2531015f94e4e3b9a0f6981"}, + {file = "wheel-0.41.1.tar.gz", hash = "sha256:12b911f083e876e10c595779709f8a88a59f45aacc646492a67fe9ef796c1b47"}, ] [package.extras] @@ -7580,4 +7550,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "57abce2ebcdc3cd7e359c36805822b9398d3bfb500c175b173a6d784d1276df6" +content-hash = "1329d94d3cb37062393d79da99fb3fa7d214ebdcdab6402c411561f960c6689f" diff --git a/pyproject.toml b/pyproject.toml index a29ae46ca..019632e46 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.4.1" +version = "0.4.2" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ @@ -33,7 +33,7 @@ google-search-results = "^2.4.1" google-api-python-client = "^2.79.0" typer = "^0.9.0" gunicorn = "^21.1.0" -langchain = "^0.0.250" +langchain = "^0.0.256" openai = "^0.27.8" pandas = "^2.0.0" chromadb = "^0.3.21" From 2abc0d2fb473a57f15d9e21dd895fe2d7364b817 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 20:28:41 -0300 Subject: [PATCH 67/90] 
=?UTF-8?q?=F0=9F=90=9B=20fix(formModal/index.tsx):?= =?UTF-8?q?=20handle=20case=20when=20tabsState=20or=20formKeysData=20is=20?= =?UTF-8?q?undefined=20to=20prevent=20errors=20=E2=9C=A8=20feat(formModal/?= =?UTF-8?q?index.tsx):=20add=20support=20for=20displaying=20input=20keys?= =?UTF-8?q?=20and=20handling=20input=20changes=20in=20the=20form=20modal?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/modals/formModal/index.tsx | 139 ++++++++++---------- 1 file changed, 72 insertions(+), 67 deletions(-) diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index 751eacea1..81b15559c 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -67,14 +67,17 @@ export default function FormModal({ const id = useRef(flow.id); const tabsStateFlowId = tabsState[flow.id]; const tabsStateFlowIdFormKeysData = tabsStateFlowId.formKeysData; - const [chatKey, setChatKey] = useState( - Object.keys(tabsState[flow.id].formKeysData.input_keys).find( - (k) => - !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && - tabsState[flow.id].formKeysData.input_keys[k] === "" - ) - ); - + const [chatKey, setChatKey] = useState(() => { + if (tabsState[flow.id]?.formKeysData?.input_keys) { + return Object.keys(tabsState[flow.id].formKeysData.input_keys).find( + (k) => + !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && + tabsState[flow.id].formKeysData.input_keys[k] === "" + ); + } + // TODO: return a sensible default + return ""; + }); useEffect(() => { if (messagesRef.current) { messagesRef.current.scrollTop = messagesRef.current.scrollHeight; @@ -419,68 +422,70 @@ export default function FormModal({
- {Object.keys(tabsState[id.current].formKeysData.input_keys).map( - (i, k) => ( -
- - - {i} - + {tabsState[id.current]?.formKeysData?.input_keys + ? Object.keys( + tabsState[id.current].formKeysData.input_keys + ).map((i, k) => ( +
+ + + {i} + -
{ - event.stopPropagation(); +
{ + event.stopPropagation(); + }} + > + + handleOnCheckedChange(value, i) + } + size="small" + disabled={tabsState[ + id.current + ].formKeysData.handle_keys.some((t) => t === i)} + /> +
+
+ } + key={k} + keyValue={i} + > +
+ {tabsState[id.current].formKeysData.handle_keys.some( + (t) => t === i + ) && ( +
+ Source: Component +
+ )} +
- } - key={k} - keyValue={i} - > -
- {tabsState[id.current].formKeysData.handle_keys.some( - (t) => t === i - ) && ( -
- Source: Component -
- )} - -
-
-
- ) - )} +
+
+ )) + : null} {tabsState[id.current].formKeysData.memory_keys.map((i, k) => (
Date: Mon, 7 Aug 2023 20:31:32 -0300 Subject: [PATCH 68/90] =?UTF-8?q?=F0=9F=90=9B=20fix(utils.ts):=20fix=20mis?= =?UTF-8?q?sing=20closing=20bracket=20in=20getChatInputField=20function=20?= =?UTF-8?q?=E2=9C=A8=20feat(utils.ts):=20add=20getChatInputField=20functio?= =?UTF-8?q?n=20to=20retrieve=20the=20chat=20input=20field=20based=20on=20t?= =?UTF-8?q?he=20current=20flow=20and=20tabs=20state?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/utils/utils.ts | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts index 462e72084..615e06d64 100644 --- a/src/frontend/src/utils/utils.ts +++ b/src/frontend/src/utils/utils.ts @@ -248,6 +248,26 @@ export function buildTweakObject(tweak) { return tweakString; } +/** + * Function to get Chat Input Field + * @param {FlowType} flow - The current flow. + * @param {TabsState} tabsState - The current tabs state. + * @returns {string} - The chat input field + */ +export function getChatInputField(flow: FlowType, tabsState?: TabsState) { + let chat_input_field = "text"; + + if ( + tabsState[flow.id] && + tabsState[flow.id].formKeysData && + tabsState[flow.id].formKeysData.input_keys + ) { + chat_input_field = Object.keys( + tabsState[flow.id].formKeysData.input_keys + )[0]; + } + return chat_input_field; + /** * Function to get the python code for the API * @param {string} flowId - The id of the flow @@ -365,6 +385,7 @@ export function getWidgetCode(flow: FlowType, tabsState?: TabsState): string { const flowId = flow.id; const flowName = flow.name; const inputs = buildInputs(tabsState, flow.id); + let chat_input_field = getChatInputField(flow, tabsState); return ` @@ -377,11 +398,9 @@ chat_input_field: Input key that you want the chat to send the user message with ${ tabsState[flow.id] && tabsState[flow.id].formKeysData ? 
`chat_inputs='${inputs}' - chat_input_field="${ - Object.keys(tabsState[flow.id].formKeysData.input_keys)[0] - }" + chat_input_field="${chat_input_field}" ` : "" - }host_url="http://localhost:7860" + }host_url="http://localhost:7860" >`; } From 16666426f729bb176f080e80a84313752bc7bc74 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 20:31:59 -0300 Subject: [PATCH 69/90] =?UTF-8?q?=F0=9F=90=9B=20fix(chatComponent):=20fix?= =?UTF-8?q?=20condition=20to=20check=20if=20parsedData.input=5Fkeys=20is?= =?UTF-8?q?=20not=20undefined=20=E2=9C=A8=20feat(chatComponent):=20add=20l?= =?UTF-8?q?ogic=20to=20close=20the=20connection=20and=20finish=20when=20en?= =?UTF-8?q?d=5Fof=5Fstream=20event=20is=20received?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/components/chatComponent/buildTrigger/index.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx index 4eb69546e..9b0b74678 100644 --- a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx +++ b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx @@ -82,13 +82,15 @@ export default function BuildTrigger({ const parsedData = JSON.parse(event.data); // if the event is the end of the stream, close the connection if (parsedData.end_of_stream) { + // Close the connection and finish + finished = true; eventSource.close(); return; } else if (parsedData.log) { // If the event is a log, log it setSuccessData({ title: parsedData.log }); - } else if (parsedData.input_keys) { + } else if (parsedData.input_keys !== undefined) { setTabsState((old) => { return { ...old, From bd7489206da2b38052587a0f286b8bf8905f9917 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 20:32:27 -0300 Subject: [PATCH 70/90] 
=?UTF-8?q?=F0=9F=90=9B=20fix(chatComponent):=20add?= =?UTF-8?q?=20null=20check=20for=20formKeysData.input=5Fkeys=20to=20preven?= =?UTF-8?q?t=20error=20when=20accessing=20undefined=20property?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/components/chatComponent/index.tsx | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx index ac551f6de..2b32dc0c4 100644 --- a/src/frontend/src/components/chatComponent/index.tsx +++ b/src/frontend/src/components/chatComponent/index.tsx @@ -58,7 +58,12 @@ export default function Chat({ flow }: ChatType) { ) { setIsBuilt(false); } - if (tabsState && tabsState[flow.id] && tabsState[flow.id].formKeysData) { + if ( + tabsState && + tabsState[flow.id] && + tabsState[flow.id].formKeysData && + tabsState[flow.id].formKeysData.input_keys !== null + ) { setCanOpen(true); } else { setCanOpen(false); From d1f09a6a645e358ac2cf2ee117d31da67cc4ee3d Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Mon, 7 Aug 2023 20:34:22 -0300 Subject: [PATCH 71/90] =?UTF-8?q?=F0=9F=94=A8=20refactor(utils.ts):=20remo?= =?UTF-8?q?ve=20unnecessary=20newline=20after=20the=20return=20statement?= =?UTF-8?q?=20in=20getChatInputField=20function?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/frontend/src/utils/utils.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts index 615e06d64..5e30d99cf 100644 --- a/src/frontend/src/utils/utils.ts +++ b/src/frontend/src/utils/utils.ts @@ -267,6 +267,7 @@ export function getChatInputField(flow: FlowType, tabsState?: TabsState) { )[0]; } return chat_input_field; +} /** * Function to get the python code for the API From 4dafbae1b999bc0a614e2a768059a7d1545d59e6 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz 
Freitas Almeida Date: Tue, 8 Aug 2023 14:09:15 -0300 Subject: [PATCH 72/90] =?UTF-8?q?=F0=9F=93=9D=20docs(loading=5Fdocument.md?= =?UTF-8?q?x):=20update=20the=20"Web=20Page"=20field=20URL=20to=20point=20?= =?UTF-8?q?to=20the=20correct=20documentation=20URL?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📝 docs(CNAME): update the CNAME file to point to the new domain for the documentation 🔧 fix(ApiModal/index.tsx): update the links in the description to point to the correct documentation URL --- docs/docs/guides/loading_document.mdx | 2 +- docs/static/CNAME | 2 +- src/frontend/src/modals/ApiModal/index.tsx | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/docs/guides/loading_document.mdx b/docs/docs/guides/loading_document.mdx index d760e9124..73fb85968 100644 --- a/docs/docs/guides/loading_document.mdx +++ b/docs/docs/guides/loading_document.mdx @@ -43,7 +43,7 @@ This guide takes you through the process of augmenting the "Basic Chat with Prom 8. Connect this loader to the `{context}` variable that we just added. -9. In the "Web Page" field, enter "https://langflow.org/how-upload-examples". +9. In the "Web Page" field, enter "https://docs.langflow.org/how-upload-examples". 10. Now, click on "ConversationBufferMemory". diff --git a/docs/static/CNAME b/docs/static/CNAME index 3e6c5c023..ab1d0c849 100644 --- a/docs/static/CNAME +++ b/docs/static/CNAME @@ -1 +1 @@ -langflow.org \ No newline at end of file +docs.langflow.org \ No newline at end of file diff --git a/src/frontend/src/modals/ApiModal/index.tsx b/src/frontend/src/modals/ApiModal/index.tsx index 236a03d65..adab0ca7d 100644 --- a/src/frontend/src/modals/ApiModal/index.tsx +++ b/src/frontend/src/modals/ApiModal/index.tsx @@ -74,7 +74,7 @@ const ApiModal = forwardRef( { name: "Chat Widget HTML", description: - "Insert this code anywhere in your <body> tag. 
To use with react and other libs, check our documentation.", + "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", mode: "html", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "py", @@ -127,7 +127,7 @@ const ApiModal = forwardRef( { name: "Chat Widget HTML", description: - "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", + "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", mode: "html", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "py", @@ -168,7 +168,7 @@ const ApiModal = forwardRef( { name: "Chat Widget HTML", description: - "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", + "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", mode: "html", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "py", From a64f03da9cfa931ffe971438afa520d5e325db6b Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 14:11:57 -0300 Subject: [PATCH 73/90] =?UTF-8?q?=F0=9F=93=9D=20docs(loading=5Fdocument.md?= =?UTF-8?q?x):=20update=20the=20"Web=20Page"=20field=20URL=20to=20point=20?= =?UTF-8?q?to=20the=20correct=20documentation=20URL?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📝 docs(CNAME): update the CNAME file to point to the new domain for the documentation 🔧 fix(ApiModal/index.tsx): update the links in the description to point to the correct documentation URL --- docs/docs/guides/loading_document.mdx | 2 +- docs/static/CNAME | 2 +- src/frontend/src/modals/ApiModal/index.tsx | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/docs/guides/loading_document.mdx b/docs/docs/guides/loading_document.mdx index 
d760e9124..73fb85968 100644 --- a/docs/docs/guides/loading_document.mdx +++ b/docs/docs/guides/loading_document.mdx @@ -43,7 +43,7 @@ This guide takes you through the process of augmenting the "Basic Chat with Prom 8. Connect this loader to the `{context}` variable that we just added. -9. In the "Web Page" field, enter "https://langflow.org/how-upload-examples". +9. In the "Web Page" field, enter "https://docs.langflow.org/how-upload-examples". 10. Now, click on "ConversationBufferMemory". diff --git a/docs/static/CNAME b/docs/static/CNAME index 3e6c5c023..ab1d0c849 100644 --- a/docs/static/CNAME +++ b/docs/static/CNAME @@ -1 +1 @@ -langflow.org \ No newline at end of file +docs.langflow.org \ No newline at end of file diff --git a/src/frontend/src/modals/ApiModal/index.tsx b/src/frontend/src/modals/ApiModal/index.tsx index 236a03d65..adab0ca7d 100644 --- a/src/frontend/src/modals/ApiModal/index.tsx +++ b/src/frontend/src/modals/ApiModal/index.tsx @@ -74,7 +74,7 @@ const ApiModal = forwardRef( { name: "Chat Widget HTML", description: - "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", + "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", mode: "html", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "py", @@ -127,7 +127,7 @@ const ApiModal = forwardRef( { name: "Chat Widget HTML", description: - "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", + "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", mode: "html", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "py", @@ -168,7 +168,7 @@ const ApiModal = forwardRef( { name: "Chat Widget HTML", description: - "Insert this code anywhere in your <body> tag. 
To use with react and other libs, check our documentation.", + "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation.", mode: "html", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "py", From f9b392d5b86b7677111ce6117b8114b16d8f0977 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 15:02:16 -0300 Subject: [PATCH 74/90] Revert "Fixes flows without input_keys not being runnable" --- src/backend/langflow/api/v1/chat.py | 2 +- src/backend/langflow/chat/manager.py | 2 +- src/backend/langflow/chat/utils.py | 4 +- .../interface/initialize/vector_store.py | 14 +- .../chatComponent/buildTrigger/index.tsx | 4 +- .../src/components/chatComponent/index.tsx | 3 +- src/frontend/src/modals/formModal/index.tsx | 139 +++++++++--------- src/frontend/src/utils/utils.ts | 28 +--- 8 files changed, 83 insertions(+), 113 deletions(-) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index 06a2fdda0..dd3407d1b 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -155,7 +155,7 @@ async def stream_build(flow_id: str): ) else: input_keys_response = { - "input_keys": None, + "input_keys": {}, "memory_keys": [], "handle_keys": [], } diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py index 2c3427a12..1e93174e2 100644 --- a/src/backend/langflow/chat/manager.py +++ b/src/backend/langflow/chat/manager.py @@ -117,7 +117,7 @@ class ChatManager: self, client_id: str, payload: Dict, langchain_object: Any ): # Process the graph data and chat message - chat_inputs = payload.pop("inputs", {}) + chat_inputs = payload.pop("inputs", "") chat_inputs = ChatMessage(message=chat_inputs) self.chat_history.add_message(client_id, chat_inputs) diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/chat/utils.py index 17c976eb9..7db65b8e3 100644 --- 
a/src/backend/langflow/chat/utils.py +++ b/src/backend/langflow/chat/utils.py @@ -21,9 +21,9 @@ async def process_graph( # Generate result and thought try: - if chat_inputs.message is None: + if not chat_inputs.message: logger.debug("No message provided") - chat_inputs.message = {} + raise ValueError("No message provided") logger.debug("Generating result and thought") result, intermediate_steps = await get_result_and_steps( diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index 8330d7611..12cf054a5 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -171,7 +171,11 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict): def initialize_chroma(class_object: Type[Chroma], params: dict): """Initialize a ChromaDB object from the params""" if ( # type: ignore - "chroma_server_host" in params or "chroma_server_http_port" in params + "chroma_server_host" in params + or "chroma_server_http_port" in params + or "chroma_server_ssl_enabled" in params + or "chroma_server_grpc_port" in params + or "chroma_server_cors_allow_origins" in params ): import chromadb # type: ignore @@ -182,14 +186,6 @@ def initialize_chroma(class_object: Type[Chroma], params: dict): } chroma_settings = chromadb.config.Settings(**settings_params) params["client_settings"] = chroma_settings - else: - # remove all chroma_server_ keys from params - params = { - key: value - for key, value in params.items() - if not key.startswith("chroma_server_") - } - persist = params.pop("persist", False) if not docs_in_params(params): params.pop("documents", None) diff --git a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx index 9b0b74678..4eb69546e 100644 --- a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx +++ 
b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx @@ -82,15 +82,13 @@ export default function BuildTrigger({ const parsedData = JSON.parse(event.data); // if the event is the end of the stream, close the connection if (parsedData.end_of_stream) { - // Close the connection and finish - finished = true; eventSource.close(); return; } else if (parsedData.log) { // If the event is a log, log it setSuccessData({ title: parsedData.log }); - } else if (parsedData.input_keys !== undefined) { + } else if (parsedData.input_keys) { setTabsState((old) => { return { ...old, diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx index 2b32dc0c4..3cbe5bbf1 100644 --- a/src/frontend/src/components/chatComponent/index.tsx +++ b/src/frontend/src/components/chatComponent/index.tsx @@ -62,7 +62,8 @@ export default function Chat({ flow }: ChatType) { tabsState && tabsState[flow.id] && tabsState[flow.id].formKeysData && - tabsState[flow.id].formKeysData.input_keys !== null + tabsState[flow.id].formKeysData.input_keys && + Object.keys(tabsState[flow.id].formKeysData.input_keys).length > 0 ) { setCanOpen(true); } else { diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index 81b15559c..751eacea1 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -67,17 +67,14 @@ export default function FormModal({ const id = useRef(flow.id); const tabsStateFlowId = tabsState[flow.id]; const tabsStateFlowIdFormKeysData = tabsStateFlowId.formKeysData; - const [chatKey, setChatKey] = useState(() => { - if (tabsState[flow.id]?.formKeysData?.input_keys) { - return Object.keys(tabsState[flow.id].formKeysData.input_keys).find( - (k) => - !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && - tabsState[flow.id].formKeysData.input_keys[k] === "" - ); - } - // TODO: return a sensible default - return ""; - }); 
+ const [chatKey, setChatKey] = useState( + Object.keys(tabsState[flow.id].formKeysData.input_keys).find( + (k) => + !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && + tabsState[flow.id].formKeysData.input_keys[k] === "" + ) + ); + useEffect(() => { if (messagesRef.current) { messagesRef.current.scrollTop = messagesRef.current.scrollHeight; @@ -422,70 +419,68 @@ export default function FormModal({
- {tabsState[id.current]?.formKeysData?.input_keys - ? Object.keys( - tabsState[id.current].formKeysData.input_keys - ).map((i, k) => ( -
- - - {i} - + {Object.keys(tabsState[id.current].formKeysData.input_keys).map( + (i, k) => ( +
+ + + {i} + -
{ - event.stopPropagation(); - }} - > - - handleOnCheckedChange(value, i) - } - size="small" - disabled={tabsState[ - id.current - ].formKeysData.handle_keys.some((t) => t === i)} - /> -
-
- } - key={k} - keyValue={i} - > -
- {tabsState[id.current].formKeysData.handle_keys.some( - (t) => t === i - ) && ( -
- Source: Component -
- )} - + > + + handleOnCheckedChange(value, i) + } + size="small" + disabled={tabsState[ + id.current + ].formKeysData.handle_keys.some((t) => t === i)} + /> +
- -
- )) - : null} + } + key={k} + keyValue={i} + > +
+ {tabsState[id.current].formKeysData.handle_keys.some( + (t) => t === i + ) && ( +
+ Source: Component +
+ )} + +
+ +
+ ) + )} {tabsState[id.current].formKeysData.memory_keys.map((i, k) => (
@@ -399,9 +377,11 @@ chat_input_field: Input key that you want the chat to send the user message with ${ tabsState[flow.id] && tabsState[flow.id].formKeysData ? `chat_inputs='${inputs}' - chat_input_field="${chat_input_field}" + chat_input_field="${ + Object.keys(tabsState[flow.id].formKeysData.input_keys)[0] + }" ` : "" - }host_url="http://localhost:7860" + }host_url="http://localhost:7860" >`; } From 597b2153a3fa56d857ad1c7d348ac507afd7f378 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 15:06:38 -0300 Subject: [PATCH 75/90] =?UTF-8?q?=F0=9F=94=96=20chore(pyproject.toml):=20b?= =?UTF-8?q?ump=20version=20from=200.4.2=20to=200.4.3=20for=20langflow=20pa?= =?UTF-8?q?ckage?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 90 ++++++++++++++++++++++---------------------------- pyproject.toml | 2 +- 2 files changed, 41 insertions(+), 51 deletions(-) diff --git a/poetry.lock b/poetry.lock index 47c471ddc..ced8e8e4c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1585,13 +1585,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.95.0" +version = "2.96.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.95.0.tar.gz", hash = "sha256:d2731ede12f79e53fbe11fdb913dfe986440b44c0a28431c78a8ec275f4c1541"}, - {file = "google_api_python_client-2.95.0-py2.py3-none-any.whl", hash = "sha256:a8aab2da678f42a01f2f52108f787fef4310f23f9dd917c4e64664c3f0c885ba"}, + {file = "google-api-python-client-2.96.0.tar.gz", hash = "sha256:f712373d03d338af57b9f5fe98c91f4b5baaa8765469b015bc623c4681c5bd51"}, + {file = "google_api_python_client-2.96.0-py2.py3-none-any.whl", hash = "sha256:38c2b61b10d15bb41ec8f89303e3837ec2d2c3e4e38de5800c05ee322492f937"}, ] [package.dependencies] @@ -5725,59 +5725,49 @@ files = [ [[package]] name = "safetensors" -version = 
"0.3.1" +version = "0.3.2" description = "Fast and Safe Tensor serialization" optional = true python-versions = "*" files = [ - {file = "safetensors-0.3.1-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:2ae9b7dd268b4bae6624729dac86deb82104820e9786429b0583e5168db2f770"}, - {file = "safetensors-0.3.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:08c85c1934682f1e2cd904d38433b53cd2a98245a7cc31f5689f9322a2320bbf"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba625c7af9e1c5d0d91cb83d2fba97d29ea69d4db2015d9714d24c7f6d488e15"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b57d5890c619ec10d9f1b6426b8690d0c9c2868a90dc52f13fae6f6407ac141f"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c9f562ea696d50b95cadbeb1716dc476714a87792ffe374280c0835312cbfe2"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c115951b3a865ece8d98ee43882f2fd0a999c0200d6e6fec24134715ebe3b57"}, - {file = "safetensors-0.3.1-cp310-cp310-win32.whl", hash = "sha256:118f8f7503ea312fc7af27e934088a1b589fb1eff5a7dea2cd1de6c71ee33391"}, - {file = "safetensors-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:54846eaae25fded28a7bebbb66be563cad221b4c80daee39e2f55df5e5e0266f"}, - {file = "safetensors-0.3.1-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:5af82e10946c4822506db0f29269f43147e889054704dde994d4e22f0c37377b"}, - {file = "safetensors-0.3.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:626c86dd1d930963c8ea7f953a3787ae85322551e3a5203ac731d6e6f3e18f44"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e30677e6af1f4cc4f2832546e91dbb3b0aa7d575bfa473d2899d524e1ace08"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d534b80bc8d39945bb902f34b0454773971fe9e5e1f2142af451759d7e52b356"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ddd0ddd502cf219666e7d30f23f196cb87e829439b52b39f3e7da7918c3416df"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997a2cc14023713f423e6d16536d55cb16a3d72850f142e05f82f0d4c76d383b"}, - {file = "safetensors-0.3.1-cp311-cp311-win32.whl", hash = "sha256:6ae9ca63d9e22f71ec40550207bd284a60a6b4916ae6ca12c85a8d86bf49e0c3"}, - {file = "safetensors-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:62aa7421ca455418423e35029524489480adda53e3f702453580180ecfebe476"}, - {file = "safetensors-0.3.1-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:6d54b3ed367b6898baab75dfd057c24f36ec64d3938ffff2af981d56bfba2f42"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:262423aeda91117010f8c607889066028f680fbb667f50cfe6eae96f22f9d150"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10efe2513a8327fd628cea13167089588acc23093ba132aecfc536eb9a4560fe"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:689b3d6a7ebce70ee9438267ee55ea89b575c19923876645e927d08757b552fe"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14cd9a87bc73ce06903e9f8ee8b05b056af6f3c9f37a6bd74997a16ed36ff5f4"}, - {file = "safetensors-0.3.1-cp37-cp37m-win32.whl", hash = "sha256:a77cb39624480d5f143c1cc272184f65a296f573d61629eff5d495d2e0541d3e"}, - {file = "safetensors-0.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9eff3190bfbbb52eef729911345c643f875ca4dbb374aa6c559675cfd0ab73db"}, - {file = "safetensors-0.3.1-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:05cbfef76e4daa14796db1bbb52072d4b72a44050c368b2b1f6fd3e610669a89"}, - {file = 
"safetensors-0.3.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:c49061461f4a81e5ec3415070a3f135530834c89cbd6a7db7cd49e3cb9d9864b"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cf7e73ca42974f098ce0cf4dd8918983700b6b07a4c6827d50c8daefca776e"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04f909442d6223ff0016cd2e1b2a95ef8039b92a558014627363a2e267213f62"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c573c5a0d5d45791ae8c179e26d74aff86e719056591aa7edb3ca7be55bc961"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6994043b12e717cf2a6ba69077ac41f0d3675b2819734f07f61819e854c622c7"}, - {file = "safetensors-0.3.1-cp38-cp38-win32.whl", hash = "sha256:158ede81694180a0dbba59422bc304a78c054b305df993c0c6e39c6330fa9348"}, - {file = "safetensors-0.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdc725beff7121ea8d39a7339f5a6abcb01daa189ea56290b67fe262d56e20f"}, - {file = "safetensors-0.3.1-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:cba910fcc9e5e64d32d62b837388721165e9c7e45d23bc3a38ad57694b77f40d"}, - {file = "safetensors-0.3.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a4f7dbfe7285573cdaddd85ef6fa84ebbed995d3703ab72d71257944e384612f"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54aed0802f9eaa83ca7b1cbb986bfb90b8e2c67b6a4bcfe245627e17dad565d4"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34b75a766f3cfc99fd4c33e329b76deae63f5f388e455d863a5d6e99472fca8e"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a0f31904f35dc14919a145b2d7a2d8842a43a18a629affe678233c4ea90b4af"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dcf527ecc5f58907fd9031510378105487f318cc91ecdc5aee3c7cc8f46030a8"}, - {file = "safetensors-0.3.1-cp39-cp39-win32.whl", hash = "sha256:e2f083112cf97aa9611e2a05cc170a2795eccec5f6ff837f4565f950670a9d83"}, - {file = "safetensors-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f4f614b8e8161cd8a9ca19c765d176a82b122fa3d3387b77862145bfe9b4e93"}, - {file = "safetensors-0.3.1.tar.gz", hash = "sha256:571da56ff8d0bec8ae54923b621cda98d36dcef10feb36fd492c4d0c2cd0e869"}, + {file = "safetensors-0.3.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b6a66989075c2891d743153e8ba9ca84ee7232c8539704488f454199b8b8f84d"}, + {file = "safetensors-0.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:670d6bc3a3b377278ce2971fa7c36ebc0a35041c4ea23b9df750a39380800195"}, + {file = "safetensors-0.3.2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:7f80af7e4ab3188daaff12d43d078da3017a90d732d38d7af4eb08b6ca2198a5"}, + {file = "safetensors-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb44e140bf2aeda98d9dde669dbec15f7b77f96a9274469b91a6cf4bcc5ec3b"}, + {file = "safetensors-0.3.2-cp310-cp310-win32.whl", hash = "sha256:2961c1243fd0da46aa6a1c835305cc4595486f8ac64632a604d0eb5f2de76175"}, + {file = "safetensors-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c813920482c337d1424d306e1b05824a38e3ef94303748a0a287dea7a8c4f805"}, + {file = "safetensors-0.3.2-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:707df34bd9b9047e97332136ad98e57028faeccdb9cfe1c3b52aba5964cc24bf"}, + {file = "safetensors-0.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:becc5bb85b2947eae20ed23b407ebfd5277d9a560f90381fe2c42e6c043677ba"}, + {file = "safetensors-0.3.2-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:54ad6af663e15e2b99e2ea3280981b7514485df72ba6d014dc22dae7ba6a5e6c"}, + {file = "safetensors-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0fac127ff8fb04834da5c6d85a8077e6a1c9180a11251d96f8068db922a17"}, + {file 
= "safetensors-0.3.2-cp311-cp311-win32.whl", hash = "sha256:155b82dbe2b0ebff18cde3f76b42b6d9470296e92561ef1a282004d449fa2b4c"}, + {file = "safetensors-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:a86428d196959619ce90197731be9391b5098b35100a7228ef4643957648f7f5"}, + {file = "safetensors-0.3.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:c1f8ab41ed735c5b581f451fd15d9602ff51aa88044bfa933c5fa4b1d0c644d1"}, + {file = "safetensors-0.3.2-cp37-cp37m-macosx_13_0_x86_64.whl", hash = "sha256:bc9cfb3c9ea2aec89685b4d656f9f2296f0f0d67ecf2bebf950870e3be89b3db"}, + {file = "safetensors-0.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d7d70d48585fe8df00725aa788f2e64fd24a4c9ae07cd6be34f6859d0f89a9c"}, + {file = "safetensors-0.3.2-cp37-cp37m-win32.whl", hash = "sha256:6ff59bc90cdc857f68b1023be9085fda6202bbe7f2fd67d06af8f976d6adcc10"}, + {file = "safetensors-0.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8b05c93da15fa911763a89281906ca333ed800ab0ef1c7ce53317aa1a2322f19"}, + {file = "safetensors-0.3.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8969cfd9e8d904e8d3c67c989e1bd9a95e3cc8980d4f95e4dcd43c299bb94253"}, + {file = "safetensors-0.3.2-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:f54148ac027556eb02187e9bc1556c4d916c99ca3cb34ca36a7d304d675035c1"}, + {file = "safetensors-0.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa98f49e95f02eb750d32c4947e7d5aa43883149ebd0414920866446525b70f0"}, + {file = "safetensors-0.3.2-cp38-cp38-win32.whl", hash = "sha256:33409df5e28a83dc5cc5547a3ac17c0f1b13a1847b1eb3bc4b3be0df9915171e"}, + {file = "safetensors-0.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:e04a7cbbb3856159ab99e3adb14521544f65fcb8548cce773a1435a0f8d78d27"}, + {file = "safetensors-0.3.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7c864cf5dcbfb608c5378f83319c60cc9c97263343b57c02756b7613cd5ab4dd"}, + {file = "safetensors-0.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = 
"sha256:14e8c19d6dc51d4f70ee33c46aff04c8ba3f95812e74daf8036c24bc86e75cae"}, + {file = "safetensors-0.3.2-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:fafd95e5ef41e8f312e2a32b7031f7b9b2a621b255f867b221f94bb2e9f51ae8"}, + {file = "safetensors-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ff0024ef2e5722a79af24688ce4a430f70601d0cf712a744105ed4b8f67ba5"}, + {file = "safetensors-0.3.2-cp39-cp39-win32.whl", hash = "sha256:827af9478b78977248ba93e2fd97ea307fb63f463f80cef4824460f8c2542a52"}, + {file = "safetensors-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9b09f27c456efa301f98681ea14b12f81f2637889f6336223ccab71e42c34541"}, + {file = "safetensors-0.3.2.tar.gz", hash = "sha256:2dbd34554ed3b99435a0e84df077108f5334c8336b5ed9cb8b6b98f7b10da2f6"}, ] [package.extras] -all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (>=2.11.0)", "torch (>=1.10)"] -dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (>=2.11.0)", "torch (>=1.10)"] +all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] +dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub 
(>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)"] numpy = ["numpy (>=1.21.6)"] paddlepaddle = ["paddlepaddle (>=2.4.1)"] +pinned-tf = ["tensorflow (==2.11.0)"] quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] tensorflow = ["tensorflow (>=2.11.0)"] testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "numpy (>=1.21.6)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)"] @@ -6623,20 +6613,20 @@ files = [ [[package]] name = "tqdm" -version = "4.65.0" +version = "4.65.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, - {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, + {file = "tqdm-4.65.1-py3-none-any.whl", hash = "sha256:16181c62ad2c6f8f6f29876e66322faad1c7fd3cc70aa9cc25ff63e50d1da031"}, + {file = "tqdm-4.65.1.tar.gz", hash = "sha256:2cb0075cc5269f8edac40bdeb757cc36ab5b6648caf014822b67e1a49fba141d"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] diff --git a/pyproject.toml b/pyproject.toml index 019632e46..80ec08228 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.4.2" +version = "0.4.3" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ 
From b8d8ed82838aade1e64e633d8c50e6ef9c05673c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 15:12:37 -0300 Subject: [PATCH 76/90] Revert "Revert "Fixes flows without input_keys not being runnable" (#731)" This reverts commit 352e74cbc71138435c14140d246c9681557d48ac, reversing changes made to 3f531a11572a0bd7ade53a2aebc415d9ced652e8. --- src/backend/langflow/api/v1/chat.py | 2 +- src/backend/langflow/chat/manager.py | 2 +- src/backend/langflow/chat/utils.py | 4 +- .../interface/initialize/vector_store.py | 14 +- .../chatComponent/buildTrigger/index.tsx | 4 +- .../src/components/chatComponent/index.tsx | 3 +- src/frontend/src/modals/formModal/index.tsx | 139 +++++++++--------- src/frontend/src/utils/utils.ts | 28 +++- 8 files changed, 113 insertions(+), 83 deletions(-) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index dd3407d1b..06a2fdda0 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -155,7 +155,7 @@ async def stream_build(flow_id: str): ) else: input_keys_response = { - "input_keys": {}, + "input_keys": None, "memory_keys": [], "handle_keys": [], } diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py index 1e93174e2..2c3427a12 100644 --- a/src/backend/langflow/chat/manager.py +++ b/src/backend/langflow/chat/manager.py @@ -117,7 +117,7 @@ class ChatManager: self, client_id: str, payload: Dict, langchain_object: Any ): # Process the graph data and chat message - chat_inputs = payload.pop("inputs", "") + chat_inputs = payload.pop("inputs", {}) chat_inputs = ChatMessage(message=chat_inputs) self.chat_history.add_message(client_id, chat_inputs) diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/chat/utils.py index 7db65b8e3..17c976eb9 100644 --- a/src/backend/langflow/chat/utils.py +++ b/src/backend/langflow/chat/utils.py @@ -21,9 +21,9 @@ async def process_graph( # Generate result and 
thought try: - if not chat_inputs.message: + if chat_inputs.message is None: logger.debug("No message provided") - raise ValueError("No message provided") + chat_inputs.message = {} logger.debug("Generating result and thought") result, intermediate_steps = await get_result_and_steps( diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index 12cf054a5..8330d7611 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -171,11 +171,7 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict): def initialize_chroma(class_object: Type[Chroma], params: dict): """Initialize a ChromaDB object from the params""" if ( # type: ignore - "chroma_server_host" in params - or "chroma_server_http_port" in params - or "chroma_server_ssl_enabled" in params - or "chroma_server_grpc_port" in params - or "chroma_server_cors_allow_origins" in params + "chroma_server_host" in params or "chroma_server_http_port" in params ): import chromadb # type: ignore @@ -186,6 +182,14 @@ def initialize_chroma(class_object: Type[Chroma], params: dict): } chroma_settings = chromadb.config.Settings(**settings_params) params["client_settings"] = chroma_settings + else: + # remove all chroma_server_ keys from params + params = { + key: value + for key, value in params.items() + if not key.startswith("chroma_server_") + } + persist = params.pop("persist", False) if not docs_in_params(params): params.pop("documents", None) diff --git a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx index 4eb69546e..9b0b74678 100644 --- a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx +++ b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx @@ -82,13 +82,15 @@ export default function BuildTrigger({ const parsedData = JSON.parse(event.data); 
// if the event is the end of the stream, close the connection if (parsedData.end_of_stream) { + // Close the connection and finish + finished = true; eventSource.close(); return; } else if (parsedData.log) { // If the event is a log, log it setSuccessData({ title: parsedData.log }); - } else if (parsedData.input_keys) { + } else if (parsedData.input_keys !== undefined) { setTabsState((old) => { return { ...old, diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx index 3cbe5bbf1..2b32dc0c4 100644 --- a/src/frontend/src/components/chatComponent/index.tsx +++ b/src/frontend/src/components/chatComponent/index.tsx @@ -62,8 +62,7 @@ export default function Chat({ flow }: ChatType) { tabsState && tabsState[flow.id] && tabsState[flow.id].formKeysData && - tabsState[flow.id].formKeysData.input_keys && - Object.keys(tabsState[flow.id].formKeysData.input_keys).length > 0 + tabsState[flow.id].formKeysData.input_keys !== null ) { setCanOpen(true); } else { diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index 751eacea1..81b15559c 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -67,14 +67,17 @@ export default function FormModal({ const id = useRef(flow.id); const tabsStateFlowId = tabsState[flow.id]; const tabsStateFlowIdFormKeysData = tabsStateFlowId.formKeysData; - const [chatKey, setChatKey] = useState( - Object.keys(tabsState[flow.id].formKeysData.input_keys).find( - (k) => - !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && - tabsState[flow.id].formKeysData.input_keys[k] === "" - ) - ); - + const [chatKey, setChatKey] = useState(() => { + if (tabsState[flow.id]?.formKeysData?.input_keys) { + return Object.keys(tabsState[flow.id].formKeysData.input_keys).find( + (k) => + !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && + 
tabsState[flow.id].formKeysData.input_keys[k] === "" + ); + } + // TODO: return a sensible default + return ""; + }); useEffect(() => { if (messagesRef.current) { messagesRef.current.scrollTop = messagesRef.current.scrollHeight; @@ -419,68 +422,70 @@ export default function FormModal({
- {Object.keys(tabsState[id.current].formKeysData.input_keys).map( - (i, k) => ( -
- - - {i} - + {tabsState[id.current]?.formKeysData?.input_keys + ? Object.keys( + tabsState[id.current].formKeysData.input_keys + ).map((i, k) => ( +
+ + + {i} + -
{ - event.stopPropagation(); +
{ + event.stopPropagation(); + }} + > + + handleOnCheckedChange(value, i) + } + size="small" + disabled={tabsState[ + id.current + ].formKeysData.handle_keys.some((t) => t === i)} + /> +
+
+ } + key={k} + keyValue={i} + > +
+ {tabsState[id.current].formKeysData.handle_keys.some( + (t) => t === i + ) && ( +
+ Source: Component +
+ )} +
- } - key={k} - keyValue={i} - > -
- {tabsState[id.current].formKeysData.handle_keys.some( - (t) => t === i - ) && ( -
- Source: Component -
- )} - -
-
-
- ) - )} +
+
+ )) + : null} {tabsState[id.current].formKeysData.memory_keys.map((i, k) => (
@@ -377,11 +399,9 @@ chat_input_field: Input key that you want the chat to send the user message with ${ tabsState[flow.id] && tabsState[flow.id].formKeysData ? `chat_inputs='${inputs}' - chat_input_field="${ - Object.keys(tabsState[flow.id].formKeysData.input_keys)[0] - }" + chat_input_field="${chat_input_field}" ` : "" - }host_url="http://localhost:7860" + }host_url="http://localhost:7860" >`; } From cf8c0601eb21e179dfd0b678c02ad2c0d647b934 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 15:13:22 -0300 Subject: [PATCH 77/90] Revert "Merge branch 'main' into release" This reverts commit 4dea0f66906ea2400899fde0c065234d419cfc04, reversing changes made to 597b2153a3fa56d857ad1c7d348ac507afd7f378. --- src/backend/langflow/api/v1/chat.py | 2 +- src/backend/langflow/chat/manager.py | 2 +- src/backend/langflow/chat/utils.py | 4 +- .../interface/initialize/vector_store.py | 14 +- .../chatComponent/buildTrigger/index.tsx | 4 +- .../src/components/chatComponent/index.tsx | 3 +- src/frontend/src/modals/formModal/index.tsx | 139 +++++++++--------- src/frontend/src/utils/utils.ts | 28 +++- 8 files changed, 113 insertions(+), 83 deletions(-) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index dd3407d1b..06a2fdda0 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -155,7 +155,7 @@ async def stream_build(flow_id: str): ) else: input_keys_response = { - "input_keys": {}, + "input_keys": None, "memory_keys": [], "handle_keys": [], } diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py index 1e93174e2..2c3427a12 100644 --- a/src/backend/langflow/chat/manager.py +++ b/src/backend/langflow/chat/manager.py @@ -117,7 +117,7 @@ class ChatManager: self, client_id: str, payload: Dict, langchain_object: Any ): # Process the graph data and chat message - chat_inputs = payload.pop("inputs", "") + chat_inputs = payload.pop("inputs", {}) 
chat_inputs = ChatMessage(message=chat_inputs) self.chat_history.add_message(client_id, chat_inputs) diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/chat/utils.py index 7db65b8e3..17c976eb9 100644 --- a/src/backend/langflow/chat/utils.py +++ b/src/backend/langflow/chat/utils.py @@ -21,9 +21,9 @@ async def process_graph( # Generate result and thought try: - if not chat_inputs.message: + if chat_inputs.message is None: logger.debug("No message provided") - raise ValueError("No message provided") + chat_inputs.message = {} logger.debug("Generating result and thought") result, intermediate_steps = await get_result_and_steps( diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index 12cf054a5..8330d7611 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -171,11 +171,7 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict): def initialize_chroma(class_object: Type[Chroma], params: dict): """Initialize a ChromaDB object from the params""" if ( # type: ignore - "chroma_server_host" in params - or "chroma_server_http_port" in params - or "chroma_server_ssl_enabled" in params - or "chroma_server_grpc_port" in params - or "chroma_server_cors_allow_origins" in params + "chroma_server_host" in params or "chroma_server_http_port" in params ): import chromadb # type: ignore @@ -186,6 +182,14 @@ def initialize_chroma(class_object: Type[Chroma], params: dict): } chroma_settings = chromadb.config.Settings(**settings_params) params["client_settings"] = chroma_settings + else: + # remove all chroma_server_ keys from params + params = { + key: value + for key, value in params.items() + if not key.startswith("chroma_server_") + } + persist = params.pop("persist", False) if not docs_in_params(params): params.pop("documents", None) diff --git 
a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx index 4eb69546e..9b0b74678 100644 --- a/src/frontend/src/components/chatComponent/buildTrigger/index.tsx +++ b/src/frontend/src/components/chatComponent/buildTrigger/index.tsx @@ -82,13 +82,15 @@ export default function BuildTrigger({ const parsedData = JSON.parse(event.data); // if the event is the end of the stream, close the connection if (parsedData.end_of_stream) { + // Close the connection and finish + finished = true; eventSource.close(); return; } else if (parsedData.log) { // If the event is a log, log it setSuccessData({ title: parsedData.log }); - } else if (parsedData.input_keys) { + } else if (parsedData.input_keys !== undefined) { setTabsState((old) => { return { ...old, diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx index 3cbe5bbf1..2b32dc0c4 100644 --- a/src/frontend/src/components/chatComponent/index.tsx +++ b/src/frontend/src/components/chatComponent/index.tsx @@ -62,8 +62,7 @@ export default function Chat({ flow }: ChatType) { tabsState && tabsState[flow.id] && tabsState[flow.id].formKeysData && - tabsState[flow.id].formKeysData.input_keys && - Object.keys(tabsState[flow.id].formKeysData.input_keys).length > 0 + tabsState[flow.id].formKeysData.input_keys !== null ) { setCanOpen(true); } else { diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index 751eacea1..81b15559c 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -67,14 +67,17 @@ export default function FormModal({ const id = useRef(flow.id); const tabsStateFlowId = tabsState[flow.id]; const tabsStateFlowIdFormKeysData = tabsStateFlowId.formKeysData; - const [chatKey, setChatKey] = useState( - Object.keys(tabsState[flow.id].formKeysData.input_keys).find( - (k) => - 
!tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && - tabsState[flow.id].formKeysData.input_keys[k] === "" - ) - ); - + const [chatKey, setChatKey] = useState(() => { + if (tabsState[flow.id]?.formKeysData?.input_keys) { + return Object.keys(tabsState[flow.id].formKeysData.input_keys).find( + (k) => + !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && + tabsState[flow.id].formKeysData.input_keys[k] === "" + ); + } + // TODO: return a sensible default + return ""; + }); useEffect(() => { if (messagesRef.current) { messagesRef.current.scrollTop = messagesRef.current.scrollHeight; @@ -419,68 +422,70 @@ export default function FormModal({
- {Object.keys(tabsState[id.current].formKeysData.input_keys).map( - (i, k) => ( -
- - - {i} - + {tabsState[id.current]?.formKeysData?.input_keys + ? Object.keys( + tabsState[id.current].formKeysData.input_keys + ).map((i, k) => ( +
+ + + {i} + -
{ - event.stopPropagation(); +
{ + event.stopPropagation(); + }} + > + + handleOnCheckedChange(value, i) + } + size="small" + disabled={tabsState[ + id.current + ].formKeysData.handle_keys.some((t) => t === i)} + /> +
+
+ } + key={k} + keyValue={i} + > +
+ {tabsState[id.current].formKeysData.handle_keys.some( + (t) => t === i + ) && ( +
+ Source: Component +
+ )} +
- } - key={k} - keyValue={i} - > -
- {tabsState[id.current].formKeysData.handle_keys.some( - (t) => t === i - ) && ( -
- Source: Component -
- )} - -
-
-
- ) - )} +
+
+ )) + : null} {tabsState[id.current].formKeysData.memory_keys.map((i, k) => (
@@ -377,11 +399,9 @@ chat_input_field: Input key that you want the chat to send the user message with ${ tabsState[flow.id] && tabsState[flow.id].formKeysData ? `chat_inputs='${inputs}' - chat_input_field="${ - Object.keys(tabsState[flow.id].formKeysData.input_keys)[0] - }" + chat_input_field="${chat_input_field}" ` : "" - }host_url="http://localhost:7860" + }host_url="http://localhost:7860" >`; } From 3170ac665a33a0c14bf50ff35ee2568d9f6bd814 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 16:15:30 -0300 Subject: [PATCH 78/90] =?UTF-8?q?=F0=9F=93=9D=20docs(pyproject.toml):=20ad?= =?UTF-8?q?d=20documentation=20link=20to=20the=20project=20configuration?= =?UTF-8?q?=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0bf39ba76..2ff3cfeb3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7578,4 +7578,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "1329d94d3cb37062393d79da99fb3fa7d214ebdcdab6402c411561f960c6689f" +content-hash = "b571961fa0603990e0ba3347fc5000255ce703b5ea7ed167b92792c123e54e4d" diff --git a/pyproject.toml b/pyproject.toml index 31bce703d..5a22d6dc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ readme = "README.md" keywords = ["nlp", "langchain", "openai", "gpt", "gui"] packages = [{ include = "langflow", from = "src/backend" }] include = ["src/backend/langflow/*", "src/backend/langflow/**/*"] - +documentation = "https://docs.langflow.org" [tool.poetry.scripts] langflow = "langflow.__main__:main" From 4b47f9f196713df14c3a80eb51d6d780cd433f0c Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 16:35:15 -0300 Subject: [PATCH 79/90] =?UTF-8?q?=F0=9F=90=9B=20fix(endpoints.py):=20add?= 
=?UTF-8?q?=20check=20to=20skip=20empty=20custom=5Fcomponent=5Fdict=20to?= =?UTF-8?q?=20prevent=20potential=20errors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/api/v1/endpoints.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index ff8717ec0..5d0c9a900 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -51,6 +51,8 @@ def get_all(): logger.info(f"Loading {len(custom_component_dicts)} category(ies)") for custom_component_dict in custom_component_dicts: # custom_component_dict is a dict of dicts + if not custom_component_dict: + continue category = list(custom_component_dict.keys())[0] logger.info( f"Loading {len(custom_component_dict[category])} component(s) from category {category}" From 67f0ca7549ca8d0013ca241e79102b857a76d178 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 8 Aug 2023 16:50:04 -0300 Subject: [PATCH 80/90] =?UTF-8?q?=F0=9F=93=A6=20chore(pyproject.toml):=20u?= =?UTF-8?q?pdate=20orjson=20dependency=20from=20version=203.9.1=20to=203.9?= =?UTF-8?q?.3=20for=20bug=20fixes=20and=20improvements?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 77 +++++++++++++++++++++++++++++++++++++------------- pyproject.toml | 2 +- 2 files changed, 59 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2ff3cfeb3..aecc8ce57 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4074,28 +4074,67 @@ files = [ [[package]] name = "orjson" -version = "3.9.4" +version = "3.9.3" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.7" files = [ - {file = "orjson-3.9.4-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:2e83ec1ee66d83b558a6d273d8a01b86563daa60bea9bc040e2c1cb8008de61f"}, - {file = "orjson-3.9.4-cp310-none-win32.whl", hash = "sha256:04cd7f4a4f4cd2fe43d104eb70e7435c6fcbdde7aa0cde4230e444fbc66924d3"}, - {file = "orjson-3.9.4-cp310-none-win_amd64.whl", hash = "sha256:4fdb59cfa00e10c82e09d1c32a9ce08a38bd29496ba20a73cd7f498e3a0a5024"}, - {file = "orjson-3.9.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:daeed2502ddf1f2b29ec8da2fe2ea82807a5c4acf869608ce6c476db8171d070"}, - {file = "orjson-3.9.4-cp311-none-win32.whl", hash = "sha256:e12492ce65cb10f385e70a88badc6046bc720fa7d468db27b7429d85d41beaeb"}, - {file = "orjson-3.9.4-cp311-none-win_amd64.whl", hash = "sha256:3b9f8bf43a5367d5522f80e7d533c98d880868cd0b640b9088c9237306eca6e8"}, - {file = "orjson-3.9.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:0b400cf89c15958cd829c8a4ade8f5dd73588e63d2fb71a00483e7a74e9f92da"}, - {file = "orjson-3.9.4-cp312-none-win_amd64.whl", hash = "sha256:a533e664a0e3904307d662c5d45775544dc2b38df6e39e213ff6a86ceaa3d53c"}, - {file = "orjson-3.9.4-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:149d1b7630771222f73ecb024ab5dd8e7f41502402b02015494d429bacc4d5c1"}, - {file = "orjson-3.9.4-cp37-none-win32.whl", hash = "sha256:bcda6179eb863c295eb5ea832676d33ef12c04d227b4c98267876c8322e5a96e"}, - {file = "orjson-3.9.4-cp37-none-win_amd64.whl", hash = "sha256:3d947366127abef192419257eb7db7fcee0841ced2b49ccceba43b65e9ce5e3f"}, - {file = "orjson-3.9.4-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a7d029fc34a516f7eae29b778b30371fcb621134b2acfe4c51c785102aefc6cf"}, - {file = "orjson-3.9.4-cp38-none-win32.whl", hash = "sha256:94d15ee45c2aaed334688e511aa73b4681f7c08a0810884c6b3ae5824dea1222"}, - {file = "orjson-3.9.4-cp38-none-win_amd64.whl", hash = 
"sha256:336ec8471102851f0699198031924617b7a77baadea889df3ffda6000bd59f4c"}, - {file = "orjson-3.9.4-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2f57ccb50e9e123709e9f2d7b1a9e09e694e49d1fa5c5585e34b8e3f01929dc3"}, - {file = "orjson-3.9.4-cp39-none-win32.whl", hash = "sha256:b5b5038187b74e2d33e5caee8a7e83ddeb6a21da86837fa2aac95c69aeb366e6"}, - {file = "orjson-3.9.4-cp39-none-win_amd64.whl", hash = "sha256:915da36bc93ef0c659fa50fe7939d4f208804ad252fc4fc8d55adbbb82293c48"}, + {file = "orjson-3.9.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:082714b5554fcced092c45272f22a93400389733083c43f5043c4316e86f57a2"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97ddec69ca4fa1b66d512cf4f4a3fe6a57c4bf21209295ab2f4ada415996e08a"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab7501722ec2172b1c6ea333bc47bba3bbb9b5fc0e3e891191e8447f43d3187d"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ae680163ab09f04683d35fbd63eee858019f0066640f7cbad4dba3e7422a4bc"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e5abca1e0a9d110bab7346fab0acd3b7848d2ee13318bc24a31bbfbdad974b8"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c55f42a8b07cdb7d514cfaeb56f6e9029eef1cbc8e670ac31fc377c46b993cd1"}, + {file = "orjson-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:303f1324f5ea516f8e874ea0f8d15c581caabdca59fc990705fc76f3bd9f3bdf"}, + {file = "orjson-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c444e3931ea4fe7dec26d195486a681fedc0233230c9b84848f8e60affd4a4"}, + {file = "orjson-3.9.3-cp310-none-win32.whl", hash = "sha256:63333de96d83091023c9c99cc579973a2977b15feb5cdc8d9660104c886e9ab8"}, + {file = 
"orjson-3.9.3-cp310-none-win_amd64.whl", hash = "sha256:7bce6ff507a83c6a4b6b00726f3a7d7aed0b1f0884aac0440e95b55cac0b113e"}, + {file = "orjson-3.9.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ec4421f377cce51decd6ea3869a8b41e9f05c50bf6acef8284f8906e642992c4"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b3177bd67756e53bdbd72c79fae3507796a67b67c32a16f4b55cad48ef25c13"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b21908252c8a13b8f48d4cccdb7fabb592824cf39c9fa4e9076015dd65eabeba"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7b795c6ac344b0c49776b7e135a9bed0cd15b1ade2a4c7b3a19e3913247702e"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac43842f5ba26e6f21b4e63312bd1137111a9b9821d7f7dfe189a4015c6c6bc"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8def4f6560c7b6dbc4b356dfd8e6624a018d920ce5a2864291a2bf1052cd6b68"}, + {file = "orjson-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bbc0dafd1de42c8dbfd6e5d1fe4deab15d2de474e11475921286bebefd109ec8"}, + {file = "orjson-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:85b1870d5420292419b34002659082d77f31b13d4d8cbd67bed9d717c775a0fb"}, + {file = "orjson-3.9.3-cp311-none-win32.whl", hash = "sha256:d6ece3f48f14a06c325181f2b9bd9a9827aac2ecdcad11eb12f561fb697eaaaa"}, + {file = "orjson-3.9.3-cp311-none-win_amd64.whl", hash = "sha256:448feda092c681c0a5b8eec62dd4f625ad5d316dafd56c81fb3f05b5221827ff"}, + {file = "orjson-3.9.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:413d7cf731f1222373360128a3d5232d52630a7355f446bf2659fc3445ec0b76"}, + {file = "orjson-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:009a0f79804c604998b068f5f942e40546913ed45ee2f0a3d0e75695bf7543fa"}, + {file = "orjson-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ce062844255cce4d6a8a150e8e78b9fcd6c5a3f1ff3f8792922de25827c25b9c"}, + {file = "orjson-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:776659e18debe5de73c30b0957cd6454fcc61d87377fcb276441fca1b9f1305d"}, + {file = "orjson-3.9.3-cp312-none-win_amd64.whl", hash = "sha256:47b237da3818c8e546df4d2162f0a5cfd50b7b58528907919a27244141e0e48e"}, + {file = "orjson-3.9.3-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f954115d8496d4ab5975438e3ce07780c1644ea0a66c78a943ef79f33769b61a"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c57100517b6dbfe34181ed2248bebfab03bd2a7aafb6fbf849c6fd3bb2fbda"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa6017140fe487ab8fae605a2890c94c6fbe7a8e763ff33bbdb00e27ce078cfd"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fe77af2ff33c370fb06c9fdf004a66d85ea19c77f0273bbf70c70f98f832725"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2fa8c385b27bab886caa098fa3ae114d56571ae6e7a5610cb624d7b0a66faed"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8323739e7905ae4ec4dbdebb31067d28be981f30c11b6ae88ddec2671c0b3194"}, + {file = "orjson-3.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ad43fd5b1ededb54fe01e67468710fcfec8a5830e4ce131f85e741ea151a18e9"}, + {file = "orjson-3.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:42cb645780f732c829bc351346a54157d57f2bc409e671ee36b9fc1037bb77fe"}, + {file = "orjson-3.9.3-cp37-none-win32.whl", hash = "sha256:b84542669d1b0175dc2870025b73cbd4f4a3beb17796de6ec82683663e0400f3"}, + {file = "orjson-3.9.3-cp37-none-win_amd64.whl", 
hash = "sha256:1440a404ce84f43e2f8e97d8b5fe6f271458e0ffd37290dc3a9f6aa067c69930"}, + {file = "orjson-3.9.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1da8edaefb75f25b449ed4e22d00b9b49211b97dcefd44b742bdd8721d572788"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47210746acda49febe3bb07253eb5d63d7c7511beec5fa702aad3ce64e15664f"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:893c62afd5b26f04e2814dffa4d9d4060583ac43dc3e79ed3eadf62a5ac37b2c"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32aef33ae33901c327fd5679f91fa37199834d122dffd234416a6fe4193d1982"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd2761384ddb9de63b20795845d5cedadf052255a34c3ff1750cfc77b29d9926"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2502b4af2055050dcc74718f2647b65102087c6f5b3f939e2e1a3e3099602"}, + {file = "orjson-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fa7c7a39eeb8dd171f59d96fd4610f908ac14b2f2eb268f4498e5f310bda8da7"}, + {file = "orjson-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc3fe0c0ae7acf00d827efe2506131f1b19af3c87e3d76b0e081748984e51c26"}, + {file = "orjson-3.9.3-cp38-none-win32.whl", hash = "sha256:5b1ff8e920518753b310034e5796f0116f7732b0b27531012d46f0b54f3c8c85"}, + {file = "orjson-3.9.3-cp38-none-win_amd64.whl", hash = "sha256:9f2b1007174c93dd838f52e623c972df33057e3cb7ad9341b7d9bbd66b8d8fb4"}, + {file = "orjson-3.9.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cddc5b8bd7b0d1dfd36637eedbd83726b8b8a5969d3ecee70a9b54a94b8a0258"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:43c3bbf4b6f94fad2fd73c81293da8b343fbd07ce48d7836c07d0d54b58c8e93"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a5cc22ef6973992db18952f8b978781e19a0c62c098f475db936284df9311df7"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcea93630986209c690f27f32398956b04ccbba8f1fa7c3d1bb88a01d9ab87a"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:526cb34e63faaad908c34597294507b7a4b999a436b4f206bc4e60ff4e911c20"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5ac6e30ee10af57f52e72f9c8b9bc4846a9343449d10ca2ae9760615da3042"}, + {file = "orjson-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b6c37ab097c062bdf535105c7156839c4e370065c476bb2393149ad31a2cdf6e"}, + {file = "orjson-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:27d69628f449c52a7a34836b15ec948804254f7954457f88de53f2f4de99512f"}, + {file = "orjson-3.9.3-cp39-none-win32.whl", hash = "sha256:5297463d8831c2327ed22bf92eb6d50347071ff1c73fb4702d50b8bc514aeac9"}, + {file = "orjson-3.9.3-cp39-none-win_amd64.whl", hash = "sha256:69a33486b5b6e5a99939fdb13c1c0d8bcc7c89fe6083e7b9ce3c70931ca9fb71"}, + {file = "orjson-3.9.3.tar.gz", hash = "sha256:d3da4faf6398154c1e75d32778035fa7dc284814809f76e8f8d50c4f54859399"}, ] [[package]] @@ -7578,4 +7617,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "b571961fa0603990e0ba3347fc5000255ce703b5ea7ed167b92792c123e54e4d" +content-hash = "7c6d7dc33a9b0ae9da053fb78b9f2eabbe78df38c4763e5a8719df6249d6f657" diff --git a/pyproject.toml b/pyproject.toml index 5a22d6dc7..5b7054b65 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ python-multipart = "^0.0.6" sqlmodel = "^0.0.8" faiss-cpu = "^1.7.4" anthropic = "^0.3.0" -orjson = "^3.9.1" +orjson 
= "3.9.3" multiprocess = "^0.70.14" cachetools = "^5.3.1" types-cachetools = "^5.3.0.5" From 59a55b4371357baaa0600b26b96488332a44feb1 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 8 Aug 2023 21:19:22 +0100 Subject: [PATCH 81/90] =?UTF-8?q?=F0=9F=94=A7=20fix(auth.py):=20update=20i?= =?UTF-8?q?mport=20statements=20to=20reflect=20new=20file=20structure=20in?= =?UTF-8?q?=20langflow=20package=20=F0=9F=94=A7=20fix(auth.py):=20fix=20ty?= =?UTF-8?q?po=20in=20verify=5Fpassword=20function=20call=20=F0=9F=94=A7=20?= =?UTF-8?q?fix(auth.py):=20update=20get=5Fuser=20function=20call=20to=20re?= =?UTF-8?q?flect=20new=20file=20structure=20in=20langflow=20package=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fcurrent=5Fuser=20?= =?UTF-8?q?function=20signature=20to=20match=20the=20annotated=20type=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fuser=20function?= =?UTF-8?q?=20call=20to=20reflect=20new=20file=20structure=20in=20langflow?= =?UTF-8?q?=20package=20=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fcurr?= =?UTF-8?q?ent=5Fuser=20function=20signature=20to=20match=20the=20annotate?= =?UTF-8?q?d=20type=20=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fuser?= =?UTF-8?q?=20function=20call=20to=20reflect=20new=20file=20structure=20in?= =?UTF-8?q?=20langflow=20package=20=F0=9F=94=A7=20fix(auth.py):=20update?= =?UTF-8?q?=20get=5Fcurrent=5Fuser=20function=20signature=20to=20match=20t?= =?UTF-8?q?he=20annotated=20type=20=F0=9F=94=A7=20fix(auth.py):=20update?= =?UTF-8?q?=20get=5Fuser=20function=20call=20to=20reflect=20new=20file=20s?= =?UTF-8?q?tructure=20in=20langflow=20package=20=F0=9F=94=A7=20fix(auth.py?= =?UTF-8?q?):=20update=20get=5Fcurrent=5Fuser=20function=20signature=20to?= =?UTF-8?q?=20match=20the=20annotated=20type=20=F0=9F=94=A7=20fix(auth.py)?= =?UTF-8?q?:=20update=20get=5Fuser=20function=20call=20to=20reflect=20new?= =?UTF-8?q?=20file=20structure=20in=20langflow=20package=20=F0=9F=94=A7=20?= 
=?UTF-8?q?fix(auth.py):=20update=20get=5Fcurrent=5Fuser=20function=20sign?= =?UTF-8?q?ature=20to=20match=20the=20annotated=20type=20=F0=9F=94=A7=20fi?= =?UTF-8?q?x(auth.py):=20update=20get=5Fuser=20function=20call=20to=20refl?= =?UTF-8?q?ect=20new=20file=20structure=20in=20langflow=20package=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fcurrent=5Fuser=20?= =?UTF-8?q?function=20signature=20to=20match=20the=20annotated=20type=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fuser=20function?= =?UTF-8?q?=20call=20to=20reflect=20new=20file=20structure=20in=20langflow?= =?UTF-8?q?=20package=20=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fcurr?= =?UTF-8?q?ent=5Fuser=20function=20signature=20to=20match=20the=20annotate?= =?UTF-8?q?d=20type=20=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fuser?= =?UTF-8?q?=20function=20call=20to=20reflect=20new=20file=20structure=20in?= =?UTF-8?q?=20langflow=20package=20=F0=9F=94=A7=20fix(auth.py):=20update?= =?UTF-8?q?=20get=5Fcurrent=5Fuser=20function=20signature=20to=20match=20t?= =?UTF-8?q?he=20annotated=20type=20=F0=9F=94=A7=20fix(auth.py):=20update?= =?UTF-8?q?=20get=5Fuser=20function=20call=20to=20reflect=20new=20file=20s?= =?UTF-8?q?tructure=20in=20langflow=20package=20=F0=9F=94=A7=20fix(auth.py?= =?UTF-8?q?):=20update=20get=5Fcurrent=5Fuser=20function=20signature=20to?= =?UTF-8?q?=20match=20the=20annotated=20type=20=F0=9F=94=A7=20fix(auth.py)?= =?UTF-8?q?:=20update=20get=5Fuser=20function=20call=20to=20reflect=20new?= =?UTF-8?q?=20file=20structure=20in=20langflow=20package=20=F0=9F=94=A7=20?= =?UTF-8?q?fix(auth.py):=20update=20get=5Fcurrent=5Fuser=20function=20sign?= =?UTF-8?q?ature=20to=20match=20the=20annotated=20type=20=F0=9F=94=A7=20fi?= =?UTF-8?q?x(auth.py):=20update=20get=5Fuser=20function=20call=20to=20refl?= =?UTF-8?q?ect=20new=20file=20structure=20in=20langflow=20package=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fcurrent=5Fuser=20?= 
=?UTF-8?q?function=20signature=20to=20match=20the=20annotated=20type=20?= =?UTF-8?q?=F0=9F=94=A7=20fix(auth.py):=20update=20get=5Fuser=20function?= =?UTF-8?q?=20call=20to=20reflect?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/auth/auth.py | 16 ++-- .../langflow/{ => database}/models/token.py | 0 src/backend/langflow/database/models/user.py | 37 +++++++++ src/backend/langflow/main.py | 6 +- src/backend/langflow/models/__init__.py | 0 src/backend/langflow/models/base_control.py | 7 -- src/backend/langflow/models/models.py | 21 ----- src/backend/langflow/models/user.py | 17 ---- src/backend/langflow/routers/items.py | 17 ---- src/backend/langflow/routers/login.py | 15 ++-- src/backend/langflow/routers/users.py | 79 +++++++++++++++++-- 11 files changed, 130 insertions(+), 85 deletions(-) rename src/backend/langflow/{ => database}/models/token.py (100%) create mode 100644 src/backend/langflow/database/models/user.py delete mode 100644 src/backend/langflow/models/__init__.py delete mode 100644 src/backend/langflow/models/base_control.py delete mode 100644 src/backend/langflow/models/models.py delete mode 100644 src/backend/langflow/models/user.py delete mode 100644 src/backend/langflow/routers/items.py diff --git a/src/backend/langflow/auth/auth.py b/src/backend/langflow/auth/auth.py index c4b8ad5b4..e33ac64dd 100644 --- a/src/backend/langflow/auth/auth.py +++ b/src/backend/langflow/auth/auth.py @@ -4,11 +4,13 @@ from passlib.context import CryptContext from jose import JWTError, jwt from datetime import datetime, timedelta, timezone from fastapi.security import OAuth2PasswordBearer -from langflow.models.token import TokenData -from langflow.models.user import get_user, User +from langflow.database.models.token import TokenData +from langflow.database.models.user import get_user, User from sqlalchemy.orm import Session from langflow.database.base import get_session + +# TODO: Move to env - Test 
propose!!!!! SECRET_KEY = "698619adad2d916f1f32d264540976964b3c0d3828e0870a65add5800a8cc6b9" ALGORITHM = "HS256" ACCESS_TOKEN_EXPIRE_MINUTES = 30 @@ -25,7 +27,7 @@ def get_password_hash(password): return pwd_context.hash(password) -def create_access_token(data: dict, expires_delta: timedelta = None): +def create_access_token(data: dict, expires_delta: timedelta = None): # type: ignore to_encode = data.copy() if expires_delta: expire = datetime.now(timezone.utc) + expires_delta @@ -37,7 +39,7 @@ def create_access_token(data: dict, expires_delta: timedelta = None): def authenticate_user(db: Session, username: str, password: str): if user := get_user(db, username): - return user if verify_password(password, user.hashed_password) else False + return user if verify_password(password, user.password) else False else: return False @@ -52,14 +54,14 @@ async def get_current_user( ) try: payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) - username: str = payload.get("sub") + username: str = payload.get("sub") # type: ignore if username is None: raise credentials_exception token_data = TokenData(username=username) except JWTError as e: raise credentials_exception from e - user = get_user(db, username=token_data.username) + user = get_user(db, token_data.username) # type: ignore if user is None: raise credentials_exception return user @@ -68,6 +70,6 @@ async def get_current_user( async def get_current_active_user( current_user: Annotated[User, Depends(get_current_user)] ): - if current_user.disabled: + if current_user.is_disabled: raise HTTPException(status_code=400, detail="Inactive user") return current_user diff --git a/src/backend/langflow/models/token.py b/src/backend/langflow/database/models/token.py similarity index 100% rename from src/backend/langflow/models/token.py rename to src/backend/langflow/database/models/token.py diff --git a/src/backend/langflow/database/models/user.py b/src/backend/langflow/database/models/user.py new file mode 100644 index 
000000000..6e13f3e49 --- /dev/null +++ b/src/backend/langflow/database/models/user.py @@ -0,0 +1,37 @@ +from datetime import datetime +from sqlalchemy.orm import Session + +from langflow.database.models.base import SQLModelSerializable, SQLModel +from sqlmodel import Field +from uuid import UUID, uuid4 + + +class User(SQLModelSerializable, table=True): + id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) + username: str = Field(index=True, unique=True) + password: str = Field() + is_disabled: bool = Field(default=False) + is_superuser: bool = Field(default=False) + create_at: datetime = Field(default_factory=datetime.utcnow) + updated_at: datetime = Field(default_factory=datetime.utcnow) + + +class UserAddModel(SQLModel): + username: str = Field() + password: str = Field() + is_disabled: bool = Field(default=False) + is_superuser: bool = Field(default=False) + + +class UserListModel(SQLModel): + id: UUID = Field(default_factory=uuid4) + username: str = Field() + is_disabled: bool = Field() + is_superuser: bool = Field() + create_at: datetime = Field() + updated_at: datetime = Field() + + +def get_user(db: Session, username: str) -> User: + db_user = db.query(User).filter(User.username == username).first() + return User.from_orm(db_user) if db_user else None # type: ignore diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 062e0ef84..fed302603 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -6,7 +6,7 @@ from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles from langflow.api import router -from langflow.routers import login, users, items, health +from langflow.routers import login, users, health from langflow.database.base import create_db_and_tables from langflow.interface.utils import setup_llm_caching from langflow.utils.logger import configure @@ -30,7 +30,6 @@ def create_app(): app.include_router(login.router) app.include_router(users.router) - 
app.include_router(items.router) app.include_router(health.router) app.include_router(router) @@ -74,8 +73,7 @@ def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI: static_files_dir = get_static_files_dir() if not static_files_dir or not static_files_dir.exists(): - raise RuntimeError( - f"Static files directory {static_files_dir} does not exist.") + raise RuntimeError(f"Static files directory {static_files_dir} does not exist.") app = create_app() setup_static_files(app, static_files_dir) return app diff --git a/src/backend/langflow/models/__init__.py b/src/backend/langflow/models/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/langflow/models/base_control.py b/src/backend/langflow/models/base_control.py deleted file mode 100644 index 9eea9d9f0..000000000 --- a/src/backend/langflow/models/base_control.py +++ /dev/null @@ -1,7 +0,0 @@ -from pydantic import BaseModel -from datetime import datetime - - -class BaseControl(BaseModel): - created_at: datetime - updated_at: datetime diff --git a/src/backend/langflow/models/models.py b/src/backend/langflow/models/models.py deleted file mode 100644 index d86d5f7f0..000000000 --- a/src/backend/langflow/models/models.py +++ /dev/null @@ -1,21 +0,0 @@ -from sqlalchemy import Column, String, Boolean, DateTime -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import func -from sqlalchemy.dialects.postgresql import UUID -from uuid import uuid4 - -Base = declarative_base() - - -class User(Base): - __tablename__ = "users" - - id = Column( - UUID(as_uuid=True), primary_key=True, default=uuid4, unique=True, nullable=False - ) - username = Column(String, unique=True, index=True) - email = Column(String, unique=True, index=True) - disabled = Column(Boolean, default=False) - is_superuser = Column(Boolean, default=False) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column(DateTime(timezone=True), 
onupdate=func.now()) diff --git a/src/backend/langflow/models/user.py b/src/backend/langflow/models/user.py deleted file mode 100644 index b8f6a3fc4..000000000 --- a/src/backend/langflow/models/user.py +++ /dev/null @@ -1,17 +0,0 @@ -from sqlalchemy.orm import Session -from langflow.models.user import User as DBUser -from langflow.models.base_control import BaseControl -from uuid import UUID - - -class User(BaseControl): - id: UUID - username: str - email: str - disabled: bool = False - is_superuser: bool = False - - -def get_user(db: Session, user_id: UUID) -> User: - db_user = db.query(DBUser).filter(DBUser.id == user_id).first() - return User.from_orm(db_user) if db_user else None # type: ignore diff --git a/src/backend/langflow/routers/items.py b/src/backend/langflow/routers/items.py deleted file mode 100644 index 7ca1ff320..000000000 --- a/src/backend/langflow/routers/items.py +++ /dev/null @@ -1,17 +0,0 @@ -from fastapi import APIRouter, Depends -from ..models.user import User -from ..auth.auth import get_current_active_user - -router = APIRouter() - - -@router.get("/users/all/") -async def read_own_items( - current_user: User = Depends(get_current_active_user) -): - return [ - { - "item_id": "my_id", - "owner": current_user.username - } - ] diff --git a/src/backend/langflow/routers/login.py b/src/backend/langflow/routers/login.py index dba69758f..47839f6f5 100644 --- a/src/backend/langflow/routers/login.py +++ b/src/backend/langflow/routers/login.py @@ -1,34 +1,35 @@ from datetime import timedelta + from fastapi import APIRouter, Depends, HTTPException, status from fastapi.security import OAuth2PasswordRequestForm -from langflow.models.token import Token +from langflow.database.models.token import Token from langflow.auth.auth import ( ACCESS_TOKEN_EXPIRE_MINUTES, authenticate_user, create_access_token, ) + from sqlalchemy.orm import Session from langflow.database.base import get_session -TOKEN_TYPE = "bearer" - router = APIRouter() def 
create_user_token(user: str) -> dict: access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) access_token = create_access_token( - data={"sub": user.username}, expires_delta=access_token_expires + data={"sub": user.username}, expires_delta=access_token_expires # type: ignore ) - return {"access_token": access_token, "token_type": TOKEN_TYPE} + + return {"access_token": access_token, "token_type": "bearer"} @router.post("/token", response_model=Token) -async def login_for_access_token( +async def login_to_get_access_token( form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_session) ): if user := authenticate_user(db, form_data.username, form_data.password): - return create_user_token(user) + return create_user_token(user) # type: ignore else: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, diff --git a/src/backend/langflow/routers/users.py b/src/backend/langflow/routers/users.py index 1a9184ec8..f34199d5d 100644 --- a/src/backend/langflow/routers/users.py +++ b/src/backend/langflow/routers/users.py @@ -1,10 +1,79 @@ -from fastapi import APIRouter, Depends -from langflow.models.user import User +from typing import List +from sqlmodel import Session, select +from sqlalchemy.exc import IntegrityError +from fastapi import APIRouter, Depends, HTTPException + +from langflow.database.base import get_session from langflow.auth.auth import get_current_active_user +from langflow.database.models.user import UserAddModel, UserListModel, User -router = APIRouter() +from passlib.context import CryptContext + +router = APIRouter(prefix="/users", tags=["Users"]) -@router.get("/users/me/", response_model=User) -async def read_users_me(current_user: User = Depends(get_current_active_user)): +def get_password_hash(password): + pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + return pwd_context.hash(password) + + +@router.get("/user", response_model=UserListModel) +async def read_current_user(current_user: User = 
Depends(get_current_active_user)): return current_user + + +@router.get("/users", response_model=List[UserListModel]) +async def read_all_users( + skip: int = 0, + limit: int = 10, + _: Session = Depends(get_current_active_user), + db: Session = Depends(get_session), +): + query = select(User) + query = query.offset(skip).limit(limit) + + return db.execute(query).fetchall() + + +@router.post("/user", response_model=User) +async def add_user( + user: UserAddModel, + _: Session = Depends(get_current_active_user), + db: Session = Depends(get_session), +): + new_user = User(**user.dict()) + try: + new_user.password = get_password_hash(user.password) + + db.add(new_user) + db.commit() + db.refresh(new_user) + except IntegrityError as e: + db.rollback() + raise HTTPException( + status_code=400, + detail="User exists", + ) from e + + return new_user + + +# TODO: Remove - Just for testing purposes +@router.post("/super_user", response_model=User) +async def add_super_user_to_testing_purposes(db: Session = Depends(get_session)): + new_user = User(username="superuser", password="12345", is_superuser=True) + + try: + new_user.password = get_password_hash(new_user.password) + + db.add(new_user) + db.commit() + db.refresh(new_user) + except IntegrityError as e: + db.rollback() + raise HTTPException( + status_code=400, + detail="User exists", + ) from e + + return new_user From 1c289797664687375bf790a8781f793d8dda7d16 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 8 Aug 2023 22:08:51 +0100 Subject: [PATCH 82/90] =?UTF-8?q?=F0=9F=94=80=20refactor(users.py):=20rena?= =?UTF-8?q?me=20router=20prefix=20from=20"/users"=20to=20"Login"=20to=20im?= =?UTF-8?q?prove=20clarity=20and=20consistency?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/routers/users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/langflow/routers/users.py b/src/backend/langflow/routers/users.py index 
ee2f6a93c..98809f530 100644 --- a/src/backend/langflow/routers/users.py +++ b/src/backend/langflow/routers/users.py @@ -9,7 +9,7 @@ from langflow.database.models.user import UserAddModel, UserListModel, User from passlib.context import CryptContext -router = APIRouter(prefix="/users", tags=["Users"]) +router = APIRouter(tags=["Login"]) def get_password_hash(password): From 3c6d46021d69509c8e577b70097e0280b659cfa6 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 8 Aug 2023 22:09:38 +0100 Subject: [PATCH 83/90] =?UTF-8?q?=F0=9F=94=A7=20chore(user.py):=20remove?= =?UTF-8?q?=20unnecessary=20line=20breaks=20and=20import=20formatting=20fo?= =?UTF-8?q?r=20better=20code=20readability=20=F0=9F=94=A7=20chore(main.py)?= =?UTF-8?q?:=20fix=20indentation=20and=20remove=20unnecessary=20line=20bre?= =?UTF-8?q?aks=20for=20better=20code=20readability=20=F0=9F=94=A7=20chore(?= =?UTF-8?q?login.py):=20fix=20indentation=20and=20add=20missing=20line=20b?= =?UTF-8?q?reaks=20for=20better=20code=20readability?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/database/models/user.py | 5 +---- src/backend/langflow/main.py | 3 +-- src/backend/langflow/routers/login.py | 3 ++- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/database/models/user.py b/src/backend/langflow/database/models/user.py index 2d6b9bbf7..144e71fae 100644 --- a/src/backend/langflow/database/models/user.py +++ b/src/backend/langflow/database/models/user.py @@ -1,10 +1,7 @@ from datetime import datetime from sqlalchemy.orm import Session -from langflow.services.database.models.base import ( - SQLModelSerializable, - SQLModel -) +from langflow.services.database.models.base import SQLModelSerializable, SQLModel from sqlmodel import Field from uuid import UUID, uuid4 diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index e9e3dc3a1..b63caff24 100644 --- a/src/backend/langflow/main.py +++ 
b/src/backend/langflow/main.py @@ -80,8 +80,7 @@ def setup_app( static_files_dir = get_static_files_dir() if not backend_only and (not static_files_dir or not static_files_dir.exists()): - raise RuntimeError( - f"Static files directory {static_files_dir} does not exist.") + raise RuntimeError(f"Static files directory {static_files_dir} does not exist.") app = create_app() if not backend_only and static_files_dir is not None: setup_static_files(app, static_files_dir) diff --git a/src/backend/langflow/routers/login.py b/src/backend/langflow/routers/login.py index 1b889cdad..48cdd11dc 100644 --- a/src/backend/langflow/routers/login.py +++ b/src/backend/langflow/routers/login.py @@ -20,7 +20,8 @@ def create_user_token(user: str) -> dict: access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) access_token = create_access_token( # type: ignore - data={"sub": user.username}, expires_delta=access_token_expires + data={"sub": user.username}, + expires_delta=access_token_expires, ) return {"access_token": access_token, "token_type": "bearer"} From 056ce51ad0fec1d25a1b82ed66ce75871a74a5fe Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Tue, 8 Aug 2023 22:20:53 +0100 Subject: [PATCH 84/90] =?UTF-8?q?=F0=9F=90=9B=20fix(login.py):=20change=20?= =?UTF-8?q?argument=20type=20of=20create=5Fuser=5Ftoken=20function=20from?= =?UTF-8?q?=20str=20to=20User=20to=20improve=20type=20safety=20and=20clari?= =?UTF-8?q?ty=20=F0=9F=90=9B=20fix(login.py):=20remove=20unnecessary=20typ?= =?UTF-8?q?e=20ignore=20comment=20in=20create=5Fuser=5Ftoken=20function=20?= =?UTF-8?q?=F0=9F=90=9B=20fix(login.py):=20remove=20unnecessary=20type=20i?= =?UTF-8?q?gnore=20comment=20in=20return=20statement=20of=20login=5Fto=5Fg?= =?UTF-8?q?et=5Faccess=5Ftoken=20function=20=F0=9F=90=9B=20fix(users.py):?= =?UTF-8?q?=20remove=20unnecessary=20async=20keyword=20from=20read=5Fcurre?= =?UTF-8?q?nt=5Fuser=20function=20=F0=9F=90=9B=20fix(users.py):=20remove?= 
=?UTF-8?q?=20unnecessary=20async=20keyword=20from=20read=5Fall=5Fusers=20?= =?UTF-8?q?function=20=F0=9F=90=9B=20fix(users.py):=20remove=20unnecessary?= =?UTF-8?q?=20async=20keyword=20from=20add=5Fuser=20function=20?= =?UTF-8?q?=F0=9F=90=9B=20fix(users.py):=20remove=20unnecessary=20async=20?= =?UTF-8?q?keyword=20from=20add=5Fsuper=5Fuser=5Fto=5Ftesting=5Fpurposes?= =?UTF-8?q?=20function?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/backend/langflow/routers/login.py | 6 +++--- src/backend/langflow/routers/users.py | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/backend/langflow/routers/login.py b/src/backend/langflow/routers/login.py index 48cdd11dc..3cb71a82e 100644 --- a/src/backend/langflow/routers/login.py +++ b/src/backend/langflow/routers/login.py @@ -11,15 +11,15 @@ from langflow.auth.auth import ( from sqlalchemy.orm import Session from langflow.services.utils import get_session +from langflow.database.models.user import User router = APIRouter() -def create_user_token(user: str) -> dict: +def create_user_token(user: User) -> dict: access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) access_token = create_access_token( - # type: ignore data={"sub": user.username}, expires_delta=access_token_expires, ) @@ -32,7 +32,7 @@ async def login_to_get_access_token( form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_session) ): if user := authenticate_user(db, form_data.username, form_data.password): - return create_user_token(user) # type: ignore + return create_user_token(user) else: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, diff --git a/src/backend/langflow/routers/users.py b/src/backend/langflow/routers/users.py index 98809f530..bcf1a7075 100644 --- a/src/backend/langflow/routers/users.py +++ b/src/backend/langflow/routers/users.py @@ -18,12 +18,12 @@ def get_password_hash(password): @router.get("/user", 
response_model=UserListModel) -async def read_current_user(current_user: User = Depends(get_current_active_user)): +def read_current_user(current_user: User = Depends(get_current_active_user)): return current_user @router.get("/users", response_model=List[UserListModel]) -async def read_all_users( +def read_all_users( skip: int = 0, limit: int = 10, _: Session = Depends(get_current_active_user), @@ -36,7 +36,7 @@ async def read_all_users( @router.post("/user", response_model=User) -async def add_user( +def add_user( user: UserAddModel, _: Session = Depends(get_current_active_user), db: Session = Depends(get_session), @@ -60,7 +60,7 @@ async def add_user( # TODO: Remove - Just for testing purposes @router.post("/super_user", response_model=User) -async def add_super_user_to_testing_purposes(db: Session = Depends(get_session)): +def add_super_user_to_testing_purposes(db: Session = Depends(get_session)): new_user = User(username="superuser", password="12345", is_superuser=True) try: From f822581df3eb9c34eec5e76e04d137a38283afc4 Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 9 Aug 2023 00:47:11 +0100 Subject: [PATCH 85/90] =?UTF-8?q?=F0=9F=94=A7=20chore(pyproject.toml):=20a?= =?UTF-8?q?dd=20passlib=20and=20bcrypt=20dependencies=20to=20improve=20pas?= =?UTF-8?q?sword=20hashing=20and=20security?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 15 ++++++++++++++- pyproject.toml | 2 ++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 745b9be4d..275706d6e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3066,10 +3066,13 @@ files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = 
"lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -3078,6 +3081,7 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = 
"lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -3097,6 +3101,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -3106,6 +3111,7 @@ files = [ {file = 
"lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -3115,6 +3121,7 @@ files = [ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = 
"lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -3124,6 +3131,7 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -3134,13 +3142,16 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = 
"lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = 
"sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -4401,6 +4412,7 @@ files = [ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, + {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, @@ -4410,6 +4422,7 @@ files = [ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, + {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, 
{file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, @@ -7668,4 +7681,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "7c6d7dc33a9b0ae9da053fb78b9f2eabbe78df38c4763e5a8719df6249d6f657" +content-hash = "b3bfdb00177ebfac66bd91d306468a280ccff79e6a95fe7d4894d0c7c6ce44e6" diff --git a/pyproject.toml b/pyproject.toml index 5b7054b65..be1b6be2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,8 @@ psycopg-binary = "^3.1.9" fastavro = "^1.8.0" langchain-experimental = "^0.0.8" alembic = "^1.11.2" +passlib = "^1.7.4" +bcrypt = "^4.0.1" [tool.poetry.group.dev.dependencies] black = "^23.1.0" From fccb00424833b47ff3028a825a1fa8e4ff129503 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 8 Aug 2023 20:53:39 -0300 Subject: [PATCH 86/90] format code and fix merge errors generated after conflict --- .../components/parameterComponent/index.tsx | 3 +- .../src/CustomNodes/GenericNode/index.tsx | 23 +- .../components/codeTabsComponent/index.tsx | 319 +++++++++++------- .../src/components/headerComponent/index.tsx | 5 +- src/frontend/src/constants/constants.ts | 98 +++--- src/frontend/src/contexts/tabsContext.tsx | 4 +- src/frontend/src/contexts/undoRedoContext.tsx | 8 +- .../src/modals/EditNodeModal/index.tsx | 152 ++++++--- src/frontend/src/modals/exportModal/index.tsx | 4 +- .../src/modals/flowSettingsModal/index.tsx | 4 +- src/frontend/src/modals/formModal/index.tsx | 88 ++--- .../components/PageComponent/index.tsx | 5 +- .../extraSidebarComponent/index.tsx | 3 +- 13 files changed, 439 insertions(+), 277 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 42942a360..f25dd5185 100644 --- 
a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -69,7 +69,8 @@ export default function ParameterComponent({ const { reactFlowInstance } = useContext(typesContext); let disabled = - reactFlowInstance?.getEdges().some((edge) => edge.targetHandle === id) ?? false; + reactFlowInstance?.getEdges().some((edge) => edge.targetHandle === id) ?? + false; const { data: myData } = useContext(typesContext); diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 61717ad1d..737a02fc3 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -166,8 +166,9 @@ export default function GenericNode({ !data.node.template[templateName].advanced ? ( ) : ( <> diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index e92d80b20..eabb2db1b 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -207,7 +207,9 @@ export default function CodeTabsComponent({ > {data.map((node: any, index) => (
- {tweaks.tweaksList.current.includes(node["data"]["id"]) && ( + {tweaks.tweaksList.current.includes( + node["data"]["id"] + ) && ( templateName.charAt(0) !== "_" && - node.data.node.template[templateName].show && - (node.data.node.template[templateName].type === "str" || - node.data.node.template[templateName].type === - "bool" || - node.data.node.template[templateName].type === - "float" || - node.data.node.template[templateName].type === - "code" || - node.data.node.template[templateName].type === - "prompt" || - node.data.node.template[templateName].type === - "file" || - node.data.node.template[templateName].type === "int") + node.data.node.template[templateName] + .show && + (node.data.node.template[templateName] + .type === "str" || + node.data.node.template[templateName] + .type === "bool" || + node.data.node.template[templateName] + .type === "float" || + node.data.node.template[templateName] + .type === "code" || + node.data.node.template[templateName] + .type === "prompt" || + node.data.node.template[templateName] + .type === "file" || + node.data.node.template[templateName] + .type === "int") ) .map((templateName, index) => { return ( @@ -255,22 +260,29 @@ export default function CodeTabsComponent({
- {node.data.node.template[templateName].type === - "str" && - !node.data.node.template[templateName].options ? ( + {node.data.node.template[ + templateName + ].type === "str" && + !node.data.node.template[ + templateName + ].options ? (
- {node.data.node.template[templateName] - .list ? ( + {node.data.node.template[ + templateName + ].list ? ( - ) : node.data.node.template[templateName] - .multiline ? ( + ) : node.data.node.template[ + templateName + ].multiline ? (
@@ -305,16 +321,19 @@ export default function CodeTabsComponent({ disabled={false} editNode={true} value={ - !node.data.node.template[ + !node.data.node + .template[ templateName ].value || - node.data.node.template[ + node.data.node + .template[ templateName ].value === "" ? "" : node.data.node - .template[templateName] - .value + .template[ + templateName + ].value } onChange={(target) => { setData((old) => { @@ -331,7 +350,9 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node - .template[templateName] + .template[ + templateName + ] ); }} /> @@ -342,16 +363,20 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} password={ - node.data.node.template[templateName] - .password ?? false + node.data.node.template[ + templateName + ].password ?? false } value={ - !node.data.node.template[templateName] - .value || - node.data.node.template[templateName] - .value === "" + !node.data.node.template[ + templateName + ].value || + node.data.node.template[ + templateName + ].value === "" ? "" - : node.data.node.template[ + : node.data.node + .template[ templateName ].value } @@ -369,20 +394,24 @@ export default function CodeTabsComponent({ tweaks.buildTweakObject( node["data"]["id"], target, - node.data.node.template[templateName] + node.data.node.template[ + templateName + ] ); }} /> )}
- ) : node.data.node.template[templateName].type === - "bool" ? ( + ) : node.data.node.template[ + templateName + ].type === "bool" ? (
{" "} { setData((old) => { @@ -398,24 +427,30 @@ export default function CodeTabsComponent({ tweaks.buildTweakObject( node["data"]["id"], e, - node.data.node.template[templateName] + node.data.node.template[ + templateName + ] ); }} size="small" disabled={false} />
- ) : node.data.node.template[templateName].type === - "file" ? ( + ) : node.data.node.template[ + templateName + ].type === "file" ? (
@@ -423,17 +458,22 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} value={ - node.data.node.template[templateName] - .value ?? "" + node.data.node.template[ + templateName + ].value ?? "" } - onChange={(target: any) => {}} + onChange={( + target: any + ) => {}} fileTypes={ - node.data.node.template[templateName] - .fileTypes + node.data.node.template[ + templateName + ].fileTypes } suffixes={ - node.data.node.template[templateName] - .suffixes + node.data.node.template[ + templateName + ].suffixes } onFileChange={( value: any @@ -445,20 +485,24 @@ export default function CodeTabsComponent({ >
- ) : node.data.node.template[templateName].type === - "float" ? ( + ) : node.data.node.template[ + templateName + ].type === "float" ? (
{ setData((old) => { @@ -474,22 +518,27 @@ export default function CodeTabsComponent({ tweaks.buildTweakObject( node["data"]["id"], target, - node.data.node.template[templateName] + node.data.node.template[ + templateName + ] ); }} />
- ) : node.data.node.template[templateName].type === - "str" && - node.data.node.template[templateName] - .options ? ( + ) : node.data.node.template[ + templateName + ].type === "str" && + node.data.node.template[ + templateName + ].options ? (
{ setData((old) => { @@ -505,34 +554,43 @@ export default function CodeTabsComponent({ tweaks.buildTweakObject( node["data"]["id"], target, - node.data.node.template[templateName] + node.data.node.template[ + templateName + ] ); }} value={ - !node.data.node.template[templateName] - .value || - node.data.node.template[templateName] - .value === "" + !node.data.node.template[ + templateName + ].value || + node.data.node.template[ + templateName + ].value === "" ? "" - : node.data.node.template[templateName] - .value + : node.data.node.template[ + templateName + ].value } >
- ) : node.data.node.template[templateName].type === - "int" ? ( + ) : node.data.node.template[ + templateName + ].type === "int" ? (
{ setData((old) => { @@ -548,22 +606,28 @@ export default function CodeTabsComponent({ tweaks.buildTweakObject( node["data"]["id"], target, - node.data.node.template[templateName] + node.data.node.template[ + templateName + ] ); }} />
- ) : node.data.node.template[templateName].type === - "prompt" ? ( + ) : node.data.node.template[ + templateName + ].type === "prompt" ? (
@@ -571,12 +635,15 @@ export default function CodeTabsComponent({ editNode={true} disabled={false} value={ - !node.data.node.template[templateName] - .value || - node.data.node.template[templateName] - .value === "" + !node.data.node.template[ + templateName + ].value || + node.data.node.template[ + templateName + ].value === "" ? "" - : node.data.node.template[ + : node.data.node + .template[ templateName ].value } @@ -594,21 +661,27 @@ export default function CodeTabsComponent({ tweaks.buildTweakObject( node["data"]["id"], target, - node.data.node.template[templateName] + node.data.node.template[ + templateName + ] ); }} />
- ) : node.data.node.template[templateName].type === - "code" ? ( + ) : node.data.node.template[ + templateName + ].type === "code" ? ( @@ -617,12 +690,15 @@ export default function CodeTabsComponent({ disabled={false} editNode={true} value={ - !node.data.node.template[templateName] - .value || - node.data.node.template[templateName] - .value === "" + !node.data.node.template[ + templateName + ].value || + node.data.node.template[ + templateName + ].value === "" ? "" - : node.data.node.template[ + : node.data.node + .template[ templateName ].value } @@ -640,14 +716,17 @@ export default function CodeTabsComponent({ tweaks.buildTweakObject( node["data"]["id"], target, - node.data.node.template[templateName] + node.data.node.template[ + templateName + ] ); }} />
- ) : node.data.node.template[templateName].type === - "Any" ? ( + ) : node.data.node.template[ + templateName + ].type === "Any" ? ( "-" ) : (
diff --git a/src/frontend/src/components/headerComponent/index.tsx b/src/frontend/src/components/headerComponent/index.tsx index 545a49593..8acb9583e 100644 --- a/src/frontend/src/components/headerComponent/index.tsx +++ b/src/frontend/src/components/headerComponent/index.tsx @@ -34,9 +34,8 @@ export default function Header() { ⛓️ - {flows.findIndex((flow) => tabId === flow.id) !== -1 && tabId !== "" && ( - - )} + {flows.findIndex((flow) => tabId === flow.id) !== -1 && + tabId !== "" && }
diff --git a/src/frontend/src/constants/constants.ts b/src/frontend/src/constants/constants.ts index 2ac8df4fd..ab9de7f63 100644 --- a/src/frontend/src/constants/constants.ts +++ b/src/frontend/src/constants/constants.ts @@ -509,49 +509,12 @@ export const URL_EXCLUDED_FROM_ERROR_RETRIES = [ "/api/v1/validate/prompt", ]; -export const tabsCode = [] +export const tabsCode = []; export function tabsArray(codes: string[], method: number) { - if (!method) return + if (!method) return; if (method === 0) { - return ( - [ - { - name: "cURL", - mode: "bash", - image: "https://curl.se/logo/curl-symbol-transparent.png", - language: "sh", - code: codes[0], - }, - { - name: "Python API", - mode: "python", - image: - "https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w", - language: "py", - code: codes[1], - }, - { - name: "Python Code", - mode: "python", - image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", - language: "py", - code: codes[2], - }, - { - name: "Chat Widget HTML", - description: - "Insert this code anywhere in your <body> tag. 
To use with react and other libs, check our documentation.", - mode: "html", - image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", - language: "py", - code: codes[3], - }, - ] - ); - } - return ( - [ + return [ { name: "cURL", mode: "bash", @@ -570,8 +533,8 @@ export function tabsArray(codes: string[], method: number) { { name: "Python Code", mode: "python", - language: "py", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + language: "py", code: codes[2], }, { @@ -583,13 +546,46 @@ export function tabsArray(codes: string[], method: number) { language: "py", code: codes[3], }, - { - name: "Tweaks", - mode: "python", - image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", - language: "py", - code: codes[4], - }, - ] - ); -}; + ]; + } + return [ + { + name: "cURL", + mode: "bash", + image: "https://curl.se/logo/curl-symbol-transparent.png", + language: "sh", + code: codes[0], + }, + { + name: "Python API", + mode: "python", + image: + "https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w", + language: "py", + code: codes[1], + }, + { + name: "Python Code", + mode: "python", + language: "py", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + code: codes[2], + }, + { + name: "Chat Widget HTML", + description: + "Insert this code anywhere in your <body> tag. 
To use with react and other libs, check our documentation.", + mode: "html", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + language: "py", + code: codes[3], + }, + { + name: "Tweaks", + mode: "python", + image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", + language: "py", + code: codes[4], + }, + ]; +} diff --git a/src/frontend/src/contexts/tabsContext.tsx b/src/frontend/src/contexts/tabsContext.tsx index 2cc4adc2b..5adc72e72 100644 --- a/src/frontend/src/contexts/tabsContext.tsx +++ b/src/frontend/src/contexts/tabsContext.tsx @@ -318,7 +318,9 @@ export function TabsProvider({ children }: { children: ReactNode }) { // add a change event listener to the file input input.onchange = (event: Event) => { // check if the file type is application/json - if ((event.target as HTMLInputElement).files[0].type === "application/json") { + if ( + (event.target as HTMLInputElement).files[0].type === "application/json" + ) { // get the file from the file input const file = (event.target as HTMLInputElement).files[0]; // read the file as text diff --git a/src/frontend/src/contexts/undoRedoContext.tsx b/src/frontend/src/contexts/undoRedoContext.tsx index 218a5ec8a..e2473c39e 100644 --- a/src/frontend/src/contexts/undoRedoContext.tsx +++ b/src/frontend/src/contexts/undoRedoContext.tsx @@ -50,8 +50,12 @@ export function UndoRedoProvider({ children }) { useEffect(() => { // whenever the flows variable changes, we need to add one array to the past and future states - setPast((old) => flows.map((flow, index) => (old[index] ? old[index] : []))); - setFuture((old) => flows.map((flow, index) => (old[index] ? old[index] : []))); + setPast((old) => + flows.map((flow, index) => (old[index] ? old[index] : [])) + ); + setFuture((old) => + flows.map((flow, index) => (old[index] ? 
old[index] : [])) + ); setTabIndex(flows.findIndex((flow) => flow.id === tabId)); }, [flows, tabId]); diff --git a/src/frontend/src/modals/EditNodeModal/index.tsx b/src/frontend/src/modals/EditNodeModal/index.tsx index 9447f21ac..36c38e963 100644 --- a/src/frontend/src/modals/EditNodeModal/index.tsx +++ b/src/frontend/src/modals/EditNodeModal/index.tsx @@ -49,13 +49,15 @@ const EditNodeModal = forwardRef( const { reactFlowInstance } = useContext(typesContext); let disabled = - reactFlowInstance?.getEdges().some((edge) => edge.targetHandle === data.id) ?? - false; + reactFlowInstance + ?.getEdges() + .some((edge) => edge.targetHandle === data.id) ?? false; function changeAdvanced(templateParam) { setMyData((old) => { let newData = cloneDeep(old); - newData.node.template[templateParam].advanced = !newData.node.template[templateParam].advanced; + newData.node.template[templateParam].advanced = + !newData.node.template[templateParam].advanced; return newData; }); } @@ -115,23 +117,32 @@ const EditNodeModal = forwardRef( (templateParam) => templateParam.charAt(0) !== "_" && myData.node.template[templateParam].show && - (myData.node.template[templateParam].type === "str" || - myData.node.template[templateParam].type === "bool" || - myData.node.template[templateParam].type === "float" || - myData.node.template[templateParam].type === "code" || - myData.node.template[templateParam].type === "prompt" || - myData.node.template[templateParam].type === "file" || - myData.node.template[templateParam].type === "int") + (myData.node.template[templateParam].type === + "str" || + myData.node.template[templateParam].type === + "bool" || + myData.node.template[templateParam].type === + "float" || + myData.node.template[templateParam].type === + "code" || + myData.node.template[templateParam].type === + "prompt" || + myData.node.template[templateParam].type === + "file" || + myData.node.template[templateParam].type === + "int") ) .map((templateParam, index) => ( 
{myData.node.template[templateParam].name ? myData.node.template[templateParam].name - : myData.node.template[templateParam].display_name} + : myData.node.template[templateParam] + .display_name} - {myData.node.template[templateParam].type === "str" && + {myData.node.template[templateParam].type === + "str" && !myData.node.template[templateParam].options ? (
{myData.node.template[templateParam].list ? ( @@ -139,21 +150,26 @@ const EditNodeModal = forwardRef( editNode={true} disabled={disabled} value={ - !myData.node.template[templateParam].value || - myData.node.template[templateParam].value === "" + !myData.node.template[templateParam] + .value || + myData.node.template[templateParam] + .value === "" ? [""] - : myData.node.template[templateParam].value + : myData.node.template[templateParam] + .value } onChange={(value: string[]) => { handleOnNewValue(value, templateParam); }} /> - ) : myData.node.template[templateParam].multiline ? ( + ) : myData.node.template[templateParam] + .multiline ? ( { handleOnNewValue(value, templateParam); @@ -164,11 +180,12 @@ const EditNodeModal = forwardRef( editNode={true} disabled={disabled} password={ - myData.node.template[templateParam].password ?? - false + myData.node.template[templateParam] + .password ?? false } value={ - myData.node.template[templateParam].value ?? "" + myData.node.template[templateParam] + .value ?? "" } onChange={(value) => { handleOnNewValue(value, templateParam); @@ -176,73 +193,104 @@ const EditNodeModal = forwardRef( /> )}
- ) : myData.node.template[templateParam].type === "bool" ? ( + ) : myData.node.template[templateParam].type === + "bool" ? (
{" "} { - handleOnNewValue(isEnabled, templateParam); + handleOnNewValue( + isEnabled, + templateParam + ); }} size="small" />
- ) : myData.node.template[templateParam].type === "float" ? ( + ) : myData.node.template[templateParam].type === + "float" ? (
{ handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[templateParam].type === "str" && + ) : myData.node.template[templateParam].type === + "str" && myData.node.template[templateParam].options ? (
handleOnNewValue(value, templateParam)} + options={ + myData.node.template[templateParam] + .options + } + onSelect={(value) => + handleOnNewValue(value, templateParam) + } value={ - myData.node.template[templateParam].value ?? - "Choose an option" + myData.node.template[templateParam] + .value ?? "Choose an option" } >
- ) : myData.node.template[templateParam].type === "int" ? ( + ) : myData.node.template[templateParam].type === + "int" ? (
{ handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[templateParam].type === "file" ? ( + ) : myData.node.template[templateParam].type === + "file" ? (
{ handleOnNewValue(value, templateParam); }} fileTypes={ - myData.node.template[templateParam].fileTypes + myData.node.template[templateParam] + .fileTypes + } + suffixes={ + myData.node.template[templateParam] + .suffixes } - suffixes={myData.node.template[templateParam].suffixes} onFileChange={(filePath: string) => { - data.node.template[templateParam].file_path = filePath; + data.node.template[ + templateParam + ].file_path = filePath; }} >
- ) : myData.node.template[templateParam].type === "prompt" ? ( + ) : myData.node.template[templateParam].type === + "prompt" ? (
{ myData.node = nodeClass; }} - value={myData.node.template[templateParam].value ?? ""} + value={ + myData.node.template[templateParam] + .value ?? "" + } onChange={(value: string) => { handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[templateParam].type === "code" ? ( + ) : myData.node.template[templateParam].type === + "code" ? (
{ data.node = nodeClass; @@ -270,13 +323,17 @@ const EditNodeModal = forwardRef( nodeClass={data.node} disabled={disabled} editNode={true} - value={myData.node.template[templateParam].value ?? ""} + value={ + myData.node.template[templateParam] + .value ?? "" + } onChange={(value: string) => { handleOnNewValue(value, templateParam); }} />
- ) : myData.node.template[templateParam].type === "Any" ? ( + ) : myData.node.template[templateParam].type === + "Any" ? ( "-" ) : (
@@ -285,8 +342,13 @@ const EditNodeModal = forwardRef(
changeAdvanced(templateParam)} + enabled={ + !myData.node.template[templateParam] + .advanced + } + setEnabled={(e) => + changeAdvanced(templateParam) + } disabled={disabled} size="small" /> diff --git a/src/frontend/src/modals/exportModal/index.tsx b/src/frontend/src/modals/exportModal/index.tsx index d3bbbaa64..bc75c2b34 100644 --- a/src/frontend/src/modals/exportModal/index.tsx +++ b/src/frontend/src/modals/exportModal/index.tsx @@ -12,7 +12,9 @@ const ExportModal = forwardRef((props: { children: ReactNode }, ref) => { const { flows, tabId, updateFlow, downloadFlow, saveFlow } = useContext(TabsContext); const [checked, setChecked] = useState(false); - const [name, setName] = useState(flows.find((flow) => flow.id === tabId).name); + const [name, setName] = useState( + flows.find((flow) => flow.id === tabId).name + ); const [invalidName, setInvalidName] = useState(false); const [description, setDescription] = useState( flows.find((flow) => flow.id === tabId).description diff --git a/src/frontend/src/modals/flowSettingsModal/index.tsx b/src/frontend/src/modals/flowSettingsModal/index.tsx index 12db76931..0005acbc6 100644 --- a/src/frontend/src/modals/flowSettingsModal/index.tsx +++ b/src/frontend/src/modals/flowSettingsModal/index.tsx @@ -19,7 +19,9 @@ export default function FlowSettingsModal({ const { flows, tabId, updateFlow, setTabsState, saveFlow } = useContext(TabsContext); const maxLength = 50; - const [name, setName] = useState(flows.find((flow) => flow.id === tabId).name); + const [name, setName] = useState( + flows.find((flow) => flow.id === tabId).name + ); const [description, setDescription] = useState( flows.find((flow) => flow.id === tabId).description ); diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index 989426d6f..6dd426045 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -70,9 +70,9 @@ export default function FormModal({ const 
[chatKey, setChatKey] = useState(() => { if (tabsState[flow.id]?.formKeysData?.input_keys) { return Object.keys(tabsState[flow.id].formKeysData.input_keys).find( - (k) => - !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) && - tabsState[flow.id].formKeysData.input_keys[k] === "" + (key) => + !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === key) && + tabsState[flow.id].formKeysData.input_keys[key] === "" ); } // TODO: return a sensible default @@ -425,13 +425,13 @@ export default function FormModal({ {tabsState[id.current]?.formKeysData?.input_keys ? Object.keys( tabsState[id.current].formKeysData.input_keys - ).map((i, k) => ( -
+ ).map((key, index) => ( +
- {i} + {key}
- handleOnCheckedChange(value, i) + handleOnCheckedChange(value, key) } size="small" disabled={tabsState[ id.current - ].formKeysData.handle_keys.some((t) => t === i)} + ].formKeysData.handle_keys.some( + (t) => t === key + )} />
} - key={k} - keyValue={i} + key={index} + keyValue={key} >
{tabsState[id.current].formKeysData.handle_keys.some( - (t) => t === i + (t) => t === key ) && (
Source: Component @@ -467,18 +469,18 @@ export default function FormModal({
@@ -486,35 +488,37 @@ export default function FormModal({
)) : null} - {tabsState[id.current].formKeysData.memory_keys.map((i, k) => ( -
- - - {key} - -
- {}} - size="small" - disabled={true} - /> + {tabsState[id.current].formKeysData.memory_keys.map( + (key, index) => ( +
+ + + {key} + +
+ {}} + size="small" + disabled={true} + /> +
+
+ } + key={index} + keyValue={key} + > +
+
+ Source: Memory
- } - key={index} - keyValue={key} - > -
-
- Source: Memory -
-
- -
- ))} +
+
+ ) + )}
diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index 63e89e883..a15b99e58 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -303,7 +303,10 @@ export default function Page({ flow }: { flow: FlowType }) { takeSnapshot(); setEdges( edges.filter( - (edge) => !mynodes.some((node) => edge.source === node.id || edge.target === node.id) + (edge) => + !mynodes.some( + (node) => edge.source === node.id || edge.target === node.id + ) ) ); }, diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index 924168f9d..e6d5a7010 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -168,7 +168,8 @@ export default function ExtraSidebar() { key={index} button={{ title: nodeNames[SBSectionName] ?? nodeNames.unknown, - Icon: nodeIconsLucide[SBSectionName] ?? nodeIconsLucide.unknown, + Icon: + nodeIconsLucide[SBSectionName] ?? nodeIconsLucide.unknown, }} >
From d243829893eb65e8b8dba857ff4770ad3a2ffcba Mon Sep 17 00:00:00 2001 From: gustavoschaedler Date: Wed, 9 Aug 2023 00:56:07 +0100 Subject: [PATCH 87/90] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20chore(pyproject.toml?= =?UTF-8?q?):=20upgrade=20python-jose=20dependency=20to=20version=203.3.0?= =?UTF-8?q?=20to=20ensure=20compatibility=20and=20take=20advantage=20of=20?= =?UTF-8?q?new=20features?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 2 +- pyproject.toml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 275706d6e..6658c86e9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7681,4 +7681,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "b3bfdb00177ebfac66bd91d306468a280ccff79e6a95fe7d4894d0c7c6ce44e6" +content-hash = "0549d60fd69bb53ccdc466b76e05647ceb5502dcd01f6040e82eb9d7d9050292" diff --git a/pyproject.toml b/pyproject.toml index be1b6be2c..34763d2e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,6 +80,7 @@ langchain-experimental = "^0.0.8" alembic = "^1.11.2" passlib = "^1.7.4" bcrypt = "^4.0.1" +python-jose = "^3.3.0" [tool.poetry.group.dev.dependencies] black = "^23.1.0" From 9b523ad11c01340d2f00da7b19a621a7017cbb56 Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 8 Aug 2023 20:59:28 -0300 Subject: [PATCH 88/90] update templateName variable to templateField --- .../src/CustomNodes/GenericNode/index.tsx | 48 ++--- .../components/codeTabsComponent/index.tsx | 166 +++++++++--------- src/frontend/src/modals/ApiModal/index.tsx | 20 +-- .../components/nodeToolbarComponent/index.tsx | 22 +-- 4 files changed, 128 insertions(+), 128 deletions(-) diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 737a02fc3..690ace7af 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ 
b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -159,18 +159,18 @@ export default function GenericNode({ <> {Object.keys(data.node.template) - .filter((templateName) => templateName.charAt(0) !== "_") - .map((templateName: string, idx) => ( + .filter((templateField) => templateField.charAt(0) !== "_") + .map((templateField: string, idx) => (
- {data.node.template[templateName].show && - !data.node.template[templateName].advanced ? ( + {data.node.template[templateField].show && + !data.node.template[templateField].advanced ? ( ) : ( diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index eabb2db1b..62cc87f36 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -230,60 +230,60 @@ export default function CodeTabsComponent({ {Object.keys(node["data"]["node"]["template"]) .filter( - (templateName) => - templateName.charAt(0) !== "_" && - node.data.node.template[templateName] + (templateField) => + templateField.charAt(0) !== "_" && + node.data.node.template[templateField] .show && - (node.data.node.template[templateName] + (node.data.node.template[templateField] .type === "str" || - node.data.node.template[templateName] + node.data.node.template[templateField] .type === "bool" || - node.data.node.template[templateName] + node.data.node.template[templateField] .type === "float" || - node.data.node.template[templateName] + node.data.node.template[templateField] .type === "code" || - node.data.node.template[templateName] + node.data.node.template[templateField] .type === "prompt" || - node.data.node.template[templateName] + node.data.node.template[templateField] .type === "file" || - node.data.node.template[templateName] + node.data.node.template[templateField] .type === "int") ) - .map((templateName, index) => { + .map((templateField, index) => { return ( - {templateName} + {templateField}
{node.data.node.template[ - templateName + templateField ].type === "str" && !node.data.node.template[ - templateName + templateField ].options ? (
{node.data.node.template[ - templateName + templateField ].list ? ( { @@ -293,7 +293,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -301,18 +301,18 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node.template[ - templateName + templateField ] ); }} /> ) : node.data.node.template[ - templateName + templateField ].multiline ? ( @@ -323,16 +323,16 @@ export default function CodeTabsComponent({ value={ !node.data.node .template[ - templateName + templateField ].value || node.data.node .template[ - templateName + templateField ].value === "" ? "" : node.data.node .template[ - templateName + templateField ].value } onChange={(target) => { @@ -342,7 +342,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -351,7 +351,7 @@ export default function CodeTabsComponent({ target, node.data.node .template[ - templateName + templateField ] ); }} @@ -364,20 +364,20 @@ export default function CodeTabsComponent({ disabled={false} password={ node.data.node.template[ - templateName + templateField ].password ?? false } value={ !node.data.node.template[ - templateName + templateField ].value || node.data.node.template[ - templateName + templateField ].value === "" ? "" : node.data.node .template[ - templateName + templateField ].value } onChange={(target) => { @@ -387,7 +387,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -395,7 +395,7 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node.template[ - templateName + templateField ] ); }} @@ -403,14 +403,14 @@ export default function CodeTabsComponent({ )}
) : node.data.node.template[ - templateName + templateField ].type === "bool" ? (
{" "} { @@ -420,7 +420,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = e; return newInputList; }); @@ -428,7 +428,7 @@ export default function CodeTabsComponent({ node["data"]["id"], e, node.data.node.template[ - templateName + templateField ] ); }} @@ -437,19 +437,19 @@ export default function CodeTabsComponent({ />
) : node.data.node.template[ - templateName + templateField ].type === "file" ? ( @@ -459,7 +459,7 @@ export default function CodeTabsComponent({ disabled={false} value={ node.data.node.template[ - templateName + templateField ].value ?? "" } onChange={( @@ -467,26 +467,26 @@ export default function CodeTabsComponent({ ) => {}} fileTypes={ node.data.node.template[ - templateName + templateField ].fileTypes } suffixes={ node.data.node.template[ - templateName + templateField ].suffixes } onFileChange={( value: any ) => { node.data.node.template[ - templateName + templateField ].file_path = value; }} >
) : node.data.node.template[ - templateName + templateField ].type === "float" ? (
{ @@ -511,7 +511,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -519,17 +519,17 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node.template[ - templateName + templateField ] ); }} />
) : node.data.node.template[ - templateName + templateField ].type === "str" && node.data.node.template[ - templateName + templateField ].options ? (
{ @@ -547,7 +547,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -555,26 +555,26 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node.template[ - templateName + templateField ] ); }} value={ !node.data.node.template[ - templateName + templateField ].value || node.data.node.template[ - templateName + templateField ].value === "" ? "" : node.data.node.template[ - templateName + templateField ].value } >
) : node.data.node.template[ - templateName + templateField ].type === "int" ? (
{ @@ -599,7 +599,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -607,26 +607,26 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node.template[ - templateName + templateField ] ); }} />
) : node.data.node.template[ - templateName + templateField ].type === "prompt" ? ( @@ -636,15 +636,15 @@ export default function CodeTabsComponent({ disabled={false} value={ !node.data.node.template[ - templateName + templateField ].value || node.data.node.template[ - templateName + templateField ].value === "" ? "" : node.data.node .template[ - templateName + templateField ].value } onChange={(target) => { @@ -654,7 +654,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -662,7 +662,7 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node.template[ - templateName + templateField ] ); }} @@ -670,17 +670,17 @@ export default function CodeTabsComponent({
) : node.data.node.template[ - templateName + templateField ].type === "code" ? ( { @@ -709,7 +709,7 @@ export default function CodeTabsComponent({ newInputList[ index ].data.node.template[ - templateName + templateField ].value = target; return newInputList; }); @@ -717,7 +717,7 @@ export default function CodeTabsComponent({ node["data"]["id"], target, node.data.node.template[ - templateName + templateField ] ); }} @@ -725,7 +725,7 @@ export default function CodeTabsComponent({
) : node.data.node.template[ - templateName + templateField ].type === "Any" ? ( "-" ) : ( diff --git a/src/frontend/src/modals/ApiModal/index.tsx b/src/frontend/src/modals/ApiModal/index.tsx index a7ceac170..307e48208 100644 --- a/src/frontend/src/modals/ApiModal/index.tsx +++ b/src/frontend/src/modals/ApiModal/index.tsx @@ -184,16 +184,16 @@ const ApiModal = forwardRef( flow["data"]["nodes"].forEach((node) => { Object.keys(node["data"]["node"]["template"]) .filter( - (templateName) => - templateName.charAt(0) !== "_" && - node.data.node.template[templateName].show && - (node.data.node.template[templateName].type === "str" || - node.data.node.template[templateName].type === "bool" || - node.data.node.template[templateName].type === "float" || - node.data.node.template[templateName].type === "code" || - node.data.node.template[templateName].type === "prompt" || - node.data.node.template[templateName].type === "file" || - node.data.node.template[templateName].type === "int") + (templateField) => + templateField.charAt(0) !== "_" && + node.data.node.template[templateField].show && + (node.data.node.template[templateField].type === "str" || + node.data.node.template[templateField].type === "bool" || + node.data.node.template[templateField].type === "float" || + node.data.node.template[templateField].type === "code" || + node.data.node.template[templateField].type === "prompt" || + node.data.node.template[templateField].type === "file" || + node.data.node.template[templateField].type === "int") ) .map((n, i) => { arrNodesWithValues.push(node["id"]); diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx index 9bd6db214..e036f0360 100644 --- a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx @@ -9,17 +9,17 @@ import { classNames } from 
"../../../../utils/utils"; export default function NodeToolbarComponent({ data, setData, deleteNode }) { const [nodeLength, setNodeLength] = useState( Object.keys(data.node.template).filter( - (templateName) => - templateName.charAt(0) !== "_" && - data.node.template[templateName].show && - (data.node.template[templateName].type === "str" || - data.node.template[templateName].type === "bool" || - data.node.template[templateName].type === "float" || - data.node.template[templateName].type === "code" || - data.node.template[templateName].type === "prompt" || - data.node.template[templateName].type === "file" || - data.node.template[templateName].type === "Any" || - data.node.template[templateName].type === "int") + (templateField) => + templateField.charAt(0) !== "_" && + data.node.template[templateField].show && + (data.node.template[templateField].type === "str" || + data.node.template[templateField].type === "bool" || + data.node.template[templateField].type === "float" || + data.node.template[templateField].type === "code" || + data.node.template[templateField].type === "prompt" || + data.node.template[templateField].type === "file" || + data.node.template[templateField].type === "Any" || + data.node.template[templateField].type === "int") ).length ); From f4ad8572e7a3c7558ebbec8217ec18a38003202e Mon Sep 17 00:00:00 2001 From: Melvin Hillsman Date: Tue, 8 Aug 2023 00:44:50 -0500 Subject: [PATCH 89/90] Add zip_path and workspace_url fields As noted in the [documentation](https://python.langchain.com/docs/integrations/document_loaders/slack) for this plugin zip_path is the first named parameter not file_path and workspace_url is the second with zip_path being required and workspace_url being optional. 
Fixes #695 Signed-off-by: Melvin Hillsman --- .../template/frontend_node/documentloaders.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/template/frontend_node/documentloaders.py b/src/backend/langflow/template/frontend_node/documentloaders.py index bb78d8855..cdf67e54a 100644 --- a/src/backend/langflow/template/frontend_node/documentloaders.py +++ b/src/backend/langflow/template/frontend_node/documentloaders.py @@ -30,7 +30,6 @@ class DocumentLoaderFrontNode(FrontendNode): "UnstructuredEmailLoader": build_file_field( suffixes=[".eml"], fileTypes=["eml"] ), - "SlackDirectoryLoader": build_file_field(suffixes=[".zip"], fileTypes=["zip"]), "EverNoteLoader": build_file_field(suffixes=[".xml"], fileTypes=["xml"]), "FacebookChatLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]), "BSHTMLLoader": build_file_field(suffixes=[".html"], fileTypes=["html"]), @@ -105,7 +104,30 @@ class DocumentLoaderFrontNode(FrontendNode): advanced=False, ) ) - + elif self.template.type_name in {"SlackDirectoryLoader"}: + self.template.add_field( + TemplateField( + field_type="file", + required=True, + show=True, + name="zip_path", + value="", + display_name="Path to zip file", + suffixes=[".zip"], + file_types=["zip"], + ) + ) + self.template.add_field( + TemplateField( + field_type="str", + required=False, + show=True, + name="workspace_url", + value="", + display_name="Workspace URL", + advanced=False, + ) + ) elif self.template.type_name in self.file_path_templates: self.template.add_field(self.file_path_templates[self.template.type_name]) elif self.template.type_name in { From be14d4558903f115ca79c3532de8ad7ebc72c9f7 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 9 Aug 2023 10:28:41 -0300 Subject: [PATCH 90/90] =?UTF-8?q?=F0=9F=90=9B=20fix(utils.py):=20handle=20?= =?UTF-8?q?CommandError=20when=20running=20migrations=20and=20delete=20ale?= 
=?UTF-8?q?mbic=5Fversion=20table=20if=20wrong=20revision=20in=20DB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../langflow/services/database/utils.py | 20 +++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/src/backend/langflow/services/database/utils.py b/src/backend/langflow/services/database/utils.py index 20b2bbbb4..94bcd6651 100644 --- a/src/backend/langflow/services/database/utils.py +++ b/src/backend/langflow/services/database/utils.py @@ -1,7 +1,7 @@ from typing import TYPE_CHECKING from langflow.utils.logger import logger from contextlib import contextmanager - +from alembic.util.exc import CommandError from sqlmodel import Session if TYPE_CHECKING: @@ -13,7 +13,23 @@ def initialize_database(): from langflow.services import service_manager, ServiceType database_manager = service_manager.get(ServiceType.DATABASE_MANAGER) - database_manager.run_migrations() + try: + database_manager.run_migrations() + except CommandError as exc: + if "Can't locate revision identified by" not in str(exc): + raise exc + # This means there's wrong revision in the DB + # We need to delete the alembic_version table + # and run the migrations again + logger.warning( + "Wrong revision in DB, deleting alembic_version table and running migrations again" + ) + with session_getter(database_manager) as session: + session.execute("DROP TABLE alembic_version") + database_manager.run_migrations() + except Exception as exc: + logger.error(f"Error running migrations: {exc}") + raise RuntimeError("Error running migrations") from exc database_manager.create_db_and_tables() logger.debug("Database initialized")