diff --git a/docs/docs/components/vector-stores.mdx b/docs/docs/components/vector-stores.mdx index 66d6685a1..7e21f1021 100644 --- a/docs/docs/components/vector-stores.mdx +++ b/docs/docs/components/vector-stores.mdx @@ -26,7 +26,8 @@ The `Astra DB` initializes a vector store using Astra DB from records. It create - **Collection Indexing Policy:** Indexing policy for the collection. - Ensure you configure the necessary Astra DB token and API endpoint before starting. + Ensure you configure the necessary Astra DB token and API endpoint before + starting. --- @@ -96,6 +97,44 @@ For detailed documentation and integration guides, please refer to the [Chroma C --- +### Couchbase + +`Couchbase` builds a Couchbase vector store from records, streamlining the storage and retrieval of documents. + +**Parameters:** + +- **Embedding:** Model used by Couchbase. +- **Input:** Documents or records. +- **Couchbase Cluster Connection String:** Cluster connection string. +- **Couchbase Cluster Username:** Cluster username. +- **Couchbase Cluster Password:** Cluster password. +- **Bucket Name:** Bucket identifier in Couchbase. +- **Scope Name:** Scope identifier in Couchbase. +- **Collection Name:** Collection identifier in Couchbase. +- **Index Name:** Index identifier. + +For detailed documentation and integration guides, please refer to the [Couchbase Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/couchbase). + +--- + +### Couchbase Search + +`CouchbaseSearch` leverages the Couchbase component to search for documents based on a similarity metric. + +**Parameters:** + +- **Input:** Search query. +- **Embedding:** Model used in the Vector Store. +- **Couchbase Cluster Connection String:** Cluster connection string. +- **Couchbase Cluster Username:** Cluster username. +- **Couchbase Cluster Password:** Cluster password. +- **Bucket Name:** Bucket identifier. +- **Scope Name:** Scope identifier. +- **Collection Name:** Collection identifier in Couchbase. +- **Index Name:** Index identifier. + +--- + ### FAISS The `FAISS` component manages document ingestion into a FAISS Vector Store, optimizing document indexing and retrieval. @@ -278,7 +317,8 @@ For more details, see the [PGVector Component Documentation](https://python.lang For detailed documentation, refer to the [Redis Documentation](https://python.langchain.com/docs/integrations/vectorstores/redis). - Ensure the Redis server URL and index name are configured correctly. Provide a schema if no documents are available. + Ensure the Redis server URL and index name are configured correctly. Provide a + schema if no documents are available. --- @@ -389,7 +429,8 @@ For more information, consult the [Vectara Component Documentation](https://pyth For more details, see the [Weaviate Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/weaviate). - Ensure Weaviate instance is running and accessible. Verify API key, index name, text key, and attributes are set correctly. + Ensure Weaviate instance is running and accessible. Verify API key, index + name, text key, and attributes are set correctly. ---
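The two new Couchbase sections above map directly onto the LangChain integration that the `CouchbaseComponent` added later in this diff wraps. As a rough, minimal sketch of how those parameters fit together outside of Langflow — the connection string, credentials, bucket/scope/collection names, and index name below are all placeholders, and `FakeEmbeddings` stands in for a real embedding model:

```python
from datetime import timedelta

from couchbase.auth import PasswordAuthenticator
from couchbase.cluster import Cluster
from couchbase.options import ClusterOptions
from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores import CouchbaseVectorStore
from langchain_core.documents import Document

# Connect to the cluster (placeholder address and credentials).
auth = PasswordAuthenticator("Administrator", "password")
cluster = Cluster("couchbase://localhost", ClusterOptions(auth))
cluster.wait_until_ready(timedelta(seconds=5))

# Index documents into the named bucket/scope/collection; the search
# index named here must already exist on the cluster.
vector_store = CouchbaseVectorStore.from_documents(
    documents=[Document(page_content="Couchbase is a distributed NoSQL database.")],
    cluster=cluster,
    bucket_name="langflow",
    scope_name="_default",
    collection_name="_default",
    embedding=FakeEmbeddings(size=1536),
    index_name="langflow-index",
)

# The Couchbase Search component then performs the equivalent of:
print(vector_store.similarity_search("What is Couchbase?", k=4))
```

diff --git a/docs/docs/tutorials/rag-with-astradb.mdx b/docs/docs/tutorials/rag-with-astradb.mdx index 09395c598..9bb813f55 100644 --- a/docs/docs/tutorials/rag-with-astradb.mdx +++ b/docs/docs/tutorials/rag-with-astradb.mdx @@ -143,7 +143,7 @@ The RAG flow is a bit more complex.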
It consists of: style={{ width: "80%", margin: "20px auto" }} /> -To run it all we have to do is click on the ⚡ _Run_ button and start interacting with your RAG application. +To run it all we have to do is click on the 🤖 _Playground_ button and start interacting with your RAG application. =2.6" -files = [ - {file = "colorclass-2.2.2-py2.py3-none-any.whl", hash = "sha256:6f10c273a0ef7a1150b1120b6095cbdd68e5cf36dfd5d0fc957a2500bbf99a55"}, - {file = "colorclass-2.2.2.tar.gz", hash = "sha256:6d4fe287766166a98ca7bc6f6312daf04a0481b1eda43e7173484051c0ab4366"}, -] - [[package]] name = "coloredlogs" version = "15.0.1" @@ -1255,16 +1244,6 @@ traitlets = ">=4" [package.extras] test = ["pytest"] -[[package]] -name = "compressed-rtf" -version = "1.0.6" -description = "Compressed Rich Text Format (RTF) compression and decompression package" -optional = false -python-versions = "*" -files = [ - {file = "compressed_rtf-1.0.6.tar.gz", hash = "sha256:c1c827f1d124d24608981a56e8b8691eb1f2a69a78ccad6440e7d92fde1781dd"}, -] - [[package]] name = "configargparse" version = "1.7" @@ -1280,6 +1259,40 @@ files = [ test = ["PyYAML", "mock", "pytest"] yaml = ["PyYAML"] +[[package]] +name = "couchbase" +version = "4.2.1" +description = "Python Client for Couchbase" +optional = false +python-versions = ">=3.7" +files = [ + {file = "couchbase-4.2.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:7ad4c4462879f456a9067ac1788e62d852509439bac3538b9bc459a754666481"}, + {file = "couchbase-4.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:06d91891c599ba0f5052e594ac025a2ca6ab7885e528b854ac9c125df7c74146"}, + {file = "couchbase-4.2.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0191d4a631ead533551cb9a214704ad5f3dfff2029e21a23b57725a0b5666b25"}, + {file = "couchbase-4.2.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b206790d6834a18c5e457f9a70f44774f476f3acccf9f22e8c1b5283a5bd03fa"}, + {file = "couchbase-4.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ca571b9ce017ecbd447de12cd46e213f93e0664bec6fca0a06e1768db1a4f8"}, + {file = "couchbase-4.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:675c615cfd4b04e73e94cf03c786da5105d94527f5c3a087813dba477a1379e9"}, + {file = "couchbase-4.2.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:4cd09eedf162dc28386d9c6490e832c25068406c0f5d70a0417c0b1445394651"}, + {file = "couchbase-4.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfebb11551c6d947ce6297ab02b5006b1ac8739dda3e10d41896db0dc8672915"}, + {file = "couchbase-4.2.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:39e742ccfe90a0e59e6e1b0e12f0fe224a736c0207b218ef48048052f926e1c6"}, + {file = "couchbase-4.2.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f9ba24efddf47f30603275f5433434d8759a55233c78b3e4bc613c502ac429e9"}, + {file = "couchbase-4.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:adfca3929f07fb4385dc52f08d3a60634012f364b176f95ab023cdd1bb7fe9c0"}, + {file = "couchbase-4.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:e1c68b28c6f0475961afb9fe626ad2bac8a5643b53f719675386f060db4b6e19"}, + {file = "couchbase-4.2.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:137512462426cd495954c1815d78115d109308a4d9f8843b638285104388a359"}, + {file = "couchbase-4.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5987e5edcce7696e5f75b35be91f44fa69fb5eb95dba0957ad66f789affcdb36"}, + {file = "couchbase-4.2.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:080cb0fc333bd4a641ede4ee14ff0c7dbe95067fbb280826ea546681e0b9f9e3"}, + {file = "couchbase-4.2.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e317c2628a4a917083e8e7ce8e2662432b6a12ebac65fc00de6da2b37ab5975c"}, + {file = "couchbase-4.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:de7f8699ae344e2e96706ee0eac67e96bfdd3412fb18dcfb81d8ba5837dd3dfb"}, + {file = "couchbase-4.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:82b9deb8b1fe8e8d7dde9c232ac5f4c11ff0f067930837af0e7769706e6a9453"}, + {file = "couchbase-4.2.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:44502d069ea17a8d692b7c88d84bc0df2cf4e944cde337c8eb3175bc0b835bb9"}, + {file = "couchbase-4.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c0f131b816a7d91b755232872ba10f6d6ca5a715e595ee9534478bc97a518ae8"}, + {file = "couchbase-4.2.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e9b9deb312bbe5f9a8e63828f9de877714c4b09b7d88f7dc87b60e5ffb2a13e6"}, + {file = "couchbase-4.2.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71e8da251850d795975c3569c01d35ba1a556825dc7d9549ff9918d148255804"}, + {file = "couchbase-4.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d04492144ce520c612a2f8f265278c9f0cdf62fdd6f703e7a3210a7476b228f6"}, + {file = "couchbase-4.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f91b7699ea7b8253cf34c9fb6e191de9b2edfd7aa4d6f97b29c10b9a1670444"}, + {file = "couchbase-4.2.1.tar.gz", hash = "sha256:dc1c60d3f2fc179db8225aac4cc30d601d73cf2535aaf023d607e86be2d7dd78"}, +] + [[package]] name = "coverage" version = "7.5.1" @@ -1769,27 +1782,6 @@ files = [ {file = "duckdb-0.10.2.tar.gz", hash = "sha256:0f609c9d5f941f1ecde810f010dd9321cd406a552c1df20318a13fa64247f67f"}, ] -[[package]] -name = "easygui" -version = "0.98.3" -description = "EasyGUI is a module for very simple, very easy GUI programming in Python. EasyGUI is different from other GUI generators in that EasyGUI is NOT event-driven. Instead, all GUI interactions are invoked by simple function calls." 
-optional = false -python-versions = "*" -files = [ - {file = "easygui-0.98.3-py2.py3-none-any.whl", hash = "sha256:33498710c68b5376b459cd3fc48d1d1f33822139eb3ed01defbc0528326da3ba"}, - {file = "easygui-0.98.3.tar.gz", hash = "sha256:d653ff79ee1f42f63b5a090f2f98ce02335d86ad8963b3ce2661805cafe99a04"}, -] - -[[package]] -name = "ebcdic" -version = "1.1.1" -description = "Additional EBCDIC codecs" -optional = false -python-versions = "*" -files = [ - {file = "ebcdic-1.1.1-py2.py3-none-any.whl", hash = "sha256:33b4cb729bc2d0bf46cc1847b0e5946897cb8d3f53520c5b9aa5fa98d7e735f1"}, -] - [[package]] name = "ecdsa" version = "0.19.0" @@ -1905,33 +1897,6 @@ files = [ [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] -[[package]] -name = "extract-msg" -version = "0.47.0" -description = "Extracts emails and attachments saved in Microsoft Outlook's .msg files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "extract_msg-0.47.0-py2.py3-none-any.whl", hash = "sha256:ab177546d6ebbea7818e9acb352f6f8cce3821e39319405e6a873808238564a5"}, - {file = "extract_msg-0.47.0.tar.gz", hash = "sha256:d3ed5fdc8cdff3567421d7e4183511905eb3c83d2605e6c9335c653efa6cfb41"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.11.1,<4.13" -compressed-rtf = ">=1.0.6,<2" -ebcdic = ">=1.1.1,<2" -olefile = "0.47" -red-black-tree-mod = "1.20" -RTFDE = ">=0.1.1,<0.2" -tzlocal = ">=4.2,<6" - -[package.extras] -all = ["extract-msg[encoding]", "extract-msg[image]", "extract-msg[mime]"] -encoding = ["chardet (>=3.0.0,<6)"] -image = ["Pillow (>=9.5.0,<10)"] -mime = ["python-magic (>=0.4.27,<0.5)"] -readthedocs = ["sphinx-rtd-theme"] - [[package]] name = "faiss-cpu" version = "1.8.0" @@ -3218,16 +3183,6 @@ files = [ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, ] -[[package]] -name = "html2text" -version = "2024.2.26" -description = "Turn HTML into equivalent Markdown-structured text." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "html2text-2024.2.26.tar.gz", hash = "sha256:05f8e367d15aaabc96415376776cdd11afd5127a77fce6e36afc60c563ca2c32"}, -] - [[package]] name = "httpcore" version = "1.0.5" @@ -4400,23 +4355,6 @@ orjson = ">=3.9.14,<4.0.0" pydantic = ">=1,<3" requests = ">=2,<3" -[[package]] -name = "lark" -version = "1.1.8" -description = "a modern parsing library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "lark-1.1.8-py3-none-any.whl", hash = "sha256:7d2c221a66a8165f3f81aacb958d26033d40d972fdb70213ab0a2e0627e29c86"}, - {file = "lark-1.1.8.tar.gz", hash = "sha256:7ef424db57f59c1ffd6f0d4c2b705119927f566b68c0fe1942dddcc0e44391a5"}, -] - -[package.extras] -atomic-cache = ["atomicwrites"] -interegular = ["interegular (>=0.3.1,<0.4.0)"] -nearley = ["js2py"] -regex = ["regex"] - [[package]] name = "litellm" version = "1.37.19" @@ -4884,13 +4822,9 @@ files = [ {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, @@ -5349,21 +5283,6 @@ files = [ {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] -[[package]] -name = "msoffcrypto-tool" -version = "5.4.0" -description = "Python tool and library for decrypting and encrypting MS Office files using a password or other keys" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "msoffcrypto_tool-5.4.0-py3-none-any.whl", hash = "sha256:0e39319f982c22a449505e5ab7da18a8ae76376a0008e180e1528a0875525da7"}, - {file = "msoffcrypto_tool-5.4.0.tar.gz", hash = "sha256:0f5f45d91d1eaa2ca0b3adefb5aac4932afb50c678dfa8d7da390d187f1dac39"}, -] - -[package.dependencies] -cryptography = ">=35.0" 
-olefile = ">=0.46" - [[package]] name = "multidict" version = "6.0.5" @@ -5861,42 +5780,6 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] -[[package]] -name = "olefile" -version = "0.47" -description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f"}, - {file = "olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c"}, -] - -[package.extras] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "oletools" -version = "0.60.1" -description = "Python tools to analyze security characteristics of MS Office and OLE files (also called Structured Storage, Compound File Binary Format or Compound Document File Format), for Malware Analysis and Incident Response #DFIR" -optional = false -python-versions = "*" -files = [ - {file = "oletools-0.60.1-py2.py3-none-any.whl", hash = "sha256:edef92374e688989a39269eb9a11142fb20a023629c23538c849c14d1d1144ea"}, - {file = "oletools-0.60.1.zip", hash = "sha256:67a796da4c4b8e2feb9a6b2495bef8798a3323a75512de4e5669d9dc9d1fae31"}, -] - -[package.dependencies] -colorclass = "*" -easygui = "*" -msoffcrypto-tool = {version = "*", markers = "platform_python_implementation != \"PyPy\" or python_version >= \"3\" and (platform_system != \"Windows\" and platform_system != \"Darwin\")"} -olefile = ">=0.46" -pcodedmp = ">=1.2.5" -pyparsing = ">=2.1.0,<3" - -[package.extras] -full = ["XLMMacroDeobfuscator"] - [[package]] name = "onnxruntime" version = "1.18.0" @@ -6352,21 +6235,6 @@ bcrypt = ["bcrypt (>=3.1.0)"] build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] totp = ["cryptography"] -[[package]] -name = "pcodedmp" -version = "1.2.6" -description = "A VBA p-code disassembler" -optional = false -python-versions = "*" -files = [ - {file = "pcodedmp-1.2.6-py2.py3-none-any.whl", hash = "sha256:4441f7c0ab4cbda27bd4668db3b14f36261d86e5059ce06c0828602cbe1c4278"}, - {file = "pcodedmp-1.2.6.tar.gz", hash = "sha256:025f8c809a126f45a082ffa820893e6a8d990d9d7ddb68694b5a9f0a6dbcd955"}, -] - -[package.dependencies] -oletools = ">=0.54" -win-unicode-console = {version = "*", markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""} - [[package]] name = "pexpect" version = "4.9.0" @@ -7428,19 +7296,6 @@ files = [ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] -[[package]] -name = "pysrt" -version = "1.1.2" -description = "SubRip (.srt) subtitle parser and writer" -optional = false -python-versions = "*" -files = [ - {file = "pysrt-1.1.2.tar.gz", hash = "sha256:b4f844ba33e4e7743e9db746492f3a193dc0bc112b153914698e7c1cdeb9b0b9"}, -] - -[package.dependencies] -chardet = "*" - [[package]] name = "pytest" version = "8.2.1" @@ -7988,16 +7843,6 @@ python-dateutil = ">=2.8.1,<3.0.0" typing-extensions = ">=4.11.0,<5.0.0" websockets = ">=11,<13" -[[package]] -name = "red-black-tree-mod" -version = "1.20" -description = "Flexible python implementation of red black trees" -optional = false -python-versions = "*" -files = [ - {file = "red-black-tree-mod-1.20.tar.gz", hash = 
"sha256:2448e6fc9cbf1be204c753f352c6ee49aa8156dbf1faa57dfc26bd7705077e0a"}, -] - [[package]] name = "redis" version = "5.0.4" @@ -8189,25 +8034,6 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "rtfde" -version = "0.1.1" -description = "A library for extracting HTML content from RTF encapsulated HTML as commonly found in the exchange MSG email format." -optional = false -python-versions = ">=3.8" -files = [ - {file = "RTFDE-0.1.1-py3-none-any.whl", hash = "sha256:ea7ab0e0b9d4af08415f5017ecff91d74e24216a5e4e4682155cedc478035e99"}, - {file = "RTFDE-0.1.1.tar.gz", hash = "sha256:9e43485e79b2dd1018127735d8134f65d2a9d73af314d2a101f10346333b241e"}, -] - -[package.dependencies] -lark = "1.1.8" -oletools = ">=0.56" - -[package.extras] -dev = ["coverage (>=7.2.2)", "lxml (>=4.6)", "mypy (>=1.1.1)", "pdoc3 (>=0.10.0)"] -msg-parse = ["extract-msg (>=0.27)"] - [[package]] name = "ruff" version = "0.3.7" @@ -9531,23 +9357,6 @@ files = [ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] -[[package]] -name = "tzlocal" -version = "5.2" -description = "tzinfo object for the local timezone" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, - {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, -] - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - [[package]] name = "ujson" version = "5.10.0" @@ -10022,16 +9831,6 @@ files = [ beautifulsoup4 = "*" requests = ">=2.0.0,<3.0.0" -[[package]] -name = "win-unicode-console" -version = "0.5" -description = "Enable Unicode input and display when running Python from Windows console." 
-optional = false -python-versions = "*" -files = [ - {file = "win_unicode_console-0.5.zip", hash = "sha256:d4142d4d56d46f449d6f00536a73625a871cba040f0bc1a2e305a04578f07d1e"}, -] - [[package]] name = "win32-setctime" version = "1.1.0" @@ -10459,4 +10258,5 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "1e59b887d9cef40cd980e2cb698c48e135911400176d8bef1012ddc8853e420a" +content-hash = "c68a14e273363466c1cea2bacec1798a6894d2bb18cf79ba443dc65c9344c57d" + diff --git a/pyproject.toml b/pyproject.toml index 486de9ec5..cc7198812 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,6 @@ google-api-python-client = "^2.118.0" huggingface-hub = { version = "^0.20.0", extras = ["inference"] } llama-cpp-python = { version = "~0.2.0", optional = true } networkx = "^3.1" -pysrt = "^1.1.2" fake-useragent = "^1.4.0" psycopg2-binary = "^2.9.6" pyarrow = "^14.0.0" @@ -60,7 +59,6 @@ pywin32 = { version = "^306", markers = "sys_platform == 'win32'" } langfuse = "^2.9.0" metal-sdk = "^2.5.0" markupsafe = "^2.1.3" -extract-msg = "^0.47.0" # jq is not available for windows boto3 = "^1.34.0" numexpr = "^2.8.6" @@ -74,7 +72,6 @@ pytube = "^15.0.0" llama-index = "^0.10.13" # unstructured = { extras = ["md"], version = "^0.12.4" } dspy-ai = "^2.4.0" -html2text = "^2024.2.26" assemblyai = "^0.23.1" litellm = "^1.34.22" chromadb = "^0.4.24" @@ -86,6 +83,7 @@ langchain-google-vertexai = "^1.0.3" langchain-groq = "^0.1.3" langchain-pinecone = "^0.1.0" langchain-mistralai = "^0.1.6" +couchbase = "^4.2.1" [tool.poetry.group.dev.dependencies] diff --git a/src/backend/base/langflow/api/utils.py b/src/backend/base/langflow/api/utils.py index e2f6e07e7..f7e0548fe 100644 --- a/src/backend/base/langflow/api/utils.py +++ b/src/backend/base/langflow/api/utils.py @@ -140,7 +140,10 @@ def get_file_path_value(file_path): # If the path is not in the cache dir, return empty string # This is to prevent access to files outside the cache dir # If the path is not a file, return empty string - if not path.exists() or not str(path).startswith(user_cache_dir("langflow", "langflow")): + if not str(path).startswith(user_cache_dir("langflow", "langflow")): + return "" + + if not path.exists(): + return "" return file_path
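The chat, endpoints, and files routers below all move their `flow_id` path parameter from `str` to `uuid.UUID` and then convert it back to `str` inside the handler. Declaring the parameter as `UUID` makes FastAPI validate the path segment up front and answer malformed IDs with a 422 instead of passing them through to cache and database lookups. A minimal standalone sketch of the pattern — the route and names are illustrative, not Langflow's actual app:

```python
from uuid import UUID

from fastapi import FastAPI

app = FastAPI()


@app.post("/build/{flow_id}/vertices")
async def retrieve_vertices_order(flow_id: UUID):
    # FastAPI has already parsed and validated the path segment here; a
    # request like POST /build/not-a-uuid/vertices is rejected with a 422
    # before this body ever runs.
    flow_id = str(flow_id)  # downstream caches and queries expect strings
    return {"flow_id": flow_id}
```

diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py index 40290afa7..6a38f4981 100644 --- a/src/backend/base/langflow/api/v1/chat.py +++ b/src/backend/base/langflow/api/v1/chat.py @@ -53,7 +53,7 @@ async def try_running_celery_task(vertex, user_id): @router.post("/build/{flow_id}/vertices", response_model=VerticesOrderResponse) async def retrieve_vertices_order( - flow_id: str, + flow_id: uuid.UUID, data: Optional[Annotated[Optional[FlowDataRequest], Body(embed=True)]] = None, stop_component_id: Optional[str] = None, start_component_id: Optional[str] = None, @@ -78,6 +78,7 @@ async def retrieve_vertices_order( HTTPException: If there is an error checking the build status.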
""" try: + flow_id = str(flow_id) # First, we need to check if the flow_id is in the cache if not data: graph = await build_and_cache_graph_from_db(flow_id=flow_id, session=session, chat_service=chat_service) @@ -119,7 +120,7 @@ async def retrieve_vertices_order( @router.post("/build/{flow_id}/vertices/{vertex_id}") async def build_vertex( - flow_id: str, + flow_id: uuid.UUID, vertex_id: str, background_tasks: BackgroundTasks, inputs: Annotated[Optional[InputValueRequest], Body(embed=True)] = None, @@ -143,8 +144,8 @@ async def build_vertex( HTTPException: If there is an error building the vertex. """ + flow_id = str(flow_id) - start_time = time.perf_counter() next_runnable_vertices = [] top_level_vertices = [] try: @@ -158,8 +159,7 @@ async def build_vertex( ) else: graph = cache.get("result") - result_data_response = ResultDataResponse(results={}) - duration = "" + ResultDataResponse(results={}) vertex = graph.get_vertex(vertex_id) try: lock = chat_service._cache_locks[flow_id] @@ -240,7 +240,7 @@ async def build_vertex( @router.get("/build/{flow_id}/{vertex_id}/stream", response_class=StreamingResponse) async def build_vertex_stream( - flow_id: str, + flow_id: uuid.UUID, vertex_id: str, session_id: Optional[str] = None, chat_service: "ChatService" = Depends(get_chat_service), @@ -272,6 +272,7 @@ async def build_vertex_stream( HTTPException: If an error occurs while building the vertex. """ try: + flow_id = str(flow_id) async def stream_vertex(): try: diff --git a/src/backend/base/langflow/api/v1/endpoints.py b/src/backend/base/langflow/api/v1/endpoints.py index e206935c4..b84fff034 100644 --- a/src/backend/base/langflow/api/v1/endpoints.py +++ b/src/backend/base/langflow/api/v1/endpoints.py @@ -1,5 +1,6 @@ from http import HTTPStatus from typing import Annotated, List, Optional, Union +from uuid import UUID import sqlalchemy as sa from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status @@ -54,7 +55,7 @@ def get_all( @router.post("/run/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) async def simplified_run_flow( db: Annotated[Session, Depends(get_session)], - flow_id: str, + flow_id: UUID, input_request: SimplifiedAPIRequest = SimplifiedAPIRequest(), stream: bool = False, api_key_user: User = Depends(api_key_security), @@ -111,6 +112,7 @@ async def simplified_run_flow( session_id = input_request.session_id try: + flow_id = str(flow_id) task_result: List[RunOutputs] = [] artifacts = {} if input_request.session_id: @@ -187,7 +189,7 @@ async def simplified_run_flow( @router.post("/run/advanced/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) async def experimental_run_flow( session: Annotated[Session, Depends(get_session)], - flow_id: str, + flow_id: UUID, inputs: Optional[List[InputValueRequest]] = [InputValueRequest(components=[], input_value="")], outputs: Optional[List[str]] = [], tweaks: Annotated[Optional[Tweaks], Body(embed=True)] = None, # noqa: F821 @@ -235,6 +237,7 @@ async def experimental_run_flow( This endpoint facilitates complex flow executions with customized inputs, outputs, and configurations, catering to diverse application requirements. 
""" try: + flow_id = str(flow_id) if outputs is None: outputs = [] @@ -357,9 +360,10 @@ async def get_task_status(task_id: str): ) async def create_upload_file( file: UploadFile, - flow_id: str, + flow_id: UUID, ): try: + flow_id = str(flow_id) file_path = save_uploaded_file(file, folder_name=flow_id) return UploadFileResponse( @@ -400,23 +404,6 @@ async def custom_component( return built_frontend_node -@router.post("/custom_component/reload", status_code=HTTPStatus.OK) -async def reload_custom_component(path: str, user: User = Depends(get_current_active_user)): - from langflow.interface.custom.utils import build_custom_component_template - - try: - reader = DirectoryReader("") - valid, content = reader.process_file(path) - if not valid: - raise ValueError(content) - - extractor = CustomComponent(code=content) - frontend_node, _ = build_custom_component_template(extractor, user_id=user.id) - return frontend_node - except Exception as exc: - raise HTTPException(status_code=400, detail=str(exc)) - - @router.post("/custom_component/update", status_code=HTTPStatus.OK) async def custom_component_update( code_request: UpdateCustomComponentRequest, diff --git a/src/backend/base/langflow/api/v1/files.py b/src/backend/base/langflow/api/v1/files.py index 435aea826..762d39da9 100644 --- a/src/backend/base/langflow/api/v1/files.py +++ b/src/backend/base/langflow/api/v1/files.py @@ -1,6 +1,7 @@ import hashlib from http import HTTPStatus from io import BytesIO +from uuid import UUID from fastapi import APIRouter, Depends, HTTPException, UploadFile from fastapi.responses import StreamingResponse @@ -20,10 +21,11 @@ router = APIRouter(tags=["Files"], prefix="/files") # then finds it in the database and returns it while # using the current user as the owner def get_flow_id( - flow_id: str, + flow_id: UUID, current_user=Depends(get_current_active_user), session=Depends(get_session), ): + flow_id = str(flow_id) # AttributeError: 'SelectOfScalar' object has no attribute 'first' flow = session.get(Flow, flow_id) if not flow: @@ -36,10 +38,11 @@ def get_flow_id( @router.post("/upload/{flow_id}", status_code=HTTPStatus.CREATED) async def upload_file( file: UploadFile, - flow_id: str = Depends(get_flow_id), + flow_id: UUID = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service), ): try: + flow_id = str(flow_id) file_content = await file.read() file_name = file.filename or hashlib.sha256(file_content).hexdigest() folder = flow_id @@ -50,8 +53,9 @@ async def upload_file( @router.get("/download/{flow_id}/{file_name}") -async def download_file(file_name: str, flow_id: str, storage_service: StorageService = Depends(get_storage_service)): +async def download_file(file_name: str, flow_id: UUID, storage_service: StorageService = Depends(get_storage_service)): try: + flow_id = str(flow_id) extension = file_name.split(".")[-1] if not extension: @@ -74,9 +78,10 @@ async def download_file(file_name: str, flow_id: str, storage_service: StorageSe @router.get("/images/{flow_id}/{file_name}") -async def download_image(file_name: str, flow_id: str, storage_service: StorageService = Depends(get_storage_service)): +async def download_image(file_name: str, flow_id: UUID, storage_service: StorageService = Depends(get_storage_service)): try: extension = file_name.split(".")[-1] + flow_id = str(flow_id) if not extension: raise HTTPException(status_code=500, detail=f"Extension not found for file {file_name}") @@ -96,9 +101,10 @@ async def download_image(file_name: str, flow_id: str, storage_service: 
StorageS @router.get("/list/{flow_id}") async def list_files( - flow_id: str = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) + flow_id: UUID = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) ): try: + flow_id = str(flow_id) files = await storage_service.list_files(flow_id=flow_id) return {"files": files} except Exception as e: @@ -107,9 +113,10 @@ async def list_files( @router.delete("/delete/{flow_id}/{file_name}") async def delete_file( - file_name: str, flow_id: str = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) + file_name: str, flow_id: UUID = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) ): try: + flow_id = str(flow_id) await storage_service.delete_file(flow_id=flow_id, file_name=file_name) return {"message": f"File {file_name} deleted successfully"} except Exception as e: diff --git a/src/backend/base/langflow/components/memories/AstraDBMessageReader.py b/src/backend/base/langflow/components/memories/AstraDBMessageReader.py new file mode 100644 index 000000000..9b82dd308 --- /dev/null +++ b/src/backend/base/langflow/components/memories/AstraDBMessageReader.py @@ -0,0 +1,95 @@ +from typing import Optional, cast + +from langchain_astradb.chat_message_histories import AstraDBChatMessageHistory + +from langflow.base.memory.memory import BaseMemoryComponent +from langflow.field_typing import Text +from langflow.schema.schema import Record + + +class AstraDBMessageReaderComponent(BaseMemoryComponent): + display_name = "Astra DB Message Reader" + description = "Retrieves stored chat messages from Astra DB." + + def build_config(self): + return { + "session_id": { + "display_name": "Session ID", + "info": "Session ID of the chat history.", + "input_types": ["Text"], + }, + "collection_name": { + "display_name": "Collection Name", + "info": "Collection name for Astra DB.", + "input_types": ["Text"], + }, + "token": { + "display_name": "Astra DB Application Token", + "info": "Token for the Astra DB instance.", + "password": True, + }, + "api_endpoint": { + "display_name": "Astra DB API Endpoint", + "info": "API Endpoint for the Astra DB instance.", + "password": True, + }, + "namespace": { + "display_name": "Namespace", + "info": "Namespace for the Astra DB instance.", + "input_types": ["Text"], + "advanced": True, + }, + } + + def get_messages(self, **kwargs) -> list[Record]: + """ + Retrieves messages from the AstraDBChatMessageHistory memory. + + Args: + memory (AstraDBChatMessageHistory): The AstraDBChatMessageHistory instance to retrieve messages from. + + Returns: + list[Record]: A list of Record objects representing the search results. + """ + memory: AstraDBChatMessageHistory = cast( + AstraDBChatMessageHistory, kwargs.get("memory") + ) + if not memory: + raise ValueError("AstraDBChatMessageHistory instance is required.") + + # Get messages from the memory + messages = memory.messages + results = [Record.from_lc_message(message) for message in messages] + + return list(results) + + def build( + self, + session_id: Text, + collection_name: str, + token: str, + api_endpoint: str, + namespace: Optional[str] = None, + ) -> list[Record]: + try: + from langchain_community.chat_message_histories.astradb import ( + AstraDBChatMessageHistory, + ) + except ImportError: + raise ImportError( + "Could not import langchain Astra DB integration package. " + "Please install it with `pip install langchain-astradb`." 
+ ) + + memory = AstraDBChatMessageHistory( + session_id=session_id, + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + namespace=namespace, + ) + + records = self.get_messages(memory=memory) + self.status = records + + return records diff --git a/src/backend/base/langflow/components/memories/AstraDBMessageWriter.py b/src/backend/base/langflow/components/memories/AstraDBMessageWriter.py new file mode 100644 index 000000000..33525656e --- /dev/null +++ b/src/backend/base/langflow/components/memories/AstraDBMessageWriter.py @@ -0,0 +1,118 @@ +from typing import Optional + +from langflow.base.memory.memory import BaseMemoryComponent +from langflow.field_typing import Text +from langflow.schema.schema import Record + +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_community.chat_message_histories.astradb import AstraDBChatMessageHistory + + +class AstraDBMessageWriterComponent(BaseMemoryComponent): + display_name = "Astra DB Message Writer" + description = "Writes a message to Astra DB." + + def build_config(self): + return { + "input_value": { + "display_name": "Input Record", + "info": "Record to write to Astra DB.", + }, + "session_id": { + "display_name": "Session ID", + "info": "Session ID of the chat history.", + "input_types": ["Text"], + }, + "collection_name": { + "display_name": "Collection Name", + "info": "Collection name for Astra DB.", + "input_types": ["Text"], + }, + "token": { + "display_name": "Astra DB Application Token", + "info": "Token for the Astra DB instance.", + "password": True, + }, + "api_endpoint": { + "display_name": "Astra DB API Endpoint", + "info": "API Endpoint for the Astra DB instance.", + "password": True, + }, + "namespace": { + "display_name": "Namespace", + "info": "Namespace for the Astra DB instance.", + "input_types": ["Text"], + "advanced": True, + }, + } + + def add_message( + self, + sender: str, + sender_name: str, + text: Text, + session_id: str, + metadata: Optional[dict] = None, + **kwargs, + ): + """ + Adds a message to the AstraDBChatMessageHistory memory. + + Args: + sender (Text): The type of the message sender. Valid values are "Machine" or "User". + sender_name (Text): The name of the message sender. + text (Text): The content of the message. + session_id (Text): The session ID associated with the message. + metadata (dict | None, optional): Additional metadata for the message. Defaults to None. + **kwargs: Additional keyword arguments. + + Raises: + ValueError: If the AstraDBChatMessageHistory instance is not provided. + + """ + memory: AstraDBChatMessageHistory | None = kwargs.pop("memory", None) + if memory is None: + raise ValueError("AstraDBChatMessageHistory instance is required.") + + # BaseMessage requires a concrete message type, so map the sender onto + # HumanMessage/AIMessage and carry the Langflow-specific fields in + # additional_kwargs. + extra = {"sender_name": sender_name, "metadata": metadata, "session_id": session_id} + if sender == "User": + message: BaseMessage = HumanMessage(content=text, additional_kwargs=extra) + else: + message = AIMessage(content=text, additional_kwargs=extra) + + memory.add_messages([message]) + + def build( + self, + input_value: Record, + session_id: Text, + collection_name: str, + token: str, + api_endpoint: str, + namespace: Optional[str] = None, + ) -> Record: + try: + from langchain_community.chat_message_histories.astradb import ( + AstraDBChatMessageHistory, + ) + except ImportError: + raise ImportError( + "Could not import langchain Astra DB integration package. " + "Please install it with `pip install langchain-astradb`."
+ ) + + memory = AstraDBChatMessageHistory( + session_id=session_id, + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + namespace=namespace, + ) + + self.add_message(**input_value.data, memory=memory) + self.status = f"Added message to Astra DB memory for session {session_id}" + + return input_value diff --git a/src/backend/base/langflow/components/vectorsearch/CouchbaseSearch.py b/src/backend/base/langflow/components/vectorsearch/CouchbaseSearch.py new file mode 100644 index 000000000..0c8a815a4 --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/CouchbaseSearch.py @@ -0,0 +1,73 @@ +from typing import List, Optional + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.Couchbase import CouchbaseComponent +from langflow.field_typing import Embeddings, NestedDict, Text +from langflow.schema import Record + + +class CouchbaseSearchComponent(LCVectorStoreComponent): + display_name = "Couchbase Search" + description = "Search a Couchbase Vector Store for similar documents." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/couchbase" + icon = "Couchbase" + field_order = [ + "couchbase_connection_string", + "couchbase_username", + "couchbase_password", + "bucket_name", + "scope_name", + "collection_name", + "index_name", + ] + + def build_config(self): + return { + "input_value": {"display_name": "Input"}, + "embedding": {"display_name": "Embedding"}, + "couchbase_connection_string": {"display_name": "Couchbase Cluster connection string","required": True}, + "couchbase_username": {"display_name": "Couchbase username","required": True}, + "couchbase_password": { + "display_name": "Couchbase password", + "password": True, + "required": True + }, + "bucket_name": {"display_name": "Bucket Name","required": True}, + "scope_name": {"display_name": "Scope Name","required": True}, + "collection_name": {"display_name": "Collection Name","required": True}, + "index_name": {"display_name": "Index Name","required": True}, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + embedding: Embeddings, + number_of_results: int = 4, + bucket_name: str = "", + scope_name: str = "", + collection_name: str = "", + index_name: str = "", + couchbase_connection_string: str = "", + couchbase_username: str = "", + couchbase_password: str = "", + ) -> List[Record]: + vector_store = CouchbaseComponent().build( + couchbase_connection_string=couchbase_connection_string, + couchbase_username=couchbase_username, + couchbase_password=couchbase_password, + bucket_name=bucket_name, + scope_name=scope_name, + collection_name=collection_name, + embedding=embedding, + index_name=index_name, + ) + if not vector_store: + raise ValueError("Failed to create Couchbase Vector Store") + return self.search_with_vector_store( + vector_store=vector_store, input_value=input_value, search_type="similarity", k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/__init__.py b/src/backend/base/langflow/components/vectorsearch/__init__.py index 28ea85fce..4cdf5b83c 100644 --- a/src/backend/base/langflow/components/vectorsearch/__init__.py +++ b/src/backend/base/langflow/components/vectorsearch/__init__.py @@ -9,10 +9,12 @@ from .SupabaseVectorStoreSearch import SupabaseSearchComponent from .VectaraSearch import 
VectaraSearchComponent from .WeaviateSearch import WeaviateSearchVectorStore from .pgvectorSearch import PGVectorSearchComponent +from .CouchbaseSearch import CouchbaseSearchComponent __all__ = [ "AstraDBSearchComponent", "ChromaSearchComponent", + "CouchbaseSearchComponent", "FAISSSearchComponent", "MongoDBAtlasSearchComponent", "PineconeSearchComponent", diff --git a/src/backend/base/langflow/components/vectorstores/Couchbase.py b/src/backend/base/langflow/components/vectorstores/Couchbase.py new file mode 100644 index 000000000..1816e85fb --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/Couchbase.py @@ -0,0 +1,95 @@ +from typing import List, Optional, Union + +from langchain.schema import BaseRetriever + +from langchain_community.vectorstores import CouchbaseVectorStore + +from langflow.custom import CustomComponent +from langflow.field_typing import Embeddings, VectorStore +from langflow.schema import Record + +from datetime import timedelta + +from couchbase.auth import PasswordAuthenticator # type: ignore +from couchbase.cluster import Cluster # type: ignore +from couchbase.options import ClusterOptions # type: ignore + + +class CouchbaseComponent(CustomComponent): + display_name = "Couchbase" + description = "Construct a `Couchbase Vector Search` vector store from raw documents." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/couchbase" + icon = "Couchbase" + field_order = [ + "couchbase_connection_string", + "couchbase_username", + "couchbase_password", + "bucket_name", + "scope_name", + "collection_name", + "index_name", + ] + + def build_config(self): + return { + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, + "embedding": {"display_name": "Embedding"}, + "couchbase_connection_string": {"display_name": "Couchbase Cluster connection string", "required": True}, + "couchbase_username": {"display_name": "Couchbase username", "required": True}, + "couchbase_password": { + "display_name": "Couchbase password", + "password": True, + "required": True + }, + "bucket_name": {"display_name": "Bucket Name", "required": True}, + "scope_name": {"display_name": "Scope Name", "required": True}, + "collection_name": {"display_name": "Collection Name", "required": True}, + "index_name": {"display_name": "Index Name", "required": True}, + } + + def build( + self, + embedding: Embeddings, + inputs: Optional[List[Record]] = None, + bucket_name: str = "", + scope_name: str = "", + collection_name: str = "", + index_name: str = "", + couchbase_connection_string: str = "", + couchbase_username: str = "", + couchbase_password: str = "", + ) -> Union[VectorStore, BaseRetriever]: + try: + auth = PasswordAuthenticator(couchbase_username, couchbase_password) + options = ClusterOptions(auth) + cluster = Cluster(couchbase_connection_string, options) + + cluster.wait_until_ready(timedelta(seconds=5)) + except Exception as e: + raise ValueError(f"Failed to connect to Couchbase: {e}") + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + if documents: + vector_store = CouchbaseVectorStore.from_documents( + documents=documents, + cluster=cluster, + bucket_name=bucket_name, + scope_name=scope_name, + collection_name=collection_name, + embedding=embedding, + index_name=index_name, + ) + else: + vector_store = CouchbaseVectorStore( + cluster=cluster, + bucket_name=bucket_name, + scope_name=scope_name, +
collection_name=collection_name, + embedding=embedding, + index_name=index_name, + ) + return vector_store diff --git a/src/backend/base/langflow/components/vectorstores/__init__.py b/src/backend/base/langflow/components/vectorstores/__init__.py index 48e1bf9c7..d38b0a735 100644 --- a/src/backend/base/langflow/components/vectorstores/__init__.py +++ b/src/backend/base/langflow/components/vectorstores/__init__.py @@ -9,10 +9,12 @@ from .SupabaseVectorStore import SupabaseComponent from .Vectara import VectaraComponent from .Weaviate import WeaviateVectorStoreComponent from .pgvector import PGVectorComponent +from .Couchbase import CouchbaseComponent __all__ = [ "AstraDBVectorStoreComponent", "ChromaComponent", + "CouchbaseComponent", "FAISSComponent", "MongoDBAtlasComponent", "PineconeComponent", diff --git a/src/frontend/src/components/inputGlobalComponent/index.tsx b/src/frontend/src/components/inputGlobalComponent/index.tsx index 7ab3cae33..e9edd2e50 100644 --- a/src/frontend/src/components/inputGlobalComponent/index.tsx +++ b/src/frontend/src/components/inputGlobalComponent/index.tsx @@ -34,6 +34,7 @@ export default function InputGlobalComponent({ useEffect(() => { if (data.node?.template[name]) if ( + globalVariablesEntries && !globalVariablesEntries.includes(data.node?.template[name].value) && data.node?.template[name].load_from_db ) { @@ -138,6 +139,7 @@ export default function InputGlobalComponent({ )} selectedOption={ data?.node?.template[name].load_from_db && + globalVariablesEntries && globalVariablesEntries.includes(data?.node?.template[name].value ?? "") ? data?.node?.template[name].value : "" diff --git a/src/frontend/src/icons/Couchbase/Couchbase.jsx b/src/frontend/src/icons/Couchbase/Couchbase.jsx new file mode 100644 index 000000000..9259aae60 --- /dev/null +++ b/src/frontend/src/icons/Couchbase/Couchbase.jsx @@ -0,0 +1,17 @@ +const SvgCouchbaseIcon = (props) => ( + + + +); + +export default SvgCouchbaseIcon; diff --git a/src/frontend/src/icons/Couchbase/couchbase.svg b/src/frontend/src/icons/Couchbase/couchbase.svg new file mode 100644 index 000000000..6c86a8a9c --- /dev/null +++ b/src/frontend/src/icons/Couchbase/couchbase.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/icons/Couchbase/index.tsx b/src/frontend/src/icons/Couchbase/index.tsx new file mode 100644 index 000000000..85149c204 --- /dev/null +++ b/src/frontend/src/icons/Couchbase/index.tsx @@ -0,0 +1,9 @@ +import React, { forwardRef } from "react"; +import SvgCouchbaseIcon from "./Couchbase"; + +export const CouchbaseIcon = forwardRef< + SVGSVGElement, + React.PropsWithChildren<{}> +>((props, ref) => { + return ; +}); diff --git a/src/frontend/src/pages/MainPage/components/myCollectionComponent/components/headerTabsSearchComponent/index.tsx b/src/frontend/src/pages/MainPage/components/myCollectionComponent/components/headerTabsSearchComponent/index.tsx index a15b84601..8dca45f3c 100644 --- a/src/frontend/src/pages/MainPage/components/myCollectionComponent/components/headerTabsSearchComponent/index.tsx +++ b/src/frontend/src/pages/MainPage/components/myCollectionComponent/components/headerTabsSearchComponent/index.tsx @@ -19,6 +19,11 @@ const HeaderTabsSearchComponent = ({}: HeaderTabsSearchComponentProps) => { const [tabActive, setTabActive] = useState("Flows"); const setErrorData = useAlertStore((state) => state.setErrorData); const allFlows = useFlowsManagerStore((state) => state.allFlows); + const [inputValue, setInputValue] = useState(""); + + const setSearchFlowsComponents = 
useFlowsManagerStore( + (state) => state.setSearchFlowsComponents, + ); const handleDownloadFolder = () => { if (allFlows.length === 0) { @@ -34,8 +39,19 @@ const HeaderTabsSearchComponent = ({}: HeaderTabsSearchComponentProps) => { return ( <> - - + { + setSearchFlowsComponents(e.target.value); + setInputValue(e.target.value); + }} + onKeyDown={(e) => { + if (e.key === "Enter") { + setSearchFlowsComponents(inputValue); + } + }} + /> ) => void; + onClick?: () => void; + value: string; + onKeyDown: (e: KeyboardEvent) => void; }; -const InputSearchComponent = ({ loading }: InputSearchComponentProps) => { +const InputSearchComponent = ({ + loading, + divClasses, + onChange, + onClick, + value, + onKeyDown, +}: InputSearchComponentProps) => { const pagePath = window.location.pathname; - - const [inputValue, setInputValue] = useState(""); const allFlows = useFlowsManagerStore((state) => state.allFlows); - - const setSearchFlowsComponents = useFlowsManagerStore( - (state) => state.setSearchFlowsComponents, - ); - const searchFlowsComponents = useFlowsManagerStore( (state) => state.searchFlowsComponents, ); @@ -38,24 +43,18 @@ const InputSearchComponent = ({ loading }: InputSearchComponentProps) => { return ( <> - + { - setSearchFlowsComponents(e.target.value); - setInputValue(e.target.value); - }} - onKeyDown={(e) => { - if (e.key === "Enter") { - setSearchFlowsComponents(inputValue); - } - }} - value={inputValue} + onChange={onChange} + onKeyDown={onKeyDown} + value={value} /> = []; + if (globalVariablesEntries === undefined) return; globalVariablesEntries.forEach((entrie) => { const globalVariableObj = globalVariables[entrie]; rows.push({ diff --git a/src/frontend/src/pages/StorePage/index.tsx b/src/frontend/src/pages/StorePage/index.tsx index 37f9efb15..3b4612281 100644 --- a/src/frontend/src/pages/StorePage/index.tsx +++ b/src/frontend/src/pages/StorePage/index.tsx @@ -35,6 +35,7 @@ import useFlowsManagerStore from "../../stores/flowsManagerStore"; import { useStoreStore } from "../../stores/storeStore"; import { storeComponent } from "../../types/store"; import { cn } from "../../utils/utils"; +import InputSearchComponent from "../MainPage/components/myCollectionComponent/components/inputSearchComponent"; export default function StorePage(): JSX.Element { const hasApiKey = useStoreStore((state) => state.hasApiKey); @@ -47,7 +48,7 @@ export default function StorePage(): JSX.Element { const setErrorData = useAlertStore((state) => state.setErrorData); const setCurrentFlowId = useFlowsManagerStore( - (state) => state.setCurrentFlowId + (state) => state.setCurrentFlowId, ); const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId); const [loading, setLoading] = useState(true); @@ -144,7 +145,7 @@ export default function StorePage(): JSX.Element { setTotalRowsCount( filteredCategories?.length === 0 ? Number(res?.count ?? 0) - : res?.results?.length ?? 0 + : res?.results?.length ?? 0, ); } }) @@ -187,7 +188,7 @@ export default function StorePage(): JSX.Element { disabled={loading} className={cn( `${!validApiKey ? "animate-pulse border-error" : ""}`, - loading ? "cursor-not-allowed" : "" + loading ? 
"cursor-not-allowed" : "", )} variant="primary" > @@ -202,36 +203,20 @@ export default function StorePage(): JSX.Element { - - { - setInputText(e.target.value); - }} - onKeyDown={(e) => { - if (e.key === "Enter") { - setSearchNow(uniqueId()); - } - }} - value={inputText} - /> - { + setInputText(e.target.value)} + onKeyDown={(e) => { + if (e.key === "Enter") { setSearchNow(uniqueId()); - }} - data-testid="search-store-button" - > - - - + } + }} + onClick={() => { + setSearchNow(uniqueId()); + }} + /> ( delete newFields[field]; set({ unavaliableFields: newFields }); }, - globalVariablesEntries: [], + globalVariablesEntries: undefined, globalVariables: {}, setGlobalVariables: (variables) => { set({ diff --git a/src/frontend/src/types/zustand/globalVariables/index.ts b/src/frontend/src/types/zustand/globalVariables/index.ts index d22170ed2..4b178088c 100644 --- a/src/frontend/src/types/zustand/globalVariables/index.ts +++ b/src/frontend/src/types/zustand/globalVariables/index.ts @@ -1,5 +1,5 @@ export type GlobalVariablesStore = { - globalVariablesEntries: Array; + globalVariablesEntries: Array | undefined; globalVariables: { [name: string]: { id: string; diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index 1bedfa64d..c73ec5d23 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -153,6 +153,7 @@ import { AzureIcon } from "../icons/Azure"; import { BingIcon } from "../icons/Bing"; import { BotMessageSquareIcon } from "../icons/BotMessageSquare"; import { ChromaIcon } from "../icons/ChromaIcon"; +import { CouchbaseIcon } from "../icons/Couchbase"; import { CohereIcon } from "../icons/Cohere"; import { ElasticsearchIcon } from "../icons/ElasticsearchStore"; import { EvernoteIcon } from "../icons/Evernote"; @@ -324,6 +325,7 @@ export const nodeIconsLucide: iconsType = { Vectara: VectaraIcon, ArrowUpToLine: ArrowUpToLine, Chroma: ChromaIcon, + Couchbase: CouchbaseIcon, AirbyteJSONLoader: AirbyteIcon, AmazonBedrockEmbeddings: AWSIcon, Amazon: AWSIcon,