ref: Refactor vectorstore components structure (#9486)
* Refactor vectorstore components structure Moved vectorstore components for Chroma, ClickHouse, Couchbase, DataStax, Elastic, Milvus, MongoDB, Pinecone, Qdrant, Supabase, Upstash, Vectara, and Weaviate into dedicated subfolders with __init__.py files for each. Updated Redis vectorstore implementation to reside in redis.py and removed the old vectorstores/redis.py. Adjusted starter project JSONs and frontend constants to reflect new module paths and sidebar entries for these vectorstores. * Refactor vectorstore components and add lazy imports Moved Datastax-related files from vectorstores to a dedicated datastax directory. Added lazy import logic to __init__.py files for chroma, clickhouse, couchbase, elastic, milvus, mongodb, pinecone, qdrant, supabase, upstash, vectara, and weaviate components. Cleaned up vectorstores/__init__.py to only include local and faiss components, improving modularity and import efficiency. * [autofix.ci] apply automated fixes * Refactor vectorstore components structure Moved FAISS, Cassandra, and pgvector components to dedicated subdirectories with lazy-loading __init__.py files. Updated imports and references throughout the backend and frontend to reflect new locations. Removed obsolete datastax Cassandra component. Added new sidebar bundle entries for FAISS, Cassandra, and pgvector in frontend constants and style utilities. * Add lazy imports and Redis chat memory component Refactored the Redis module to support lazy imports for RedisIndexChatMemory and RedisVectorStoreComponent, improving import efficiency. Added a new redis_chat.py file implementing RedisIndexChatMemory for chat message storage and retrieval using Redis. 
* Fix vector store astra imports * Revert package lock changes * More test fixes * Update test_vector_store_rag.py * Update test_dynamic_imports.py * Update vector_store_rag.py * Update test_dynamic_imports.py * Refactor the cassandra chat component * Fix frontend tests for bundle * Mark Local DB as legacy * Update inputComponent.spec.ts * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Eric Hare <ericrhare@gmail.com> Co-authored-by: Carlos Coelho <80289056+carlosrcoelho@users.noreply.github.com>
This commit is contained in:
parent
4939801b91
commit
ae274a3e6d
63 changed files with 743 additions and 224 deletions
34
src/backend/base/langflow/components/FAISS/__init__.py
Normal file
34
src/backend/base/langflow/components/FAISS/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .faiss import FaissVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"FaissVectorStoreComponent": "faiss",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"FaissVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve FAISS component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
40
src/backend/base/langflow/components/cassandra/__init__.py
Normal file
40
src/backend/base/langflow/components/cassandra/__init__.py
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .cassandra import CassandraVectorStoreComponent
|
||||
from .cassandra_chat import CassandraChatMemory
|
||||
from .cassandra_graph import CassandraGraphVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"CassandraVectorStoreComponent": "cassandra",
|
||||
"CassandraGraphVectorStoreComponent": "cassandra_graph",
|
||||
"CassandraChatMemory": "cassandra_chat",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"CassandraChatMemory",
|
||||
"CassandraGraphVectorStoreComponent",
|
||||
"CassandraVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Cassandra component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/chroma/__init__.py
Normal file
34
src/backend/base/langflow/components/chroma/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .chroma import ChromaVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"ChromaVectorStoreComponent": "chroma",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"ChromaVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Chroma component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/clickhouse/__init__.py
Normal file
34
src/backend/base/langflow/components/clickhouse/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .clickhouse import ClickhouseVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"ClickhouseVectorStoreComponent": "clickhouse",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"ClickhouseVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve ClickHouse component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/couchbase/__init__.py
Normal file
34
src/backend/base/langflow/components/couchbase/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .couchbase import CouchbaseVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"CouchbaseVectorStoreComponent": "couchbase",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"CouchbaseVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Couchbase component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
|
|
@ -8,9 +8,9 @@ if TYPE_CHECKING:
|
|||
from .astra_assistant_manager import AstraAssistantManager
|
||||
from .astra_db import AstraDBChatMemory
|
||||
from .astra_vectorize import AstraVectorizeComponent
|
||||
from .astradb import AstraDBVectorStoreComponent
|
||||
from .astradb_cql import AstraDBCQLToolComponent
|
||||
from .astradb_tool import AstraDBToolComponent
|
||||
from .cassandra import CassandraChatMemory
|
||||
from .create_assistant import AssistantsCreateAssistant
|
||||
from .create_thread import AssistantsCreateThread
|
||||
from .dotenv import Dotenv
|
||||
|
|
@ -29,8 +29,8 @@ _dynamic_imports = {
|
|||
"AstraDBCQLToolComponent": "astradb_cql",
|
||||
"AstraDBChatMemory": "astra_db",
|
||||
"AstraDBToolComponent": "astradb_tool",
|
||||
"AstraDBVectorStoreComponent": "astradb",
|
||||
"AstraVectorizeComponent": "astra_vectorize",
|
||||
"CassandraChatMemory": "cassandra",
|
||||
"Dotenv": "dotenv",
|
||||
"GetEnvVar": "getenvvar",
|
||||
}
|
||||
|
|
@ -45,8 +45,8 @@ __all__ = [
|
|||
"AstraDBCQLToolComponent",
|
||||
"AstraDBChatMemory",
|
||||
"AstraDBToolComponent",
|
||||
"AstraDBVectorStoreComponent",
|
||||
"AstraVectorizeComponent",
|
||||
"CassandraChatMemory",
|
||||
"Dotenv",
|
||||
"GetEnvVar",
|
||||
]
|
||||
|
|
|
|||
37
src/backend/base/langflow/components/elastic/__init__.py
Normal file
37
src/backend/base/langflow/components/elastic/__init__.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .elasticsearch import ElasticsearchVectorStoreComponent
|
||||
from .opensearch import OpenSearchVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"ElasticsearchVectorStoreComponent": "elasticsearch",
|
||||
"OpenSearchVectorStoreComponent": "opensearch",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"ElasticsearchVectorStoreComponent",
|
||||
"OpenSearchVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Elastic component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/milvus/__init__.py
Normal file
34
src/backend/base/langflow/components/milvus/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .milvus import MilvusVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"MilvusVectorStoreComponent": "milvus",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"MilvusVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Milvus component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/mongodb/__init__.py
Normal file
34
src/backend/base/langflow/components/mongodb/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .mongodb_atlas import MongoVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"MongoVectorStoreComponent": "mongodb_atlas",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"MongoVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve MongoDB component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/pgvector/__init__.py
Normal file
34
src/backend/base/langflow/components/pgvector/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .pgvector import PGVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"PGVectorStoreComponent": "pgvector",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"PGVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve pgvector component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/pinecone/__init__.py
Normal file
34
src/backend/base/langflow/components/pinecone/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .pinecone import PineconeVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"PineconeVectorStoreComponent": "pinecone",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"PineconeVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Pinecone component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
34
src/backend/base/langflow/components/qdrant/__init__.py
Normal file
34
src/backend/base/langflow/components/qdrant/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .qdrant import QdrantVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"QdrantVectorStoreComponent": "qdrant",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"QdrantVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Qdrant component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
|
|
@ -1,3 +1,37 @@
|
|||
from .redis import RedisIndexChatMemory
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["RedisIndexChatMemory"]
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .redis import RedisVectorStoreComponent
|
||||
from .redis_chat import RedisIndexChatMemory
|
||||
|
||||
_dynamic_imports = {
|
||||
"RedisVectorStoreComponent": "redis",
|
||||
"RedisIndexChatMemory": "redis_chat",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"RedisIndexChatMemory",
|
||||
"RedisVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
    """Resolve Redis component names lazily, importing on first access."""
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    try:
        result = import_mod(attr_name, module_name, __spec__.parent)
    except (ModuleNotFoundError, ImportError, AttributeError) as e:
        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
        raise AttributeError(msg) from e
    # Cache the resolved object so later lookups bypass __getattr__.
    globals()[attr_name] = result
    return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Expose the public component names for dir()."""
    return [*__all__]
|
||||
|
|
|
|||
|
|
@ -1,43 +1,89 @@
|
|||
from urllib import parse
|
||||
from pathlib import Path
|
||||
|
||||
from langchain_community.chat_message_histories.redis import RedisChatMessageHistory
|
||||
from langchain.text_splitter import CharacterTextSplitter
|
||||
from langchain_community.vectorstores.redis import Redis
|
||||
|
||||
from langflow.base.memory.model import LCChatMemoryComponent
|
||||
from langflow.field_typing.constants import Memory
|
||||
from langflow.inputs.inputs import IntInput, MessageTextInput, SecretStrInput, StrInput
|
||||
from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store
|
||||
from langflow.helpers.data import docs_to_data
|
||||
from langflow.io import HandleInput, IntInput, SecretStrInput, StrInput
|
||||
from langflow.schema.data import Data
|
||||
|
||||
|
||||
class RedisIndexChatMemory(LCChatMemoryComponent):
|
||||
display_name = "Redis Chat Memory"
|
||||
description = "Retrieves and store chat messages from Redis."
|
||||
name = "RedisChatMemory"
|
||||
class RedisVectorStoreComponent(LCVectorStoreComponent):
|
||||
"""A custom component for implementing a Vector Store using Redis."""
|
||||
|
||||
display_name: str = "Redis"
|
||||
description: str = "Implementation of Vector Store using Redis"
|
||||
name = "Redis"
|
||||
icon = "Redis"
|
||||
|
||||
inputs = [
|
||||
SecretStrInput(name="redis_server_url", display_name="Redis Server Connection String", required=True),
|
||||
StrInput(
|
||||
name="host", display_name="hostname", required=True, value="localhost", info="IP address or hostname."
|
||||
name="redis_index_name",
|
||||
display_name="Redis Index",
|
||||
),
|
||||
IntInput(name="port", display_name="port", required=True, value=6379, info="Redis Port Number."),
|
||||
StrInput(name="database", display_name="database", required=True, value="0", info="Redis database."),
|
||||
MessageTextInput(
|
||||
name="username", display_name="Username", value="", info="The Redis user name.", advanced=True
|
||||
StrInput(name="code", display_name="Code", advanced=True),
|
||||
StrInput(
|
||||
name="schema",
|
||||
display_name="Schema",
|
||||
),
|
||||
SecretStrInput(
|
||||
name="password", display_name="Password", value="", info="The password for username.", advanced=True
|
||||
),
|
||||
StrInput(name="key_prefix", display_name="Key prefix", info="Key prefix.", advanced=True),
|
||||
MessageTextInput(
|
||||
name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True
|
||||
*LCVectorStoreComponent.inputs,
|
||||
IntInput(
|
||||
name="number_of_results",
|
||||
display_name="Number of Results",
|
||||
info="Number of results to return.",
|
||||
value=4,
|
||||
advanced=True,
|
||||
),
|
||||
HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]),
|
||||
]
|
||||
|
||||
def build_message_history(self) -> Memory:
|
||||
kwargs = {}
|
||||
password: str | None = self.password
|
||||
if self.key_prefix:
|
||||
kwargs["key_prefix"] = self.key_prefix
|
||||
if password:
|
||||
password = parse.quote_plus(password)
|
||||
@check_cached_vector_store
def build_vector_store(self) -> Redis:
    """Create a Redis vector store from the ingested documents.

    When documents are provided they are split with a character splitter
    and indexed via ``Redis.from_documents``; otherwise the component
    connects to an existing index, which requires a schema.

    Returns:
        Redis: The constructed or connected Redis vector store.

    Raises:
        ValueError: If no documents are provided and ``schema`` is unset.
    """
    # Convert DataFrame to Data if needed using parent's method
    self.ingest_data = self._prepare_ingest_data()

    documents = []
    for _input in self.ingest_data or []:
        if isinstance(_input, Data):
            documents.append(_input.to_lc_document())
        else:
            documents.append(_input)
    # NOTE: removed leftover debug code that wrote the documents to a
    # misspelled "docuemnts.txt" file in the working directory on every build.

    if not documents:
        if self.schema is None:
            msg = "If no documents are provided, a schema must be provided."
            raise ValueError(msg)
        redis_vs = Redis.from_existing_index(
            embedding=self.embedding,
            index_name=self.redis_index_name,
            schema=self.schema,
            key_prefix=None,
            redis_url=self.redis_server_url,
        )
    else:
        text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
        docs = text_splitter.split_documents(documents)
        redis_vs = Redis.from_documents(
            documents=docs,
            embedding=self.embedding,
            redis_url=self.redis_server_url,
            index_name=self.redis_index_name,
        )
    return redis_vs
|
||||
|
||||
def search_documents(self) -> list[Data]:
    """Run a similarity search for the configured query against the store."""
    vector_store = self.build_vector_store()

    query = self.search_query
    # Nothing to search for: missing, non-string, or whitespace-only query.
    if not (query and isinstance(query, str) and query.strip()):
        return []

    docs = vector_store.similarity_search(
        query=query,
        k=self.number_of_results,
    )

    data = docs_to_data(docs)
    self.status = data
    return data
|
||||
|
|
|
|||
43
src/backend/base/langflow/components/redis/redis_chat.py
Normal file
43
src/backend/base/langflow/components/redis/redis_chat.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
from urllib import parse
|
||||
|
||||
from langchain_community.chat_message_histories.redis import RedisChatMessageHistory
|
||||
|
||||
from langflow.base.memory.model import LCChatMemoryComponent
|
||||
from langflow.field_typing.constants import Memory
|
||||
from langflow.inputs.inputs import IntInput, MessageTextInput, SecretStrInput, StrInput
|
||||
|
||||
|
||||
class RedisIndexChatMemory(LCChatMemoryComponent):
    """Component that stores and retrieves chat messages using Redis."""

    display_name = "Redis Chat Memory"
    description = "Retrieves and store chat messages from Redis."
    name = "RedisChatMemory"
    icon = "Redis"

    inputs = [
        StrInput(
            name="host", display_name="hostname", required=True, value="localhost", info="IP address or hostname."
        ),
        IntInput(name="port", display_name="port", required=True, value=6379, info="Redis Port Number."),
        StrInput(name="database", display_name="database", required=True, value="0", info="Redis database."),
        MessageTextInput(
            name="username", display_name="Username", value="", info="The Redis user name.", advanced=True
        ),
        SecretStrInput(
            name="password", display_name="Password", value="", info="The password for username.", advanced=True
        ),
        StrInput(name="key_prefix", display_name="Key prefix", info="Key prefix.", advanced=True),
        MessageTextInput(
            name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True
        ),
    ]

    def build_message_history(self) -> Memory:
        """Build a ``RedisChatMessageHistory`` from the configured inputs.

        Returns:
            Memory: Chat history backed by the configured Redis database,
            keyed by ``session_id``.
        """
        kwargs = {}
        password: str | None = self.password
        if self.key_prefix:
            kwargs["key_prefix"] = self.key_prefix
        if password:
            # URL-encode the password so reserved characters (e.g. '@', '/',
            # ':') cannot corrupt the connection URL. The escaped value was
            # previously computed but never used — the raw password was
            # interpolated instead.
            password = parse.quote_plus(password)

        url = f"redis://{self.username}:{password}@{self.host}:{self.port}/{self.database}"
        return RedisChatMessageHistory(session_id=self.session_id, url=url, **kwargs)
|
||||
37
src/backend/base/langflow/components/supabase/__init__.py
Normal file
37
src/backend/base/langflow/components/supabase/__init__.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .supabase import SupabaseVectorStoreComponent
|
||||
from .supabase_composio import SupabaseComposioComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"SupabaseVectorStoreComponent": "supabase",
|
||||
"SupabaseComposioComponent": "supabase_composio",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"SupabaseComposioComponent",
|
||||
"SupabaseVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
|
||||
"""Lazily import Supabase components on attribute access."""
|
||||
if attr_name not in _dynamic_imports:
|
||||
msg = f"module '{__name__}' has no attribute '{attr_name}'"
|
||||
raise AttributeError(msg)
|
||||
try:
|
||||
result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
|
||||
except (ModuleNotFoundError, ImportError, AttributeError) as e:
|
||||
msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
|
||||
raise AttributeError(msg) from e
|
||||
globals()[attr_name] = result
|
||||
return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
|
||||
return list(__all__)
|
||||
34
src/backend/base/langflow/components/upstash/__init__.py
Normal file
34
src/backend/base/langflow/components/upstash/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .upstash import UpstashVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"UpstashVectorStoreComponent": "upstash",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"UpstashVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
|
||||
"""Lazily import Upstash components on attribute access."""
|
||||
if attr_name not in _dynamic_imports:
|
||||
msg = f"module '{__name__}' has no attribute '{attr_name}'"
|
||||
raise AttributeError(msg)
|
||||
try:
|
||||
result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
|
||||
except (ModuleNotFoundError, ImportError, AttributeError) as e:
|
||||
msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
|
||||
raise AttributeError(msg) from e
|
||||
globals()[attr_name] = result
|
||||
return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
|
||||
return list(__all__)
|
||||
37
src/backend/base/langflow/components/vectara/__init__.py
Normal file
37
src/backend/base/langflow/components/vectara/__init__.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .vectara import VectaraVectorStoreComponent
|
||||
from .vectara_rag import VectaraRagComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"VectaraVectorStoreComponent": "vectara",
|
||||
"VectaraRagComponent": "vectara_rag",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"VectaraRagComponent",
|
||||
"VectaraVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
|
||||
"""Lazily import Vectara components on attribute access."""
|
||||
if attr_name not in _dynamic_imports:
|
||||
msg = f"module '{__name__}' has no attribute '{attr_name}'"
|
||||
raise AttributeError(msg)
|
||||
try:
|
||||
result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
|
||||
except (ModuleNotFoundError, ImportError, AttributeError) as e:
|
||||
msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
|
||||
raise AttributeError(msg) from e
|
||||
globals()[attr_name] = result
|
||||
return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
|
||||
return list(__all__)
|
||||
|
|
@ -5,83 +5,14 @@ from typing import TYPE_CHECKING, Any
|
|||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .astradb import AstraDBVectorStoreComponent
|
||||
from .astradb_graph import AstraDBGraphVectorStoreComponent
|
||||
from .cassandra import CassandraVectorStoreComponent
|
||||
from .cassandra_graph import CassandraGraphVectorStoreComponent
|
||||
from .chroma import ChromaVectorStoreComponent
|
||||
from .clickhouse import ClickhouseVectorStoreComponent
|
||||
from .couchbase import CouchbaseVectorStoreComponent
|
||||
from .elasticsearch import ElasticsearchVectorStoreComponent
|
||||
from .faiss import FaissVectorStoreComponent
|
||||
from .graph_rag import GraphRAGComponent
|
||||
from .hcd import HCDVectorStoreComponent
|
||||
from .local_db import LocalDBComponent
|
||||
from .milvus import MilvusVectorStoreComponent
|
||||
from .mongodb_atlas import MongoVectorStoreComponent
|
||||
from .opensearch import OpenSearchVectorStoreComponent
|
||||
from .pgvector import PGVectorStoreComponent
|
||||
from .pinecone import PineconeVectorStoreComponent
|
||||
from .qdrant import QdrantVectorStoreComponent
|
||||
from .redis import RedisVectorStoreComponent
|
||||
from .supabase import SupabaseVectorStoreComponent
|
||||
from .upstash import UpstashVectorStoreComponent
|
||||
from .vectara import VectaraVectorStoreComponent
|
||||
from .vectara_rag import VectaraRagComponent
|
||||
from .weaviate import WeaviateVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"AstraDBVectorStoreComponent": "astradb",
|
||||
"AstraDBGraphVectorStoreComponent": "astradb_graph",
|
||||
"CassandraVectorStoreComponent": "cassandra",
|
||||
"CassandraGraphVectorStoreComponent": "cassandra_graph",
|
||||
"ChromaVectorStoreComponent": "chroma",
|
||||
"ClickhouseVectorStoreComponent": "clickhouse",
|
||||
"CouchbaseVectorStoreComponent": "couchbase",
|
||||
"ElasticsearchVectorStoreComponent": "elasticsearch",
|
||||
"FaissVectorStoreComponent": "faiss",
|
||||
"GraphRAGComponent": "graph_rag",
|
||||
"HCDVectorStoreComponent": "hcd",
|
||||
"LocalDBComponent": "local_db",
|
||||
"MilvusVectorStoreComponent": "milvus",
|
||||
"MongoVectorStoreComponent": "mongodb_atlas",
|
||||
"OpenSearchVectorStoreComponent": "opensearch",
|
||||
"PGVectorStoreComponent": "pgvector",
|
||||
"PineconeVectorStoreComponent": "pinecone",
|
||||
"QdrantVectorStoreComponent": "qdrant",
|
||||
"RedisVectorStoreComponent": "redis",
|
||||
"SupabaseVectorStoreComponent": "supabase",
|
||||
"UpstashVectorStoreComponent": "upstash",
|
||||
"VectaraVectorStoreComponent": "vectara",
|
||||
"VectaraRagComponent": "vectara_rag",
|
||||
"WeaviateVectorStoreComponent": "weaviate",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"AstraDBGraphVectorStoreComponent",
|
||||
"AstraDBVectorStoreComponent",
|
||||
"CassandraGraphVectorStoreComponent",
|
||||
"CassandraVectorStoreComponent",
|
||||
"ChromaVectorStoreComponent",
|
||||
"ClickhouseVectorStoreComponent",
|
||||
"CouchbaseVectorStoreComponent",
|
||||
"ElasticsearchVectorStoreComponent",
|
||||
"FaissVectorStoreComponent",
|
||||
"GraphRAGComponent",
|
||||
"HCDVectorStoreComponent",
|
||||
"LocalDBComponent",
|
||||
"MilvusVectorStoreComponent",
|
||||
"MongoVectorStoreComponent",
|
||||
"OpenSearchVectorStoreComponent",
|
||||
"PGVectorStoreComponent",
|
||||
"PineconeVectorStoreComponent",
|
||||
"QdrantVectorStoreComponent",
|
||||
"RedisVectorStoreComponent",
|
||||
"SupabaseVectorStoreComponent",
|
||||
"UpstashVectorStoreComponent",
|
||||
"VectaraRagComponent",
|
||||
"VectaraVectorStoreComponent",
|
||||
"WeaviateVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ class LocalDBComponent(LCVectorStoreComponent):
|
|||
description: str = "Local Vector Store with search capabilities"
|
||||
name = "LocalDB"
|
||||
icon = "database"
|
||||
legacy = True
|
||||
|
||||
inputs = [
|
||||
TabInput(
|
||||
|
|
|
|||
|
|
@ -1,89 +0,0 @@
|
|||
from pathlib import Path
|
||||
|
||||
from langchain.text_splitter import CharacterTextSplitter
|
||||
from langchain_community.vectorstores.redis import Redis
|
||||
|
||||
from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store
|
||||
from langflow.helpers.data import docs_to_data
|
||||
from langflow.io import HandleInput, IntInput, SecretStrInput, StrInput
|
||||
from langflow.schema.data import Data
|
||||
|
||||
|
||||
class RedisVectorStoreComponent(LCVectorStoreComponent):
|
||||
"""A custom component for implementing a Vector Store using Redis."""
|
||||
|
||||
display_name: str = "Redis"
|
||||
description: str = "Implementation of Vector Store using Redis"
|
||||
name = "Redis"
|
||||
icon = "Redis"
|
||||
|
||||
inputs = [
|
||||
SecretStrInput(name="redis_server_url", display_name="Redis Server Connection String", required=True),
|
||||
StrInput(
|
||||
name="redis_index_name",
|
||||
display_name="Redis Index",
|
||||
),
|
||||
StrInput(name="code", display_name="Code", advanced=True),
|
||||
StrInput(
|
||||
name="schema",
|
||||
display_name="Schema",
|
||||
),
|
||||
*LCVectorStoreComponent.inputs,
|
||||
IntInput(
|
||||
name="number_of_results",
|
||||
display_name="Number of Results",
|
||||
info="Number of results to return.",
|
||||
value=4,
|
||||
advanced=True,
|
||||
),
|
||||
HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]),
|
||||
]
|
||||
|
||||
@check_cached_vector_store
|
||||
def build_vector_store(self) -> Redis:
|
||||
# Convert DataFrame to Data if needed using parent's method
|
||||
self.ingest_data = self._prepare_ingest_data()
|
||||
|
||||
documents = []
|
||||
for _input in self.ingest_data or []:
|
||||
if isinstance(_input, Data):
|
||||
documents.append(_input.to_lc_document())
|
||||
else:
|
||||
documents.append(_input)
|
||||
Path("docuemnts.txt").write_text(str(documents), encoding="utf-8")
|
||||
|
||||
if not documents:
|
||||
if self.schema is None:
|
||||
msg = "If no documents are provided, a schema must be provided."
|
||||
raise ValueError(msg)
|
||||
redis_vs = Redis.from_existing_index(
|
||||
embedding=self.embedding,
|
||||
index_name=self.redis_index_name,
|
||||
schema=self.schema,
|
||||
key_prefix=None,
|
||||
redis_url=self.redis_server_url,
|
||||
)
|
||||
else:
|
||||
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
|
||||
docs = text_splitter.split_documents(documents)
|
||||
redis_vs = Redis.from_documents(
|
||||
documents=docs,
|
||||
embedding=self.embedding,
|
||||
redis_url=self.redis_server_url,
|
||||
index_name=self.redis_index_name,
|
||||
)
|
||||
return redis_vs
|
||||
|
||||
def search_documents(self) -> list[Data]:
|
||||
vector_store = self.build_vector_store()
|
||||
|
||||
if self.search_query and isinstance(self.search_query, str) and self.search_query.strip():
|
||||
docs = vector_store.similarity_search(
|
||||
query=self.search_query,
|
||||
k=self.number_of_results,
|
||||
)
|
||||
|
||||
data = docs_to_data(docs)
|
||||
self.status = data
|
||||
return data
|
||||
return []
|
||||
34
src/backend/base/langflow/components/weaviate/__init__.py
Normal file
34
src/backend/base/langflow/components/weaviate/__init__.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from langflow.components._importing import import_mod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .weaviate import WeaviateVectorStoreComponent
|
||||
|
||||
_dynamic_imports = {
|
||||
"WeaviateVectorStoreComponent": "weaviate",
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"WeaviateVectorStoreComponent",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(attr_name: str) -> Any:
|
||||
"""Lazily import Weaviate components on attribute access."""
|
||||
if attr_name not in _dynamic_imports:
|
||||
msg = f"module '{__name__}' has no attribute '{attr_name}'"
|
||||
raise AttributeError(msg)
|
||||
try:
|
||||
result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
|
||||
except (ModuleNotFoundError, ImportError, AttributeError) as e:
|
||||
msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
|
||||
raise AttributeError(msg) from e
|
||||
globals()[attr_name] = result
|
||||
return result
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
|
||||
return list(__all__)
|
||||
|
|
@ -1199,7 +1199,7 @@
|
|||
"lf_version": "1.4.3",
|
||||
"metadata": {
|
||||
"code_hash": "23fbe9daca09",
|
||||
"module": "langflow.components.vectorstores.astradb.AstraDBVectorStoreComponent"
|
||||
"module": "langflow.components.datastax.astradb.AstraDBVectorStoreComponent"
|
||||
},
|
||||
"minimized": false,
|
||||
"output_types": [],
|
||||
|
|
|
|||
|
|
@ -2183,7 +2183,7 @@
|
|||
"lf_version": "1.4.2",
|
||||
"metadata": {
|
||||
"code_hash": "ed38680af3a6",
|
||||
"module": "langflow.components.vectorstores.faiss.FaissVectorStoreComponent"
|
||||
"module": "langflow.components.FAISS.faiss.FaissVectorStoreComponent"
|
||||
},
|
||||
"minimized": false,
|
||||
"output_types": [],
|
||||
|
|
|
|||
|
|
@ -2710,7 +2710,7 @@
|
|||
"legacy": false,
|
||||
"metadata": {
|
||||
"code_hash": "23fbe9daca09",
|
||||
"module": "langflow.components.vectorstores.astradb.AstraDBVectorStoreComponent"
|
||||
"module": "langflow.components.datastax.astradb.AstraDBVectorStoreComponent"
|
||||
},
|
||||
"minimized": false,
|
||||
"output_types": [],
|
||||
|
|
@ -3486,7 +3486,7 @@
|
|||
"legacy": false,
|
||||
"metadata": {
|
||||
"code_hash": "23fbe9daca09",
|
||||
"module": "langflow.components.vectorstores.astradb.AstraDBVectorStoreComponent"
|
||||
"module": "langflow.components.datastax.astradb.AstraDBVectorStoreComponent"
|
||||
},
|
||||
"minimized": false,
|
||||
"output_types": [],
|
||||
|
|
|
|||
|
|
@ -1,12 +1,12 @@
|
|||
from textwrap import dedent
|
||||
|
||||
from langflow.components.data import FileComponent
|
||||
from langflow.components.datastax import AstraDBVectorStoreComponent
|
||||
from langflow.components.input_output import ChatInput, ChatOutput
|
||||
from langflow.components.models import LanguageModelComponent
|
||||
from langflow.components.openai.openai import OpenAIEmbeddingsComponent
|
||||
from langflow.components.processing import ParserComponent, PromptComponent
|
||||
from langflow.components.processing.split_text import SplitTextComponent
|
||||
from langflow.components.vectorstores import AstraDBVectorStoreComponent
|
||||
from langflow.graph import Graph
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -4,8 +4,8 @@ import pytest
|
|||
from astrapy import DataAPIClient
|
||||
from langchain_astradb import AstraDBVectorStore, VectorServiceOptions
|
||||
from langchain_core.documents import Document
|
||||
from langflow.components.datastax import AstraDBVectorStoreComponent
|
||||
from langflow.components.openai.openai import OpenAIEmbeddingsComponent
|
||||
from langflow.components.vectorstores import AstraDBVectorStoreComponent
|
||||
from langflow.schema.data import Data
|
||||
|
||||
from tests.api_keys import get_astradb_api_endpoint, get_astradb_application_token, get_openai_api_key
|
||||
|
|
|
|||
|
|
@ -112,7 +112,7 @@ class TestDynamicImportIntegration:
|
|||
|
||||
# Time the import of a large module
|
||||
start_time = time.time()
|
||||
from langflow.components import vectorstores
|
||||
from langflow.components import chroma
|
||||
|
||||
import_time = time.time() - start_time
|
||||
|
||||
|
|
@ -124,7 +124,7 @@ class TestDynamicImportIntegration:
|
|||
|
||||
# Now access a component - this should trigger loading
|
||||
start_time = time.time()
|
||||
chroma_component = vectorstores.ChromaVectorStoreComponent
|
||||
chroma_component = chroma.ChromaVectorStoreComponent
|
||||
access_time = time.time() - start_time
|
||||
|
||||
assert chroma_component is not None
|
||||
|
|
@ -236,21 +236,21 @@ class TestDynamicImportIntegration:
|
|||
|
||||
def test_large_scale_component_access(self):
|
||||
"""Test accessing many components doesn't cause issues."""
|
||||
from langflow.components import vectorstores
|
||||
from langflow.components import datastax
|
||||
|
||||
# Access multiple components rapidly
|
||||
components_accessed = []
|
||||
component_names = [
|
||||
"ChromaVectorStoreComponent",
|
||||
"PineconeVectorStoreComponent",
|
||||
"FaissVectorStoreComponent",
|
||||
"WeaviateVectorStoreComponent",
|
||||
"QdrantVectorStoreComponent",
|
||||
"AstraDBVectorStoreComponent",
|
||||
"AstraDBChatComponent",
|
||||
"AstraDBToolComponent",
|
||||
"AstraDBCQLToolComponent",
|
||||
"AstraAssistantManager",
|
||||
]
|
||||
|
||||
for name in component_names:
|
||||
if hasattr(vectorstores, name):
|
||||
component = getattr(vectorstores, name)
|
||||
if hasattr(datastax, name):
|
||||
component = getattr(datastax, name)
|
||||
components_accessed.append(component)
|
||||
|
||||
# Should have accessed multiple components without issues
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from langflow.components.vectorstores import AstraDBVectorStoreComponent
|
||||
from langflow.components.datastax import AstraDBVectorStoreComponent
|
||||
|
||||
from tests.base import ComponentTestBaseWithoutClient, VersionComponentMapping
|
||||
|
||||
|
|
|
|||
|
|
@ -287,7 +287,7 @@ class TestSpecificModulePatterns:
|
|||
import time
|
||||
|
||||
# Test large modules
|
||||
large_modules = ["vectorstores", "processing", "langchain_utilities"]
|
||||
large_modules = ["data", "processing", "langchain_utilities"]
|
||||
|
||||
for module_name in large_modules:
|
||||
if module_name in components.__all__:
|
||||
|
|
|
|||
|
|
@ -209,17 +209,17 @@ class TestPerformanceCharacteristics:
|
|||
|
||||
def test_lazy_loading_performance(self):
|
||||
"""Test that components can be accessed and cached properly."""
|
||||
from langflow.components import vectorstores
|
||||
from langflow.components import chroma as chromamodules
|
||||
|
||||
# Test that we can access a component
|
||||
chroma = vectorstores.ChromaVectorStoreComponent
|
||||
chroma = chromamodules.ChromaVectorStoreComponent
|
||||
assert chroma is not None
|
||||
|
||||
# After access, it should be cached in the module's globals
|
||||
assert "ChromaVectorStoreComponent" in vectorstores.__dict__
|
||||
assert "ChromaVectorStoreComponent" in chromamodules.__dict__
|
||||
|
||||
# Subsequent access should return the same cached object
|
||||
chroma_2 = vectorstores.ChromaVectorStoreComponent
|
||||
chroma_2 = chromamodules.ChromaVectorStoreComponent
|
||||
assert chroma_2 is chroma
|
||||
|
||||
def test_caching_behavior(self):
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ from pathlib import Path
|
|||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from langflow.components.vectorstores.chroma import ChromaVectorStoreComponent
|
||||
from langflow.components.chroma import ChromaVectorStoreComponent
|
||||
from langflow.schema.data import Data
|
||||
|
||||
from tests.base import ComponentTestBaseWithoutClient, VersionComponentMapping
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from faker import Faker
|
|||
from langchain_community.embeddings.fake import DeterministicFakeEmbedding
|
||||
from langchain_core.documents import Document
|
||||
from langchain_core.vectorstores.in_memory import InMemoryVectorStore
|
||||
from langflow.components.vectorstores.graph_rag import GraphRAGComponent
|
||||
from langflow.components.datastax.graph_rag import GraphRAGComponent
|
||||
|
||||
from tests.base import ComponentTestBaseWithoutClient
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ from typing import Any
|
|||
|
||||
import pytest
|
||||
from langchain_community.embeddings.fake import DeterministicFakeEmbedding
|
||||
from langflow.components.vectorstores.mongodb_atlas import MongoVectorStoreComponent
|
||||
from langflow.components.mongodb import MongoVectorStoreComponent
|
||||
from langflow.schema.data import Data
|
||||
from pymongo.collection import Collection
|
||||
|
||||
|
|
|
|||
|
|
@ -4,12 +4,12 @@ from textwrap import dedent
|
|||
|
||||
import pytest
|
||||
from langflow.components.data import FileComponent
|
||||
from langflow.components.datastax import AstraDBVectorStoreComponent
|
||||
from langflow.components.input_output import ChatInput, ChatOutput
|
||||
from langflow.components.openai.openai import OpenAIEmbeddingsComponent
|
||||
from langflow.components.openai.openai_chat_model import OpenAIModelComponent
|
||||
from langflow.components.processing import ParseDataComponent, PromptComponent
|
||||
from langflow.components.processing.split_text import SplitTextComponent
|
||||
from langflow.components.vectorstores import AstraDBVectorStoreComponent
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.graph.graph.constants import Finish
|
||||
from langflow.schema import Data
|
||||
|
|
|
|||
|
|
@ -763,6 +763,21 @@ export const BUNDLES_SIDEBAR_FOLDER_NAMES = [
|
|||
"langwatch",
|
||||
"YouTube",
|
||||
"youtube",
|
||||
"pinecone",
|
||||
"weaviate",
|
||||
"qdrant",
|
||||
"mongodb",
|
||||
"elastic",
|
||||
"supabase",
|
||||
"milvus",
|
||||
"chroma",
|
||||
"clickhouse",
|
||||
"couchbase",
|
||||
"upstash",
|
||||
"vectara",
|
||||
"cassandra",
|
||||
"FAISS",
|
||||
"pgvector",
|
||||
];
|
||||
|
||||
export const AUTHORIZED_DUPLICATE_REQUESTS = [
|
||||
|
|
|
|||
|
|
@ -247,17 +247,23 @@ export const SIDEBAR_BUNDLES = [
|
|||
{ display_name: "Azure", name: "azure", icon: "Azure" },
|
||||
{ display_name: "Baidu", name: "baidu", icon: "BaiduQianfan" },
|
||||
{ display_name: "Bing", name: "bing", icon: "Bing" },
|
||||
{ display_name: "Cassandra", name: "cassandra", icon: "Cassandra" },
|
||||
{ display_name: "Chroma", name: "chroma", icon: "Chroma" },
|
||||
{ display_name: "ClickHouse", name: "clickhouse", icon: "Clickhouse" },
|
||||
{ display_name: "Cleanlab", name: "cleanlab", icon: "Cleanlab" },
|
||||
{ display_name: "Cloudflare", name: "cloudflare", icon: "Cloudflare" },
|
||||
{ display_name: "Cohere", name: "cohere", icon: "Cohere" },
|
||||
{ display_name: "Composio", name: "composio", icon: "Composio" },
|
||||
{ display_name: "Confluence", name: "confluence", icon: "Confluence" },
|
||||
{ display_name: "Couchbase", name: "couchbase", icon: "Couchbase" },
|
||||
{ display_name: "CrewAI", name: "crewai", icon: "CrewAI" },
|
||||
{ display_name: "DataStax", name: "datastax", icon: "AstraDB" },
|
||||
{ display_name: "DeepSeek", name: "deepseek", icon: "DeepSeek" },
|
||||
{ display_name: "Docling", name: "docling", icon: "Docling" },
|
||||
{ display_name: "DuckDuckGo", name: "duckduckgo", icon: "DuckDuckGo" },
|
||||
{ display_name: "Elastic", name: "elastic", icon: "ElasticsearchStore" },
|
||||
{ display_name: "Exa", name: "exa", icon: "Exa" },
|
||||
{ display_name: "FAISS", name: "FAISS", icon: "FAISS" },
|
||||
{ display_name: "Firecrawl", name: "firecrawl", icon: "FirecrawlCrawlApi" },
|
||||
{ display_name: "Git", name: "git", icon: "GitLoader" },
|
||||
{ display_name: "Glean", name: "glean", icon: "Glean" },
|
||||
|
|
@ -279,7 +285,9 @@ export const SIDEBAR_BUNDLES = [
|
|||
{ display_name: "MariTalk", name: "maritalk", icon: "Maritalk" },
|
||||
{ display_name: "Mem0", name: "mem0", icon: "Mem0" },
|
||||
{ display_name: "Memories", name: "memories", icon: "Cpu" },
|
||||
{ display_name: "Milvus", name: "milvus", icon: "Milvus" },
|
||||
{ display_name: "MistralAI", name: "mistral", icon: "MistralAI" },
|
||||
{ display_name: "MongoDB", name: "mongodb", icon: "MongoDB" },
|
||||
{ display_name: "Needle", name: "needle", icon: "Needle" },
|
||||
{ display_name: "Not Diamond", name: "notdiamond", icon: "NotDiamond" },
|
||||
{ display_name: "Notion", name: "Notion", icon: "Notion" },
|
||||
|
|
@ -290,17 +298,23 @@ export const SIDEBAR_BUNDLES = [
|
|||
{ display_name: "OpenAI", name: "openai", icon: "OpenAI" },
|
||||
{ display_name: "OpenRouter", name: "openrouter", icon: "OpenRouter" },
|
||||
{ display_name: "Perplexity", name: "perplexity", icon: "Perplexity" },
|
||||
{ display_name: "pgvector", name: "pgvector", icon: "cpu" },
|
||||
{ display_name: "Pinecone", name: "pinecone", icon: "Pinecone" },
|
||||
{ display_name: "Qdrant", name: "qdrant", icon: "Qdrant" },
|
||||
{ display_name: "Redis", name: "redis", icon: "Redis" },
|
||||
{ display_name: "SambaNova", name: "sambanova", icon: "SambaNova" },
|
||||
{ display_name: "ScrapeGraph AI", name: "scrapegraph", icon: "ScrapeGraph" },
|
||||
{ display_name: "SearchApi", name: "searchapi", icon: "SearchAPI" },
|
||||
{ display_name: "SerpApi", name: "serpapi", icon: "SerpSearch" },
|
||||
{ display_name: "Serper", name: "serper", icon: "Serper" },
|
||||
{ display_name: "Supabase", name: "supabase", icon: "Supabase" },
|
||||
{ display_name: "Tavily", name: "tavily", icon: "TavilyIcon" },
|
||||
{ display_name: "TwelveLabs", name: "twelvelabs", icon: "TwelveLabs" },
|
||||
{ display_name: "Unstructured", name: "unstructured", icon: "Unstructured" },
|
||||
{ display_name: "Upstash", name: "upstash", icon: "Upstash" },
|
||||
{ display_name: "Vectara", name: "vectara", icon: "Vectara" },
|
||||
{ display_name: "Vector Stores", name: "vectorstores", icon: "Layers" },
|
||||
{ display_name: "Weaviate", name: "weaviate", icon: "Weaviate" },
|
||||
{ display_name: "Vertex AI", name: "vertexai", icon: "VertexAI" },
|
||||
{ display_name: "Wikipedia", name: "wikipedia", icon: "Wikipedia" },
|
||||
{
|
||||
|
|
|
|||
|
|
@ -121,7 +121,7 @@ test(
|
|||
await expect(page.getByTestId("disclosure-tools")).toBeVisible();
|
||||
|
||||
await expect(page.getByTestId("dataAPI Request")).toBeVisible();
|
||||
await expect(page.getByTestId("vectorstoresAstra DB")).toBeVisible();
|
||||
await expect(page.getByTestId("datastaxAstra DB")).toBeVisible();
|
||||
await expect(page.getByTestId("logicSub Flow [Deprecated]")).toBeVisible();
|
||||
|
||||
await page.getByTestId("sidebar-options-trigger").click();
|
||||
|
|
@ -137,7 +137,7 @@ test(
|
|||
await page.getByTestId("icon-X").first().click();
|
||||
|
||||
await expect(page.getByTestId("dataAPI Request")).not.toBeVisible();
|
||||
await expect(page.getByTestId("vectorstoresAstra DB")).not.toBeVisible();
|
||||
await expect(page.getByTestId("datastaxAstra DB")).not.toBeVisible();
|
||||
await expect(
|
||||
page.getByTestId("logicSub Flow [Deprecated]"),
|
||||
).not.toBeVisible();
|
||||
|
|
|
|||
|
|
@ -15,11 +15,11 @@ test(
|
|||
await page.getByTestId("sidebar-search-input").click();
|
||||
await page.getByTestId("sidebar-search-input").fill("Chroma");
|
||||
|
||||
await page.waitForSelector('[data-testid="vectorstoresChroma DB"]', {
|
||||
await page.waitForSelector('[data-testid="chromaChroma DB"]', {
|
||||
timeout: 3000,
|
||||
});
|
||||
await page
|
||||
.getByTestId("vectorstoresChroma DB")
|
||||
.getByTestId("chromaChroma DB")
|
||||
.dragTo(page.locator('//*[@id="react-flow-id"]'));
|
||||
await page.mouse.up();
|
||||
await page.mouse.down();
|
||||
|
|
|
|||
|
|
@ -65,13 +65,12 @@ test(
|
|||
"disclosure-bundles-langchain",
|
||||
"disclosure-bundles-assemblyai",
|
||||
"disclosure-bundles-datastax",
|
||||
"disclosure-bundles-vector stores",
|
||||
];
|
||||
|
||||
const elementTestIds = [
|
||||
"input_outputChat Output",
|
||||
"dataAPI Request",
|
||||
"vectorstoresAstra DB Graph",
|
||||
"datastaxAstra DB",
|
||||
"langchain_utilitiesTool Calling Agent",
|
||||
"langchain_utilitiesConversationChain",
|
||||
"mem0Mem0 Chat Memory",
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue