From 80ebe03d94bf93be58a10d02ba2e60eff891b77f Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Tue, 5 Aug 2025 17:48:59 -0300 Subject: [PATCH] refactor(core): implement centralized dynamic lazy import system for components (#8932) * feat: add import utilities for LangFlow components - Introduced a new module `_importing.py` containing the `import_mod` function. - This function dynamically imports attributes from specified modules, enhancing modularity and flexibility in component initialization. - Comprehensive docstring added for clarity on usage and parameters. * feat: implement dynamic imports for LangFlow components - Added dynamic import functionality to various LangFlow components, allowing for lazy loading of attributes on access. - Introduced a mapping in each component's `__init__.py` to manage imports efficiently. - Enhanced error handling for import failures, providing clearer messages for missing attributes. - Updated the `__dir__` method to reflect available attributes for better introspection and tab-completion support. - Comprehensive docstrings added to improve documentation and usability. * test: add comprehensive tests for dynamic imports and component accessibility - Introduced integration tests for dynamic import functionality, ensuring components are discoverable and instantiable post-refactor. - Added unit tests for the `_import_utils` module, validating the `import_mod` function's behavior and error handling. - Implemented tests to confirm all component modules are importable and maintain backward compatibility with existing import patterns. - Enhanced performance tests to measure lazy loading efficiency and memory usage during component access. - Ensured that all components have the required attributes for dynamic loading and that circular imports are prevented. 
* chore: update ruff pre-commit hook to version 0.12.2 in configuration file * refactor: update warning handling for dynamic imports - Moved the warning suppression for LangChainDeprecationWarning into the dynamic import context to ensure it only applies during the import process. - This change enhances clarity and maintains the original functionality while improving the robustness of the import mechanism. * test: enhance dynamic import integration tests for component attributes - Removed unnecessary import of AgentComponent and added assertions to verify essential attributes of OpenAIModelComponent, including display_name, description, icon, and inputs. - Ensured that each input field has the required attributes for better validation of component integrity during dynamic imports. * refactor: update import paths for Message class in conversation utilities - Changed the import of the Message class from langflow.field_typing to langflow.schema.message across multiple utility files, ensuring consistency and alignment with the updated module structure. - This refactor enhances code clarity and maintains compatibility with the latest schema definitions. * refactor: remove Vectara components from LangFlow - Deleted the Vectara components module from the codebase, streamlining the component structure. - This change helps to reduce complexity and maintain focus on core functionalities. * refactor: remove Vectara references from LangFlow component imports - Eliminated Vectara from both the import statements and dynamic imports mapping, streamlining the component structure. - This change contributes to reducing complexity and maintaining focus on core functionalities within the LangFlow framework. 
* [autofix.ci] apply automated fixes * fix: remove 'vectara' from __all__ in components module * refactor: improve error handling tests for dynamic imports * test: add tests for ModuleNotFoundError handling with None and special module names --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Edwin Jose --- .../base/langflow/components/__init__.py | 273 ++++++++++++++++ .../base/langflow/components/_importing.py | 37 +++ .../base/langflow/components/aiml/__init__.py | 34 +- .../langflow/components/amazon/__init__.py | 37 ++- .../langflow/components/anthropic/__init__.py | 31 +- .../components/assemblyai/__init__.py | 43 ++- .../langflow/components/azure/__init__.py | 34 +- .../langflow/components/baidu/__init__.py | 31 +- .../langflow/components/cleanlab/__init__.py | 43 ++- .../components/cloudflare/__init__.py | 31 +- .../langflow/components/cohere/__init__.py | 43 ++- .../langflow/components/composio/__init__.py | 46 ++- .../langflow/components/crewai/__init__.py | 46 ++- .../components/custom_component/__init__.py | 31 +- .../langflow/components/datastax/__init__.py | 67 +++- .../langflow/components/deepseek/__init__.py | 35 +- .../langflow/components/docling/__init__.py | 40 ++- .../components/embeddings/__init__.py | 36 +- .../langflow/components/firecrawl/__init__.py | 47 ++- .../base/langflow/components/groq/__init__.py | 35 +- .../langflow/components/helpers/__init__.py | 49 ++- .../components/huggingface/__init__.py | 34 +- .../base/langflow/components/ibm/__init__.py | 34 +- .../components/input_output/__init__.py | 40 ++- .../langchain_utilities/__init__.py | 106 ++++-- .../langchain_utilities/conversation.py | 2 +- .../langchain_utilities/llm_checker.py | 2 +- .../langchain_utilities/llm_math.py | 2 +- .../langchain_utilities/retrieval_qa.py | 6 +- .../langchain_utilities/sql_generator.py | 2 +- .../langflow/components/lmstudio/__init__.py | 34 +- .../langflow/components/logic/__init__.py | 49 
++- .../langflow/components/maritalk/__init__.py | 31 +- .../langflow/components/mistral/__init__.py | 39 ++- .../langflow/components/models/__init__.py | 34 +- .../components/notdiamond/__init__.py | 36 ++ .../langflow/components/novita/__init__.py | 31 +- .../langflow/components/nvidia/__init__.py | 54 ++- .../langflow/components/ollama/__init__.py | 34 +- .../langflow/components/openai/__init__.py | 34 +- .../components/openrouter/__init__.py | 31 +- .../components/perplexity/__init__.py | 35 +- .../components/processing/__init__.py | 113 +++++-- .../components/prototypes/__init__.py | 31 +- .../langflow/components/sambanova/__init__.py | 31 +- .../components/scrapegraph/__init__.py | 43 ++- .../langflow/components/searchapi/__init__.py | 36 ++ .../langflow/components/tools/__init__.py | 71 +++- .../components/twelvelabs/__init__.py | 49 ++- .../langflow/components/vectara/__init__.py | 0 .../components/vectorstores/__init__.py | 100 ++++-- .../langflow/components/vertexai/__init__.py | 34 +- .../base/langflow/components/xai/__init__.py | 31 +- .../langflow/components/youtube/__init__.py | 49 ++- .../test_dynamic_import_integration.py | 299 +++++++++++++++++ .../components/test_all_modules_importable.py | 307 ++++++++++++++++++ .../unit/components/test_dynamic_imports.py | 297 +++++++++++++++++ src/backend/tests/unit/test_import_utils.py | 176 ++++++++++ 58 files changed, 3174 insertions(+), 232 deletions(-) create mode 100644 src/backend/base/langflow/components/_importing.py delete mode 100644 src/backend/base/langflow/components/vectara/__init__.py create mode 100644 src/backend/tests/integration/test_dynamic_import_integration.py create mode 100644 src/backend/tests/unit/components/test_all_modules_importable.py create mode 100644 src/backend/tests/unit/components/test_dynamic_imports.py create mode 100644 src/backend/tests/unit/test_import_utils.py diff --git a/src/backend/base/langflow/components/__init__.py 
b/src/backend/base/langflow/components/__init__.py index e69de29bb..7dfd9bf27 100644 --- a/src/backend/base/langflow/components/__init__.py +++ b/src/backend/base/langflow/components/__init__.py @@ -0,0 +1,273 @@ +"""LangFlow Components module.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components import ( + Notion, + agentql, + agents, + aiml, + amazon, + anthropic, + apify, + arxiv, + assemblyai, + azure, + baidu, + bing, + cleanlab, + cloudflare, + cohere, + composio, + confluence, + crewai, + custom_component, + data, + datastax, + deepseek, + docling, + duckduckgo, + embeddings, + exa, + firecrawl, + git, + glean, + google, + groq, + helpers, + homeassistant, + huggingface, + ibm, + icosacomputing, + input_output, + langchain_utilities, + langwatch, + lmstudio, + logic, + maritalk, + mem0, + mistral, + models, + needle, + notdiamond, + novita, + nvidia, + olivya, + ollama, + openai, + openrouter, + perplexity, + processing, + prototypes, + redis, + sambanova, + scrapegraph, + searchapi, + serpapi, + tavily, + tools, + twelvelabs, + unstructured, + vectorstores, + vertexai, + wikipedia, + wolframalpha, + xai, + yahoosearch, + youtube, + zep, + ) + +_dynamic_imports = { + "agents": "langflow.components.agents", + "data": "langflow.components.data", + "processing": "langflow.components.processing", + "vectorstores": "langflow.components.vectorstores", + "tools": "langflow.components.tools", + "models": "langflow.components.models", + "embeddings": "langflow.components.embeddings", + "helpers": "langflow.components.helpers", + "input_output": "langflow.components.input_output", + "logic": "langflow.components.logic", + "custom_component": "langflow.components.custom_component", + "prototypes": "langflow.components.prototypes", + "openai": "langflow.components.openai", + "anthropic": "langflow.components.anthropic", + "google": 
"langflow.components.google", + "azure": "langflow.components.azure", + "huggingface": "langflow.components.huggingface", + "ollama": "langflow.components.ollama", + "groq": "langflow.components.groq", + "cohere": "langflow.components.cohere", + "mistral": "langflow.components.mistral", + "deepseek": "langflow.components.deepseek", + "nvidia": "langflow.components.nvidia", + "amazon": "langflow.components.amazon", + "vertexai": "langflow.components.vertexai", + "xai": "langflow.components.xai", + "perplexity": "langflow.components.perplexity", + "openrouter": "langflow.components.openrouter", + "lmstudio": "langflow.components.lmstudio", + "sambanova": "langflow.components.sambanova", + "maritalk": "langflow.components.maritalk", + "novita": "langflow.components.novita", + "olivya": "langflow.components.olivya", + "notdiamond": "langflow.components.notdiamond", + "needle": "langflow.components.needle", + "cloudflare": "langflow.components.cloudflare", + "baidu": "langflow.components.baidu", + "aiml": "langflow.components.aiml", + "ibm": "langflow.components.ibm", + "langchain_utilities": "langflow.components.langchain_utilities", + "crewai": "langflow.components.crewai", + "composio": "langflow.components.composio", + "mem0": "langflow.components.mem0", + "datastax": "langflow.components.datastax", + "cleanlab": "langflow.components.cleanlab", + "langwatch": "langflow.components.langwatch", + "icosacomputing": "langflow.components.icosacomputing", + "homeassistant": "langflow.components.homeassistant", + "agentql": "langflow.components.agentql", + "assemblyai": "langflow.components.assemblyai", + "twelvelabs": "langflow.components.twelvelabs", + "docling": "langflow.components.docling", + "unstructured": "langflow.components.unstructured", + "redis": "langflow.components.redis", + "zep": "langflow.components.zep", + "bing": "langflow.components.bing", + "duckduckgo": "langflow.components.duckduckgo", + "serpapi": "langflow.components.serpapi", + "searchapi": 
"langflow.components.searchapi", + "tavily": "langflow.components.tavily", + "exa": "langflow.components.exa", + "glean": "langflow.components.glean", + "yahoosearch": "langflow.components.yahoosearch", + "apify": "langflow.components.apify", + "arxiv": "langflow.components.arxiv", + "confluence": "langflow.components.confluence", + "firecrawl": "langflow.components.firecrawl", + "git": "langflow.components.git", + "wikipedia": "langflow.components.wikipedia", + "youtube": "langflow.components.youtube", + "scrapegraph": "langflow.components.scrapegraph", + "Notion": "langflow.components.Notion", + "wolframalpha": "langflow.components.wolframalpha", +} + +__all__: list[str] = [ + "Notion", + "agentql", + "agents", + "aiml", + "amazon", + "anthropic", + "apify", + "arxiv", + "assemblyai", + "azure", + "baidu", + "bing", + "cleanlab", + "cloudflare", + "cohere", + "composio", + "confluence", + "crewai", + "custom_component", + "data", + "datastax", + "deepseek", + "docling", + "duckduckgo", + "embeddings", + "exa", + "firecrawl", + "git", + "glean", + "google", + "groq", + "helpers", + "homeassistant", + "huggingface", + "ibm", + "icosacomputing", + "input_output", + "langchain_utilities", + "langwatch", + "lmstudio", + "logic", + "maritalk", + "mem0", + "mistral", + "models", + "needle", + "notdiamond", + "novita", + "nvidia", + "olivya", + "ollama", + "openai", + "openrouter", + "perplexity", + "processing", + "prototypes", + "redis", + "sambanova", + "scrapegraph", + "searchapi", + "serpapi", + "tavily", + "tools", + "twelvelabs", + "unstructured", + "vectorstores", + "vertexai", + "wikipedia", + "wolframalpha", + "xai", + "yahoosearch", + "youtube", + "zep", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import component modules on attribute access. + + Args: + attr_name (str): The attribute/module name to import. + + Returns: + Any: The imported module or attribute. 
+ + Raises: + AttributeError: If the attribute is not a known component or cannot be imported. + """ + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + # Use import_mod as in LangChain, passing the module name and package + result = import_mod(attr_name, "__module__", __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result # Cache for future access + return result + + +def __dir__() -> list[str]: + """Return list of available attributes for tab-completion and dir().""" + return list(__all__) + + +# Optional: Consistency check (can be removed in production) +_missing = set(__all__) - set(_dynamic_imports) +if _missing: + msg = f"Missing dynamic import mapping for: {', '.join(_missing)}" + raise ImportError(msg) diff --git a/src/backend/base/langflow/components/_importing.py b/src/backend/base/langflow/components/_importing.py new file mode 100644 index 000000000..b0655c7ba --- /dev/null +++ b/src/backend/base/langflow/components/_importing.py @@ -0,0 +1,37 @@ +"""Import utilities for LangFlow components.""" + +from __future__ import annotations + +from importlib import import_module + + +def import_mod( + attr_name: str, + module_name: str | None, + package: str | None, +) -> object: + """Import an attribute from a module located in a package. + + This utility function is used in custom __getattr__ methods within __init__.py + files to dynamically import attributes. + + Args: + attr_name: The name of the attribute to import. + module_name: The name of the module to import from. If None, the attribute + is imported from the package itself. + package: The name of the package where the module is located. 
+ """ + if module_name == "__module__" or module_name is None: + try: + result = import_module(f".{attr_name}", package=package) + except ModuleNotFoundError: + msg = f"module '{package!r}' has no attribute {attr_name!r}" + raise AttributeError(msg) from None + else: + try: + module = import_module(f".{module_name}", package=package) + except ModuleNotFoundError: + msg = f"module '{package!r}.{module_name!r}' not found" + raise ImportError(msg) from None + result = getattr(module, attr_name) + return result diff --git a/src/backend/base/langflow/components/aiml/__init__.py b/src/backend/base/langflow/components/aiml/__init__.py index 89dc93268..b340152ec 100644 --- a/src/backend/base/langflow/components/aiml/__init__.py +++ b/src/backend/base/langflow/components/aiml/__init__.py @@ -1,7 +1,37 @@ -from .aiml import AIMLModelComponent -from .aiml_embeddings import AIMLEmbeddingsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.aiml.aiml import AIMLModelComponent + from langflow.components.aiml.aiml_embeddings import AIMLEmbeddingsComponent + +_dynamic_imports = { + "AIMLModelComponent": "aiml", + "AIMLEmbeddingsComponent": "aiml_embeddings", +} __all__ = [ "AIMLEmbeddingsComponent", "AIMLModelComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import aiml components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git 
a/src/backend/base/langflow/components/amazon/__init__.py b/src/backend/base/langflow/components/amazon/__init__.py index f9bed4614..1273ecd61 100644 --- a/src/backend/base/langflow/components/amazon/__init__.py +++ b/src/backend/base/langflow/components/amazon/__init__.py @@ -1,5 +1,36 @@ -from .amazon_bedrock_embedding import AmazonBedrockEmbeddingsComponent -from .amazon_bedrock_model import AmazonBedrockComponent -from .s3_bucket_uploader import S3BucketUploaderComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.amazon.amazon_bedrock_embedding import AmazonBedrockEmbeddingsComponent + from langflow.components.amazon.amazon_bedrock_model import AmazonBedrockComponent + from langflow.components.amazon.s3_bucket_uploader import S3BucketUploaderComponent + +_dynamic_imports = { + "AmazonBedrockEmbeddingsComponent": "amazon_bedrock_embedding", + "AmazonBedrockComponent": "amazon_bedrock_model", + "S3BucketUploaderComponent": "s3_bucket_uploader", +} __all__ = ["AmazonBedrockComponent", "AmazonBedrockEmbeddingsComponent", "S3BucketUploaderComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import amazon components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/anthropic/__init__.py b/src/backend/base/langflow/components/anthropic/__init__.py index 8b97adcd5..2f79d728e 100644 --- 
a/src/backend/base/langflow/components/anthropic/__init__.py +++ b/src/backend/base/langflow/components/anthropic/__init__.py @@ -1,5 +1,34 @@ -from .anthropic import AnthropicModelComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.anthropic.anthropic import AnthropicModelComponent + +_dynamic_imports = { + "AnthropicModelComponent": "anthropic", +} __all__ = [ "AnthropicModelComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import Anthropic components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/assemblyai/__init__.py b/src/backend/base/langflow/components/assemblyai/__init__.py index 8272a8c92..6a80b4cd4 100644 --- a/src/backend/base/langflow/components/assemblyai/__init__.py +++ b/src/backend/base/langflow/components/assemblyai/__init__.py @@ -1,8 +1,23 @@ -from .assemblyai_get_subtitles import AssemblyAIGetSubtitles -from .assemblyai_lemur import AssemblyAILeMUR -from .assemblyai_list_transcripts import AssemblyAIListTranscripts -from .assemblyai_poll_transcript import AssemblyAITranscriptionJobPoller -from .assemblyai_start_transcript import AssemblyAITranscriptionJobCreator +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .assemblyai_get_subtitles import AssemblyAIGetSubtitles + from 
.assemblyai_lemur import AssemblyAILeMUR + from .assemblyai_list_transcripts import AssemblyAIListTranscripts + from .assemblyai_poll_transcript import AssemblyAITranscriptionJobPoller + from .assemblyai_start_transcript import AssemblyAITranscriptionJobCreator + +_dynamic_imports = { + "AssemblyAIGetSubtitles": "assemblyai_get_subtitles", + "AssemblyAILeMUR": "assemblyai_lemur", + "AssemblyAIListTranscripts": "assemblyai_list_transcripts", + "AssemblyAITranscriptionJobCreator": "assemblyai_start_transcript", + "AssemblyAITranscriptionJobPoller": "assemblyai_poll_transcript", +} __all__ = [ "AssemblyAIGetSubtitles", @@ -11,3 +26,21 @@ __all__ = [ "AssemblyAITranscriptionJobCreator", "AssemblyAITranscriptionJobPoller", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import assemblyai components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/azure/__init__.py b/src/backend/base/langflow/components/azure/__init__.py index 73671151e..40ea85e85 100644 --- a/src/backend/base/langflow/components/azure/__init__.py +++ b/src/backend/base/langflow/components/azure/__init__.py @@ -1,7 +1,37 @@ -from .azure_openai import AzureChatOpenAIComponent -from .azure_openai_embeddings import AzureOpenAIEmbeddingsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .azure_openai import AzureChatOpenAIComponent + from .azure_openai_embeddings import 
AzureOpenAIEmbeddingsComponent + +_dynamic_imports = { + "AzureChatOpenAIComponent": "azure_openai", + "AzureOpenAIEmbeddingsComponent": "azure_openai_embeddings", +} __all__ = [ "AzureChatOpenAIComponent", "AzureOpenAIEmbeddingsComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import azure components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/baidu/__init__.py b/src/backend/base/langflow/components/baidu/__init__.py index f6428f279..8d5c3eda0 100644 --- a/src/backend/base/langflow/components/baidu/__init__.py +++ b/src/backend/base/langflow/components/baidu/__init__.py @@ -1,3 +1,32 @@ -from .baidu_qianfan_chat import QianfanChatEndpoint +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.baidu.baidu_qianfan_chat import QianfanChatEndpoint + +_dynamic_imports = { + "QianfanChatEndpoint": "baidu_qianfan_chat", +} __all__ = ["QianfanChatEndpoint"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import baidu components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise 
AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/cleanlab/__init__.py b/src/backend/base/langflow/components/cleanlab/__init__.py index a0549ae05..466cdb21f 100644 --- a/src/backend/base/langflow/components/cleanlab/__init__.py +++ b/src/backend/base/langflow/components/cleanlab/__init__.py @@ -1,5 +1,40 @@ -from .cleanlab_evaluator import CleanlabEvaluator -from .cleanlab_rag_evaluator import CleanlabRAGEvaluator -from .cleanlab_remediator import CleanlabRemediator +from __future__ import annotations -__all__ = ["CleanlabEvaluator", "CleanlabRAGEvaluator", "CleanlabRemediator"] +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .cleanlab_evaluator import CleanlabEvaluator + from .cleanlab_rag_evaluator import CleanlabRAGEvaluator + from .cleanlab_remediator import CleanlabRemediator + +_dynamic_imports = { + "CleanlabEvaluator": "cleanlab_evaluator", + "CleanlabRAGEvaluator": "cleanlab_rag_evaluator", + "CleanlabRemediator": "cleanlab_remediator", +} + +__all__ = [ + "CleanlabEvaluator", + "CleanlabRAGEvaluator", + "CleanlabRemediator", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import cleanlab components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/cloudflare/__init__.py b/src/backend/base/langflow/components/cloudflare/__init__.py index 
1bf129f6a..3151bf396 100644 --- a/src/backend/base/langflow/components/cloudflare/__init__.py +++ b/src/backend/base/langflow/components/cloudflare/__init__.py @@ -1,3 +1,32 @@ -from .cloudflare import CloudflareWorkersAIEmbeddingsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.cloudflare.cloudflare import CloudflareWorkersAIEmbeddingsComponent + +_dynamic_imports = { + "CloudflareWorkersAIEmbeddingsComponent": "cloudflare", +} __all__ = ["CloudflareWorkersAIEmbeddingsComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import cloudflare components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/cohere/__init__.py b/src/backend/base/langflow/components/cohere/__init__.py index c4b44ad77..7c2c77df2 100644 --- a/src/backend/base/langflow/components/cohere/__init__.py +++ b/src/backend/base/langflow/components/cohere/__init__.py @@ -1,5 +1,40 @@ -from .cohere_embeddings import CohereEmbeddingsComponent -from .cohere_models import CohereComponent -from .cohere_rerank import CohereRerankComponent +from __future__ import annotations -__all__ = ["CohereComponent", "CohereEmbeddingsComponent", "CohereRerankComponent"] +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .cohere_embeddings import CohereEmbeddingsComponent + from .cohere_models 
import CohereComponent + from .cohere_rerank import CohereRerankComponent + +_dynamic_imports = { + "CohereComponent": "cohere_models", + "CohereEmbeddingsComponent": "cohere_embeddings", + "CohereRerankComponent": "cohere_rerank", +} + +__all__ = [ + "CohereComponent", + "CohereEmbeddingsComponent", + "CohereRerankComponent", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import cohere components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/composio/__init__.py b/src/backend/base/langflow/components/composio/__init__.py index 0b6e6bc3e..d9afb88aa 100644 --- a/src/backend/base/langflow/components/composio/__init__.py +++ b/src/backend/base/langflow/components/composio/__init__.py @@ -1,9 +1,25 @@ -from .composio_api import ComposioAPIComponent -from .github_composio import ComposioGitHubAPIComponent -from .gmail_composio import ComposioGmailAPIComponent -from .googlecalendar_composio import ComposioGoogleCalendarAPIComponent -from .outlook_composio import ComposioOutlookAPIComponent -from .slack_composio import ComposioSlackAPIComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .composio_api import ComposioAPIComponent + from .github_composio import ComposioGitHubAPIComponent + from .gmail_composio import ComposioGmailAPIComponent + from .googlecalendar_composio import ComposioGoogleCalendarAPIComponent + from .outlook_composio 
import ComposioOutlookAPIComponent + from .slack_composio import ComposioSlackAPIComponent + +_dynamic_imports = { + "ComposioAPIComponent": "composio_api", + "ComposioGitHubAPIComponent": "github_composio", + "ComposioGmailAPIComponent": "gmail_composio", + "ComposioGoogleCalendarAPIComponent": "googlecalendar_composio", + "ComposioOutlookAPIComponent": "outlook_composio", + "ComposioSlackAPIComponent": "slack_composio", +} __all__ = [ "ComposioAPIComponent", @@ -13,3 +29,21 @@ __all__ = [ "ComposioOutlookAPIComponent", "ComposioSlackAPIComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import composio components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/crewai/__init__.py b/src/backend/base/langflow/components/crewai/__init__.py index df9274449..610e95d3c 100644 --- a/src/backend/base/langflow/components/crewai/__init__.py +++ b/src/backend/base/langflow/components/crewai/__init__.py @@ -1,9 +1,25 @@ -from .crewai import CrewAIAgentComponent -from .hierarchical_crew import HierarchicalCrewComponent -from .hierarchical_task import HierarchicalTaskComponent -from .sequential_crew import SequentialCrewComponent -from .sequential_task import SequentialTaskComponent -from .sequential_task_agent import SequentialTaskAgentComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .crewai import CrewAIAgentComponent + from 
.hierarchical_crew import HierarchicalCrewComponent + from .hierarchical_task import HierarchicalTaskComponent + from .sequential_crew import SequentialCrewComponent + from .sequential_task import SequentialTaskComponent + from .sequential_task_agent import SequentialTaskAgentComponent + +_dynamic_imports = { + "CrewAIAgentComponent": "crewai", + "HierarchicalCrewComponent": "hierarchical_crew", + "HierarchicalTaskComponent": "hierarchical_task", + "SequentialCrewComponent": "sequential_crew", + "SequentialTaskAgentComponent": "sequential_task_agent", + "SequentialTaskComponent": "sequential_task", +} __all__ = [ "CrewAIAgentComponent", @@ -13,3 +29,21 @@ __all__ = [ "SequentialTaskAgentComponent", "SequentialTaskComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import crewai components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/custom_component/__init__.py b/src/backend/base/langflow/components/custom_component/__init__.py index fa37797fe..8766e9ffc 100644 --- a/src/backend/base/langflow/components/custom_component/__init__.py +++ b/src/backend/base/langflow/components/custom_component/__init__.py @@ -1,5 +1,34 @@ -from .custom_component import CustomComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .custom_component import CustomComponent + +_dynamic_imports = { + "CustomComponent": "custom_component", +} __all__ = 
[ "CustomComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import custom components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/datastax/__init__.py b/src/backend/base/langflow/components/datastax/__init__.py index 91fc16c99..7802311bb 100644 --- a/src/backend/base/langflow/components/datastax/__init__.py +++ b/src/backend/base/langflow/components/datastax/__init__.py @@ -1,16 +1,39 @@ -from .astra_assistant_manager import AstraAssistantManager -from .astra_db import AstraDBChatMemory -from .astra_vectorize import AstraVectorizeComponent -from .astradb_cql import AstraDBCQLToolComponent -from .astradb_tool import AstraDBToolComponent -from .cassandra import CassandraChatMemory -from .create_assistant import AssistantsCreateAssistant -from .create_thread import AssistantsCreateThread -from .dotenv import Dotenv -from .get_assistant import AssistantsGetAssistantName -from .getenvvar import GetEnvVar -from .list_assistants import AssistantsListAssistants -from .run import AssistantsRun +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .astra_assistant_manager import AstraAssistantManager + from .astra_db import AstraDBChatMemory + from .astra_vectorize import AstraVectorizeComponent + from .astradb_cql import AstraDBCQLToolComponent + from .astradb_tool import AstraDBToolComponent + from .cassandra import CassandraChatMemory + from 
.create_assistant import AssistantsCreateAssistant + from .create_thread import AssistantsCreateThread + from .dotenv import Dotenv + from .get_assistant import AssistantsGetAssistantName + from .getenvvar import GetEnvVar + from .list_assistants import AssistantsListAssistants + from .run import AssistantsRun + +_dynamic_imports = { + "AssistantsCreateAssistant": "create_assistant", + "AssistantsCreateThread": "create_thread", + "AssistantsGetAssistantName": "get_assistant", + "AssistantsListAssistants": "list_assistants", + "AssistantsRun": "run", + "AstraAssistantManager": "astra_assistant_manager", + "AstraDBCQLToolComponent": "astradb_cql", + "AstraDBChatMemory": "astra_db", + "AstraDBToolComponent": "astradb_tool", + "AstraVectorizeComponent": "astra_vectorize", + "CassandraChatMemory": "cassandra", + "Dotenv": "dotenv", + "GetEnvVar": "getenvvar", +} __all__ = [ "AssistantsCreateAssistant", @@ -27,3 +50,21 @@ __all__ = [ "Dotenv", "GetEnvVar", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import datastax components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/deepseek/__init__.py b/src/backend/base/langflow/components/deepseek/__init__.py index e03101663..559ea51ec 100644 --- a/src/backend/base/langflow/components/deepseek/__init__.py +++ b/src/backend/base/langflow/components/deepseek/__init__.py @@ -1,3 +1,34 @@ -from .deepseek import DeepSeekModelComponent +from __future__ import annotations -__all__ = ["DeepSeekModelComponent"] +from 
typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .deepseek import DeepSeekModelComponent + +_dynamic_imports = { + "DeepSeekModelComponent": "deepseek", +} + +__all__ = [ + "DeepSeekModelComponent", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import deepseek components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/docling/__init__.py b/src/backend/base/langflow/components/docling/__init__.py index dd0a6ff18..76f6035a8 100644 --- a/src/backend/base/langflow/components/docling/__init__.py +++ b/src/backend/base/langflow/components/docling/__init__.py @@ -1,7 +1,21 @@ -from .chunk_docling_document import ChunkDoclingDocumentComponent -from .docling_inline import DoclingInlineComponent -from .docling_remote import DoclingRemoteComponent -from .export_docling_document import ExportDoclingDocumentComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .chunk_docling_document import ChunkDoclingDocumentComponent + from .docling_inline import DoclingInlineComponent + from .docling_remote import DoclingRemoteComponent + from .export_docling_document import ExportDoclingDocumentComponent + +_dynamic_imports = { + "ChunkDoclingDocumentComponent": "chunk_docling_document", + "DoclingInlineComponent": "docling_inline", + "DoclingRemoteComponent": "docling_remote", + 
"ExportDoclingDocumentComponent": "export_docling_document", +} __all__ = [ "ChunkDoclingDocumentComponent", @@ -9,3 +23,21 @@ __all__ = [ "DoclingRemoteComponent", "ExportDoclingDocumentComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import docling components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/embeddings/__init__.py b/src/backend/base/langflow/components/embeddings/__init__.py index 5afb22808..cb30e0af0 100644 --- a/src/backend/base/langflow/components/embeddings/__init__.py +++ b/src/backend/base/langflow/components/embeddings/__init__.py @@ -1,9 +1,37 @@ -from .similarity import EmbeddingSimilarityComponent -from .text_embedder import TextEmbedderComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.embeddings.similarity import EmbeddingSimilarityComponent + from langflow.components.embeddings.text_embedder import TextEmbedderComponent + +_dynamic_imports = { + "EmbeddingSimilarityComponent": "similarity", + "TextEmbedderComponent": "text_embedder", +} __all__ = [ - "CloudflareWorkersAIEmbeddingsComponent", "EmbeddingSimilarityComponent", - "MistralAIEmbeddingsComponent", "TextEmbedderComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import embedding components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no 
attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/firecrawl/__init__.py b/src/backend/base/langflow/components/firecrawl/__init__.py index 1f718f020..c15d86c41 100644 --- a/src/backend/base/langflow/components/firecrawl/__init__.py +++ b/src/backend/base/langflow/components/firecrawl/__init__.py @@ -1,6 +1,43 @@ -from .firecrawl_crawl_api import FirecrawlCrawlApi -from .firecrawl_extract_api import FirecrawlExtractApi -from .firecrawl_map_api import FirecrawlMapApi -from .firecrawl_scrape_api import FirecrawlScrapeApi +from __future__ import annotations -__all__ = ["FirecrawlCrawlApi", "FirecrawlExtractApi", "FirecrawlMapApi", "FirecrawlScrapeApi"] +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .firecrawl_crawl_api import FirecrawlCrawlApi + from .firecrawl_extract_api import FirecrawlExtractApi + from .firecrawl_map_api import FirecrawlMapApi + from .firecrawl_scrape_api import FirecrawlScrapeApi + +_dynamic_imports = { + "FirecrawlCrawlApi": "firecrawl_crawl_api", + "FirecrawlExtractApi": "firecrawl_extract_api", + "FirecrawlMapApi": "firecrawl_map_api", + "FirecrawlScrapeApi": "firecrawl_scrape_api", +} + +__all__ = [ + "FirecrawlCrawlApi", + "FirecrawlExtractApi", + "FirecrawlMapApi", + "FirecrawlScrapeApi", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import firecrawl components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, 
_dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/groq/__init__.py b/src/backend/base/langflow/components/groq/__init__.py index ff90827ad..8a2dee75f 100644 --- a/src/backend/base/langflow/components/groq/__init__.py +++ b/src/backend/base/langflow/components/groq/__init__.py @@ -1,3 +1,34 @@ -from .groq import GroqModel +from __future__ import annotations -__all__ = ["GroqModel"] +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .groq import GroqModel + +_dynamic_imports = { + "GroqModel": "groq", +} + +__all__ = [ + "GroqModel", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import groq components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/helpers/__init__.py b/src/backend/base/langflow/components/helpers/__init__.py index 773ce8576..78872c51a 100644 --- a/src/backend/base/langflow/components/helpers/__init__.py +++ b/src/backend/base/langflow/components/helpers/__init__.py @@ -1,10 +1,27 @@ -from .calculator_core import CalculatorComponent -from .create_list import CreateListComponent -from .current_date import CurrentDateComponent -from .id_generator import 
IDGeneratorComponent -from .memory import MemoryComponent -from .output_parser import OutputParserComponent -from .store_message import MessageStoreComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.helpers.calculator_core import CalculatorComponent + from langflow.components.helpers.create_list import CreateListComponent + from langflow.components.helpers.current_date import CurrentDateComponent + from langflow.components.helpers.id_generator import IDGeneratorComponent + from langflow.components.helpers.memory import MemoryComponent + from langflow.components.helpers.output_parser import OutputParserComponent + from langflow.components.helpers.store_message import MessageStoreComponent + +_dynamic_imports = { + "CalculatorComponent": "calculator_core", + "CreateListComponent": "create_list", + "CurrentDateComponent": "current_date", + "IDGeneratorComponent": "id_generator", + "MemoryComponent": "memory", + "OutputParserComponent": "output_parser", + "MessageStoreComponent": "store_message", +} __all__ = [ "CalculatorComponent", @@ -15,3 +32,21 @@ __all__ = [ "MessageStoreComponent", "OutputParserComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import helper components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/huggingface/__init__.py b/src/backend/base/langflow/components/huggingface/__init__.py 
index 19fa698bd..794213731 100644 --- a/src/backend/base/langflow/components/huggingface/__init__.py +++ b/src/backend/base/langflow/components/huggingface/__init__.py @@ -1,7 +1,37 @@ -from .huggingface import HuggingFaceEndpointsComponent -from .huggingface_inference_api import HuggingFaceInferenceAPIEmbeddingsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .huggingface import HuggingFaceEndpointsComponent + from .huggingface_inference_api import HuggingFaceInferenceAPIEmbeddingsComponent + +_dynamic_imports = { + "HuggingFaceEndpointsComponent": "huggingface", + "HuggingFaceInferenceAPIEmbeddingsComponent": "huggingface_inference_api", +} __all__ = [ "HuggingFaceEndpointsComponent", "HuggingFaceInferenceAPIEmbeddingsComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import huggingface components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/ibm/__init__.py b/src/backend/base/langflow/components/ibm/__init__.py index e19e2af6e..e782a8bf0 100644 --- a/src/backend/base/langflow/components/ibm/__init__.py +++ b/src/backend/base/langflow/components/ibm/__init__.py @@ -1,4 +1,34 @@ -from .watsonx import WatsonxAIComponent -from .watsonx_embeddings import WatsonxEmbeddingsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if 
TYPE_CHECKING: + from langflow.components.ibm.watsonx import WatsonxAIComponent + from langflow.components.ibm.watsonx_embeddings import WatsonxEmbeddingsComponent + +_dynamic_imports = { + "WatsonxAIComponent": "watsonx", + "WatsonxEmbeddingsComponent": "watsonx_embeddings", +} __all__ = ["WatsonxAIComponent", "WatsonxEmbeddingsComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import ibm components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/input_output/__init__.py b/src/backend/base/langflow/components/input_output/__init__.py index 8eab9968d..e403cd99e 100644 --- a/src/backend/base/langflow/components/input_output/__init__.py +++ b/src/backend/base/langflow/components/input_output/__init__.py @@ -1,6 +1,38 @@ -from .chat import ChatInput -from .chat_output import ChatOutput -from .text import TextInputComponent -from .text_output import TextOutputComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.input_output.chat import ChatInput + from langflow.components.input_output.chat_output import ChatOutput + from langflow.components.input_output.text import TextInputComponent + from langflow.components.input_output.text_output import TextOutputComponent + +_dynamic_imports = { + "ChatInput": "chat", + "ChatOutput": "chat_output", + "TextInputComponent": "text", + "TextOutputComponent": "text_output", +} 
__all__ = ["ChatInput", "ChatOutput", "TextInputComponent", "TextOutputComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import input/output components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/langchain_utilities/__init__.py b/src/backend/base/langflow/components/langchain_utilities/__init__.py index 4d04ce16b..3033bc3d1 100644 --- a/src/backend/base/langflow/components/langchain_utilities/__init__.py +++ b/src/backend/base/langflow/components/langchain_utilities/__init__.py @@ -1,29 +1,65 @@ -from .character import CharacterTextSplitterComponent -from .conversation import ConversationChainComponent -from .csv_agent import CSVAgentComponent -from .fake_embeddings import FakeEmbeddingsComponent -from .html_link_extractor import HtmlLinkExtractorComponent -from .json_agent import JsonAgentComponent -from .langchain_hub import LangChainHubPromptComponent -from .language_recursive import LanguageRecursiveTextSplitterComponent -from .language_semantic import SemanticTextSplitterComponent -from .llm_checker import LLMCheckerChainComponent -from .llm_math import LLMMathChainComponent -from .natural_language import NaturalLanguageTextSplitterComponent -from .openai_tools import OpenAIToolsAgentComponent -from .openapi import OpenAPIAgentComponent -from .recursive_character import RecursiveCharacterTextSplitterComponent -from .retrieval_qa import RetrievalQAComponent -from .runnable_executor import RunnableExecComponent -from .self_query import 
SelfQueryRetrieverComponent -from .spider import SpiderTool -from .sql import SQLAgentComponent -from .sql_database import SQLDatabaseComponent -from .sql_generator import SQLGeneratorComponent -from .tool_calling import ToolCallingAgentComponent -from .vector_store_info import VectorStoreInfoComponent -from .vector_store_router import VectorStoreRouterAgentComponent -from .xml_agent import XMLAgentComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .character import CharacterTextSplitterComponent + from .conversation import ConversationChainComponent + from .csv_agent import CSVAgentComponent + from .fake_embeddings import FakeEmbeddingsComponent + from .html_link_extractor import HtmlLinkExtractorComponent + from .json_agent import JsonAgentComponent + from .langchain_hub import LangChainHubPromptComponent + from .language_recursive import LanguageRecursiveTextSplitterComponent + from .language_semantic import SemanticTextSplitterComponent + from .llm_checker import LLMCheckerChainComponent + from .llm_math import LLMMathChainComponent + from .natural_language import NaturalLanguageTextSplitterComponent + from .openai_tools import OpenAIToolsAgentComponent + from .openapi import OpenAPIAgentComponent + from .recursive_character import RecursiveCharacterTextSplitterComponent + from .retrieval_qa import RetrievalQAComponent + from .runnable_executor import RunnableExecComponent + from .self_query import SelfQueryRetrieverComponent + from .spider import SpiderTool + from .sql import SQLAgentComponent + from .sql_database import SQLDatabaseComponent + from .sql_generator import SQLGeneratorComponent + from .tool_calling import ToolCallingAgentComponent + from .vector_store_info import VectorStoreInfoComponent + from .vector_store_router import VectorStoreRouterAgentComponent + from .xml_agent import XMLAgentComponent + +_dynamic_imports = { + 
"CharacterTextSplitterComponent": "character", + "ConversationChainComponent": "conversation", + "CSVAgentComponent": "csv_agent", + "FakeEmbeddingsComponent": "fake_embeddings", + "HtmlLinkExtractorComponent": "html_link_extractor", + "JsonAgentComponent": "json_agent", + "LangChainHubPromptComponent": "langchain_hub", + "LanguageRecursiveTextSplitterComponent": "language_recursive", + "LLMCheckerChainComponent": "llm_checker", + "LLMMathChainComponent": "llm_math", + "NaturalLanguageTextSplitterComponent": "natural_language", + "OpenAIToolsAgentComponent": "openai_tools", + "OpenAPIAgentComponent": "openapi", + "RecursiveCharacterTextSplitterComponent": "recursive_character", + "RetrievalQAComponent": "retrieval_qa", + "RunnableExecComponent": "runnable_executor", + "SelfQueryRetrieverComponent": "self_query", + "SemanticTextSplitterComponent": "language_semantic", + "SpiderTool": "spider", + "SQLAgentComponent": "sql", + "SQLDatabaseComponent": "sql_database", + "SQLGeneratorComponent": "sql_generator", + "ToolCallingAgentComponent": "tool_calling", + "VectorStoreInfoComponent": "vector_store_info", + "VectorStoreRouterAgentComponent": "vector_store_router", + "XMLAgentComponent": "xml_agent", +} __all__ = [ "CSVAgentComponent", @@ -53,3 +89,21 @@ __all__ = [ "VectorStoreRouterAgentComponent", "XMLAgentComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import langchain utility components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git 
a/src/backend/base/langflow/components/langchain_utilities/conversation.py b/src/backend/base/langflow/components/langchain_utilities/conversation.py index 4542118a0..c83a12dc4 100644 --- a/src/backend/base/langflow/components/langchain_utilities/conversation.py +++ b/src/backend/base/langflow/components/langchain_utilities/conversation.py @@ -1,8 +1,8 @@ from langchain.chains import ConversationChain from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message from langflow.inputs.inputs import HandleInput, MultilineInput +from langflow.schema.message import Message class ConversationChainComponent(LCChainComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/llm_checker.py b/src/backend/base/langflow/components/langchain_utilities/llm_checker.py index 54bae37ef..885e61c63 100644 --- a/src/backend/base/langflow/components/langchain_utilities/llm_checker.py +++ b/src/backend/base/langflow/components/langchain_utilities/llm_checker.py @@ -1,8 +1,8 @@ from langchain.chains import LLMCheckerChain from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message from langflow.inputs.inputs import HandleInput, MultilineInput +from langflow.schema.message import Message class LLMCheckerChainComponent(LCChainComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/llm_math.py b/src/backend/base/langflow/components/langchain_utilities/llm_math.py index 6892a0714..96deff53d 100644 --- a/src/backend/base/langflow/components/langchain_utilities/llm_math.py +++ b/src/backend/base/langflow/components/langchain_utilities/llm_math.py @@ -1,8 +1,8 @@ from langchain.chains import LLMMathChain from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message from langflow.inputs.inputs import HandleInput, MultilineInput +from langflow.schema.message import Message from langflow.template.field.base import Output diff --git 
a/src/backend/base/langflow/components/langchain_utilities/retrieval_qa.py b/src/backend/base/langflow/components/langchain_utilities/retrieval_qa.py index 66fa77478..ca6c764cc 100644 --- a/src/backend/base/langflow/components/langchain_utilities/retrieval_qa.py +++ b/src/backend/base/langflow/components/langchain_utilities/retrieval_qa.py @@ -1,8 +1,10 @@ +from typing import cast + from langchain.chains import RetrievalQA from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message from langflow.inputs.inputs import BoolInput, DropdownInput, HandleInput, MultilineInput +from langflow.schema.message import Message class RetrievalQAComponent(LCChainComponent): @@ -78,4 +80,4 @@ class RetrievalQAComponent(LCChainComponent): result_str = f"{result_str}\n{references_str}" # put the entire result to debug history, query and content self.status = {**result, "source_documents": source_docs, "output": result_str} - return result_str + return cast("Message", result_str) diff --git a/src/backend/base/langflow/components/langchain_utilities/sql_generator.py b/src/backend/base/langflow/components/langchain_utilities/sql_generator.py index 31f3112a0..4f9669c92 100644 --- a/src/backend/base/langflow/components/langchain_utilities/sql_generator.py +++ b/src/backend/base/langflow/components/langchain_utilities/sql_generator.py @@ -4,8 +4,8 @@ from langchain.chains import create_sql_query_chain from langchain_core.prompts import PromptTemplate from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message from langflow.inputs.inputs import HandleInput, IntInput, MultilineInput +from langflow.schema.message import Message from langflow.template.field.base import Output if TYPE_CHECKING: diff --git a/src/backend/base/langflow/components/lmstudio/__init__.py b/src/backend/base/langflow/components/lmstudio/__init__.py index 0c0c8d08a..354fe9270 100644 --- 
a/src/backend/base/langflow/components/lmstudio/__init__.py +++ b/src/backend/base/langflow/components/lmstudio/__init__.py @@ -1,4 +1,34 @@ -from .lmstudioembeddings import LMStudioEmbeddingsComponent -from .lmstudiomodel import LMStudioModelComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.lmstudio.lmstudioembeddings import LMStudioEmbeddingsComponent + from langflow.components.lmstudio.lmstudiomodel import LMStudioModelComponent + +_dynamic_imports = { + "LMStudioEmbeddingsComponent": "lmstudioembeddings", + "LMStudioModelComponent": "lmstudiomodel", +} __all__ = ["LMStudioEmbeddingsComponent", "LMStudioModelComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import lmstudio components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/logic/__init__.py b/src/backend/base/langflow/components/logic/__init__.py index a5e213a68..b32540340 100644 --- a/src/backend/base/langflow/components/logic/__init__.py +++ b/src/backend/base/langflow/components/logic/__init__.py @@ -1,10 +1,27 @@ -from .conditional_router import ConditionalRouterComponent -from .data_conditional_router import DataConditionalRouterComponent -from .flow_tool import FlowToolComponent -from .loop import LoopComponent -from .pass_message import PassMessageComponent -from .run_flow import RunFlowComponent -from .sub_flow import SubFlowComponent 
+from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.logic.conditional_router import ConditionalRouterComponent + from langflow.components.logic.data_conditional_router import DataConditionalRouterComponent + from langflow.components.logic.flow_tool import FlowToolComponent + from langflow.components.logic.loop import LoopComponent + from langflow.components.logic.pass_message import PassMessageComponent + from langflow.components.logic.run_flow import RunFlowComponent + from langflow.components.logic.sub_flow import SubFlowComponent + +_dynamic_imports = { + "ConditionalRouterComponent": "conditional_router", + "DataConditionalRouterComponent": "data_conditional_router", + "FlowToolComponent": "flow_tool", + "LoopComponent": "loop", + "PassMessageComponent": "pass_message", + "RunFlowComponent": "run_flow", + "SubFlowComponent": "sub_flow", +} __all__ = [ "ConditionalRouterComponent", @@ -15,3 +32,21 @@ __all__ = [ "RunFlowComponent", "SubFlowComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import logic components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/maritalk/__init__.py b/src/backend/base/langflow/components/maritalk/__init__.py index 881e7b533..ab5a1f44a 100644 --- a/src/backend/base/langflow/components/maritalk/__init__.py +++ b/src/backend/base/langflow/components/maritalk/__init__.py @@ -1,3 +1,32 
@@ -from .maritalk import MaritalkModelComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.maritalk.maritalk import MaritalkModelComponent + +_dynamic_imports = { + "MaritalkModelComponent": "maritalk", +} __all__ = ["MaritalkModelComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import maritalk components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/mistral/__init__.py b/src/backend/base/langflow/components/mistral/__init__.py index 5e1329297..f6971b541 100644 --- a/src/backend/base/langflow/components/mistral/__init__.py +++ b/src/backend/base/langflow/components/mistral/__init__.py @@ -1,4 +1,37 @@ -from .mistral import MistralAIModelComponent -from .mistral_embeddings import MistralAIEmbeddingsComponent +from __future__ import annotations -__all__ = ["MistralAIEmbeddingsComponent", "MistralAIModelComponent"] +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .mistral import MistralAIModelComponent + from .mistral_embeddings import MistralAIEmbeddingsComponent + +_dynamic_imports = { + "MistralAIModelComponent": "mistral", + "MistralAIEmbeddingsComponent": "mistral_embeddings", +} + +__all__ = [ + "MistralAIEmbeddingsComponent", + "MistralAIModelComponent", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import 
mistral components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/models/__init__.py b/src/backend/base/langflow/components/models/__init__.py index e861703b6..adee43439 100644 --- a/src/backend/base/langflow/components/models/__init__.py +++ b/src/backend/base/langflow/components/models/__init__.py @@ -1,4 +1,34 @@ -from .embedding_model import EmbeddingModelComponent -from .language_model import LanguageModelComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.models.embedding_model import EmbeddingModelComponent + from langflow.components.models.language_model import LanguageModelComponent + +_dynamic_imports = { + "EmbeddingModelComponent": "embedding_model", + "LanguageModelComponent": "language_model", +} __all__ = ["EmbeddingModelComponent", "LanguageModelComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import model components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> 
list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/notdiamond/__init__.py b/src/backend/base/langflow/components/notdiamond/__init__.py index e69de29bb..e63a917f1 100644 --- a/src/backend/base/langflow/components/notdiamond/__init__.py +++ b/src/backend/base/langflow/components/notdiamond/__init__.py @@ -0,0 +1,36 @@ +"""NotDiamond components for LangFlow.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.notdiamond.notdiamond import NotDiamondComponent + +_dynamic_imports = { + "NotDiamondComponent": "notdiamond", +} + +__all__ = [ + "NotDiamondComponent", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import notdiamond components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/novita/__init__.py b/src/backend/base/langflow/components/novita/__init__.py index df6532831..1405e0a28 100644 --- a/src/backend/base/langflow/components/novita/__init__.py +++ b/src/backend/base/langflow/components/novita/__init__.py @@ -1,3 +1,32 @@ -from .novita import NovitaModelComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.novita.novita import NovitaModelComponent + +_dynamic_imports = { + "NovitaModelComponent": "novita", +} __all__ = ["NovitaModelComponent"] 
+ + +def __getattr__(attr_name: str) -> Any: + """Lazily import novita components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/nvidia/__init__.py b/src/backend/base/langflow/components/nvidia/__init__.py index 5d58e6bf2..4c57c4c02 100644 --- a/src/backend/base/langflow/components/nvidia/__init__.py +++ b/src/backend/base/langflow/components/nvidia/__init__.py @@ -1,13 +1,28 @@ -import sys +from __future__ import annotations -from .nvidia import NVIDIAModelComponent -from .nvidia_embedding import NVIDIAEmbeddingsComponent -from .nvidia_ingest import NvidiaIngestComponent -from .nvidia_rerank import NvidiaRerankComponent +import sys +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .nvidia import NVIDIAModelComponent + from .nvidia_embedding import NVIDIAEmbeddingsComponent + from .nvidia_ingest import NvidiaIngestComponent + from .nvidia_rerank import NvidiaRerankComponent + + if sys.platform == "win32": + from .system_assist import NvidiaSystemAssistComponent + +_dynamic_imports = { + "NVIDIAModelComponent": "nvidia", + "NVIDIAEmbeddingsComponent": "nvidia_embedding", + "NvidiaIngestComponent": "nvidia_ingest", + "NvidiaRerankComponent": "nvidia_rerank", +} if sys.platform == "win32": - from .system_assist import NvidiaSystemAssistComponent - + _dynamic_imports["NvidiaSystemAssistComponent"] = "system_assist" __all__ = [ "NVIDIAEmbeddingsComponent", "NVIDIAModelComponent", @@ -16,4 
+31,27 @@ if sys.platform == "win32": "NvidiaSystemAssistComponent", ] else: - __all__ = ["NVIDIAEmbeddingsComponent", "NVIDIAModelComponent", "NvidiaIngestComponent", "NvidiaRerankComponent"] + __all__ = [ + "NVIDIAEmbeddingsComponent", + "NVIDIAModelComponent", + "NvidiaIngestComponent", + "NvidiaRerankComponent", + ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import nvidia components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/ollama/__init__.py b/src/backend/base/langflow/components/ollama/__init__.py index 45dc85aff..858df3c67 100644 --- a/src/backend/base/langflow/components/ollama/__init__.py +++ b/src/backend/base/langflow/components/ollama/__init__.py @@ -1,7 +1,37 @@ -from .ollama import ChatOllamaComponent -from .ollama_embeddings import OllamaEmbeddingsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .ollama import ChatOllamaComponent + from .ollama_embeddings import OllamaEmbeddingsComponent + +_dynamic_imports = { + "ChatOllamaComponent": "ollama", + "OllamaEmbeddingsComponent": "ollama_embeddings", +} __all__ = [ "ChatOllamaComponent", "OllamaEmbeddingsComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import ollama components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + 
result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/openai/__init__.py b/src/backend/base/langflow/components/openai/__init__.py index b7bd9e044..03f72d8cf 100644 --- a/src/backend/base/langflow/components/openai/__init__.py +++ b/src/backend/base/langflow/components/openai/__init__.py @@ -1,7 +1,37 @@ -from .openai import OpenAIEmbeddingsComponent -from .openai_chat_model import OpenAIModelComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.openai.openai import OpenAIEmbeddingsComponent + from langflow.components.openai.openai_chat_model import OpenAIModelComponent + +_dynamic_imports = { + "OpenAIEmbeddingsComponent": "openai", + "OpenAIModelComponent": "openai_chat_model", +} __all__ = [ "OpenAIEmbeddingsComponent", "OpenAIModelComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import OpenAI components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/openrouter/__init__.py b/src/backend/base/langflow/components/openrouter/__init__.py index 7a4b6a70a..c2786bc69 
100644 --- a/src/backend/base/langflow/components/openrouter/__init__.py +++ b/src/backend/base/langflow/components/openrouter/__init__.py @@ -1,3 +1,32 @@ -from .openrouter import OpenRouterComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.openrouter.openrouter import OpenRouterComponent + +_dynamic_imports = { + "OpenRouterComponent": "openrouter", +} __all__ = ["OpenRouterComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import openrouter components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/perplexity/__init__.py b/src/backend/base/langflow/components/perplexity/__init__.py index b16cef4f6..4caa167d0 100644 --- a/src/backend/base/langflow/components/perplexity/__init__.py +++ b/src/backend/base/langflow/components/perplexity/__init__.py @@ -1,3 +1,34 @@ -from .perplexity import PerplexityComponent +from __future__ import annotations -__all__ = ["PerplexityComponent"] +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .perplexity import PerplexityComponent + +_dynamic_imports = { + "PerplexityComponent": "perplexity", +} + +__all__ = [ + "PerplexityComponent", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import perplexity components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = 
f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/processing/__init__.py b/src/backend/base/langflow/components/processing/__init__.py index 4f26c27d8..40cd21fb9 100644 --- a/src/backend/base/langflow/components/processing/__init__.py +++ b/src/backend/base/langflow/components/processing/__init__.py @@ -1,26 +1,71 @@ -from .alter_metadata import AlterMetadataComponent -from .batch_run import BatchRunComponent -from .combine_text import CombineTextComponent -from .converter import TypeConverterComponent -from .create_data import CreateDataComponent -from .data_operations import DataOperationsComponent -from .extract_key import ExtractDataKeyComponent -from .filter_data_values import DataFilterComponent -from .json_cleaner import JSONCleaner -from .lambda_filter import LambdaFilterComponent -from .llm_router import LLMRouterComponent -from .merge_data import MergeDataComponent -from .message_to_data import MessageToDataComponent -from .parse_data import ParseDataComponent -from .parse_json_data import ParseJSONDataComponent -from .parser import ParserComponent -from .prompt import PromptComponent -from .python_repl_core import PythonREPLComponent -from .regex import RegexExtractorComponent -from .select_data import SelectDataComponent -from .split_text import SplitTextComponent -from .structured_output import StructuredOutputComponent -from .update_data import UpdateDataComponent +"""Processing components for LangFlow.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing 
import import_mod + +if TYPE_CHECKING: + from langflow.components.processing.alter_metadata import AlterMetadataComponent + from langflow.components.processing.batch_run import BatchRunComponent + from langflow.components.processing.combine_text import CombineTextComponent + from langflow.components.processing.converter import TypeConverterComponent + from langflow.components.processing.create_data import CreateDataComponent + from langflow.components.processing.data_operations import DataOperationsComponent + from langflow.components.processing.data_to_dataframe import DataToDataFrameComponent + from langflow.components.processing.dataframe_operations import DataFrameOperationsComponent + from langflow.components.processing.extract_key import ExtractDataKeyComponent + from langflow.components.processing.filter_data import FilterDataComponent + from langflow.components.processing.filter_data_values import DataFilterComponent + from langflow.components.processing.json_cleaner import JSONCleaner + from langflow.components.processing.lambda_filter import LambdaFilterComponent + from langflow.components.processing.llm_router import LLMRouterComponent + from langflow.components.processing.merge_data import MergeDataComponent + from langflow.components.processing.message_to_data import MessageToDataComponent + from langflow.components.processing.parse_data import ParseDataComponent + from langflow.components.processing.parse_dataframe import ParseDataFrameComponent + from langflow.components.processing.parse_json_data import ParseJSONDataComponent + from langflow.components.processing.parser import ParserComponent + from langflow.components.processing.prompt import PromptComponent + from langflow.components.processing.python_repl_core import PythonREPLComponent + from langflow.components.processing.regex import RegexExtractorComponent + from langflow.components.processing.save_file import SaveToFileComponent + from langflow.components.processing.select_data import 
SelectDataComponent + from langflow.components.processing.split_text import SplitTextComponent + from langflow.components.processing.structured_output import StructuredOutputComponent + from langflow.components.processing.update_data import UpdateDataComponent + +_dynamic_imports = { + "AlterMetadataComponent": "alter_metadata", + "BatchRunComponent": "batch_run", + "CombineTextComponent": "combine_text", + "TypeConverterComponent": "converter", + "CreateDataComponent": "create_data", + "DataOperationsComponent": "data_operations", + "DataToDataFrameComponent": "data_to_dataframe", + "DataFrameOperationsComponent": "dataframe_operations", + "ExtractDataKeyComponent": "extract_key", + "FilterDataComponent": "filter_data", + "DataFilterComponent": "filter_data_values", + "JSONCleaner": "json_cleaner", + "LambdaFilterComponent": "lambda_filter", + "LLMRouterComponent": "llm_router", + "MergeDataComponent": "merge_data", + "MessageToDataComponent": "message_to_data", + "ParseDataComponent": "parse_data", + "ParseDataFrameComponent": "parse_dataframe", + "ParseJSONDataComponent": "parse_json_data", + "ParserComponent": "parser", + "PromptComponent": "prompt", + "PythonREPLComponent": "python_repl_core", + "RegexExtractorComponent": "regex", + "SaveToFileComponent": "save_file", + "SelectDataComponent": "select_data", + "SplitTextComponent": "split_text", + "StructuredOutputComponent": "structured_output", + "UpdateDataComponent": "update_data", +} __all__ = [ "AlterMetadataComponent", @@ -28,8 +73,11 @@ __all__ = [ "CombineTextComponent", "CreateDataComponent", "DataFilterComponent", + "DataFrameOperationsComponent", "DataOperationsComponent", + "DataToDataFrameComponent", "ExtractDataKeyComponent", + "FilterDataComponent", "JSONCleaner", "LLMRouterComponent", "LambdaFilterComponent", @@ -42,9 +90,28 @@ __all__ = [ "PromptComponent", "PythonREPLComponent", "RegexExtractorComponent", + "SaveToFileComponent", "SelectDataComponent", "SplitTextComponent", 
"StructuredOutputComponent", "TypeConverterComponent", "UpdateDataComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import processing components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/prototypes/__init__.py b/src/backend/base/langflow/components/prototypes/__init__.py index 8ad61e02b..4f17dddb6 100644 --- a/src/backend/base/langflow/components/prototypes/__init__.py +++ b/src/backend/base/langflow/components/prototypes/__init__.py @@ -1,5 +1,34 @@ -from .python_function import PythonFunctionComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .python_function import PythonFunctionComponent + +_dynamic_imports = { + "PythonFunctionComponent": "python_function", +} __all__ = [ "PythonFunctionComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import prototype components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git 
a/src/backend/base/langflow/components/sambanova/__init__.py b/src/backend/base/langflow/components/sambanova/__init__.py index eea6e3946..fe11fedbf 100644 --- a/src/backend/base/langflow/components/sambanova/__init__.py +++ b/src/backend/base/langflow/components/sambanova/__init__.py @@ -1,3 +1,32 @@ -from .sambanova import SambaNovaComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.sambanova.sambanova import SambaNovaComponent + +_dynamic_imports = { + "SambaNovaComponent": "sambanova", +} __all__ = ["SambaNovaComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import sambanova components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/scrapegraph/__init__.py b/src/backend/base/langflow/components/scrapegraph/__init__.py index f14fca52f..93c68ab0e 100644 --- a/src/backend/base/langflow/components/scrapegraph/__init__.py +++ b/src/backend/base/langflow/components/scrapegraph/__init__.py @@ -1,5 +1,40 @@ -from .scrapegraph_markdownify_api import ScrapeGraphMarkdownifyApi -from .scrapegraph_search_api import ScrapeGraphSearchApi -from .scrapegraph_smart_scraper_api import ScrapeGraphSmartScraperApi +from __future__ import annotations -__all__ = ["ScrapeGraphMarkdownifyApi", "ScrapeGraphSearchApi", "ScrapeGraphSmartScraperApi"] +from typing import TYPE_CHECKING, Any + +from langflow.components._importing 
import import_mod + +if TYPE_CHECKING: + from .scrapegraph_markdownify_api import ScrapeGraphMarkdownifyApi + from .scrapegraph_search_api import ScrapeGraphSearchApi + from .scrapegraph_smart_scraper_api import ScrapeGraphSmartScraperApi + +_dynamic_imports = { + "ScrapeGraphMarkdownifyApi": "scrapegraph_markdownify_api", + "ScrapeGraphSearchApi": "scrapegraph_search_api", + "ScrapeGraphSmartScraperApi": "scrapegraph_smart_scraper_api", +} + +__all__ = [ + "ScrapeGraphMarkdownifyApi", + "ScrapeGraphSearchApi", + "ScrapeGraphSmartScraperApi", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import scrapegraph components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/searchapi/__init__.py b/src/backend/base/langflow/components/searchapi/__init__.py index e69de29bb..31cb51336 100644 --- a/src/backend/base/langflow/components/searchapi/__init__.py +++ b/src/backend/base/langflow/components/searchapi/__init__.py @@ -0,0 +1,36 @@ +"""SearchAPI components for LangFlow.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.searchapi.search import SearchComponent + +_dynamic_imports = { + "SearchComponent": "search", +} + +__all__ = [ + "SearchComponent", +] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import searchapi components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module 
'{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/tools/__init__.py b/src/backend/base/langflow/components/tools/__init__.py index e6ab2d55f..ec9068337 100644 --- a/src/backend/base/langflow/components/tools/__init__.py +++ b/src/backend/base/langflow/components/tools/__init__.py @@ -1,29 +1,43 @@ +from __future__ import annotations + import warnings +from typing import TYPE_CHECKING, Any from langchain_core._api.deprecation import LangChainDeprecationWarning -from .calculator import CalculatorToolComponent -from .google_search_api import GoogleSearchAPIComponent -from .google_serper_api import GoogleSerperAPIComponent -from .python_code_structured_tool import PythonCodeStructuredTool -from .python_repl import PythonREPLToolComponent -from .search_api import SearchAPIComponent -from .searxng import SearXNGToolComponent -from .serp_api import SerpAPIComponent -from .wikidata_api import WikidataAPIComponent -from .wikipedia_api import WikipediaAPIComponent -from .yahoo_finance import YfinanceToolComponent +from langflow.components._importing import import_mod -with warnings.catch_warnings(): - warnings.simplefilter("ignore", LangChainDeprecationWarning) +if TYPE_CHECKING: + from .calculator import CalculatorToolComponent + from .google_search_api import GoogleSearchAPIComponent + from .google_serper_api import GoogleSerperAPIComponent + from .python_code_structured_tool import PythonCodeStructuredTool + from .python_repl import PythonREPLToolComponent + from .search_api import SearchAPIComponent + from .searxng import SearXNGToolComponent + from 
.serp_api import SerpAPIComponent + from .tavily_search_tool import TavilySearchToolComponent + from .wikidata_api import WikidataAPIComponent + from .wikipedia_api import WikipediaAPIComponent + from .yahoo_finance import YfinanceToolComponent + +_dynamic_imports = { + "CalculatorToolComponent": "calculator", + "GoogleSearchAPIComponent": "google_search_api", + "GoogleSerperAPIComponent": "google_serper_api", + "PythonCodeStructuredTool": "python_code_structured_tool", + "PythonREPLToolComponent": "python_repl", + "SearchAPIComponent": "search_api", + "SearXNGToolComponent": "searxng", + "SerpAPIComponent": "serp_api", + "TavilySearchToolComponent": "tavily_search_tool", + "WikidataAPIComponent": "wikidata_api", + "WikipediaAPIComponent": "wikipedia_api", + "YfinanceToolComponent": "yahoo_finance", +} __all__ = [ - "AstraDBCQLToolComponent", - "AstraDBToolComponent", "CalculatorToolComponent", - "DuckDuckGoSearchComponent", - "ExaSearchToolkit", - "GleanSearchAPIComponent", "GoogleSearchAPIComponent", "GoogleSerperAPIComponent", "PythonCodeStructuredTool", @@ -31,7 +45,28 @@ __all__ = [ "SearXNGToolComponent", "SearchAPIComponent", "SerpAPIComponent", + "TavilySearchToolComponent", "WikidataAPIComponent", "WikipediaAPIComponent", "YfinanceToolComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import tool components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + with warnings.catch_warnings(): + warnings.simplefilter("ignore", LangChainDeprecationWarning) + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git 
a/src/backend/base/langflow/components/twelvelabs/__init__.py b/src/backend/base/langflow/components/twelvelabs/__init__.py index d276f6153..6378b8a72 100644 --- a/src/backend/base/langflow/components/twelvelabs/__init__.py +++ b/src/backend/base/langflow/components/twelvelabs/__init__.py @@ -1,10 +1,27 @@ -from .convert_astra_results import ConvertAstraToTwelveLabs -from .pegasus_index import PegasusIndexVideo -from .split_video import SplitVideoComponent -from .text_embeddings import TwelveLabsTextEmbeddingsComponent -from .twelvelabs_pegasus import TwelveLabsPegasus -from .video_embeddings import TwelveLabsVideoEmbeddingsComponent -from .video_file import VideoFileComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .convert_astra_results import ConvertAstraToTwelveLabs + from .pegasus_index import PegasusIndexVideo + from .split_video import SplitVideoComponent + from .text_embeddings import TwelveLabsTextEmbeddingsComponent + from .twelvelabs_pegasus import TwelveLabsPegasus + from .video_embeddings import TwelveLabsVideoEmbeddingsComponent + from .video_file import VideoFileComponent + +_dynamic_imports = { + "ConvertAstraToTwelveLabs": "convert_astra_results", + "PegasusIndexVideo": "pegasus_index", + "SplitVideoComponent": "split_video", + "TwelveLabsPegasus": "twelvelabs_pegasus", + "TwelveLabsTextEmbeddingsComponent": "text_embeddings", + "TwelveLabsVideoEmbeddingsComponent": "video_embeddings", + "VideoFileComponent": "video_file", +} __all__ = [ "ConvertAstraToTwelveLabs", @@ -15,3 +32,21 @@ __all__ = [ "TwelveLabsVideoEmbeddingsComponent", "VideoFileComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import twelvelabs components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = 
import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/vectara/__init__.py b/src/backend/base/langflow/components/vectara/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/base/langflow/components/vectorstores/__init__.py b/src/backend/base/langflow/components/vectorstores/__init__.py index ef1da8227..8a4115e65 100644 --- a/src/backend/base/langflow/components/vectorstores/__init__.py +++ b/src/backend/base/langflow/components/vectorstores/__init__.py @@ -1,27 +1,61 @@ -from .astradb import AstraDBVectorStoreComponent -from .astradb_graph import AstraDBGraphVectorStoreComponent -from .cassandra import CassandraVectorStoreComponent -from .cassandra_graph import CassandraGraphVectorStoreComponent -from .chroma import ChromaVectorStoreComponent -from .clickhouse import ClickhouseVectorStoreComponent -from .couchbase import CouchbaseVectorStoreComponent -from .elasticsearch import ElasticsearchVectorStoreComponent -from .faiss import FaissVectorStoreComponent -from .graph_rag import GraphRAGComponent -from .hcd import HCDVectorStoreComponent -from .local_db import LocalDBComponent -from .milvus import MilvusVectorStoreComponent -from .mongodb_atlas import MongoVectorStoreComponent -from .opensearch import OpenSearchVectorStoreComponent -from .pgvector import PGVectorStoreComponent -from .pinecone import PineconeVectorStoreComponent -from .qdrant import QdrantVectorStoreComponent -from .redis import RedisVectorStoreComponent -from .supabase import SupabaseVectorStoreComponent -from .upstash import UpstashVectorStoreComponent -from .vectara import VectaraVectorStoreComponent -from .vectara_rag import 
VectaraRagComponent -from .weaviate import WeaviateVectorStoreComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .astradb import AstraDBVectorStoreComponent + from .astradb_graph import AstraDBGraphVectorStoreComponent + from .cassandra import CassandraVectorStoreComponent + from .cassandra_graph import CassandraGraphVectorStoreComponent + from .chroma import ChromaVectorStoreComponent + from .clickhouse import ClickhouseVectorStoreComponent + from .couchbase import CouchbaseVectorStoreComponent + from .elasticsearch import ElasticsearchVectorStoreComponent + from .faiss import FaissVectorStoreComponent + from .graph_rag import GraphRAGComponent + from .hcd import HCDVectorStoreComponent + from .local_db import LocalDBComponent + from .milvus import MilvusVectorStoreComponent + from .mongodb_atlas import MongoVectorStoreComponent + from .opensearch import OpenSearchVectorStoreComponent + from .pgvector import PGVectorStoreComponent + from .pinecone import PineconeVectorStoreComponent + from .qdrant import QdrantVectorStoreComponent + from .redis import RedisVectorStoreComponent + from .supabase import SupabaseVectorStoreComponent + from .upstash import UpstashVectorStoreComponent + from .vectara import VectaraVectorStoreComponent + from .vectara_rag import VectaraRagComponent + from .weaviate import WeaviateVectorStoreComponent + +_dynamic_imports = { + "AstraDBVectorStoreComponent": "astradb", + "AstraDBGraphVectorStoreComponent": "astradb_graph", + "CassandraVectorStoreComponent": "cassandra", + "CassandraGraphVectorStoreComponent": "cassandra_graph", + "ChromaVectorStoreComponent": "chroma", + "ClickhouseVectorStoreComponent": "clickhouse", + "CouchbaseVectorStoreComponent": "couchbase", + "ElasticsearchVectorStoreComponent": "elasticsearch", + "FaissVectorStoreComponent": "faiss", + "GraphRAGComponent": "graph_rag", + 
"HCDVectorStoreComponent": "hcd", + "LocalDBComponent": "local_db", + "MilvusVectorStoreComponent": "milvus", + "MongoVectorStoreComponent": "mongodb_atlas", + "OpenSearchVectorStoreComponent": "opensearch", + "PGVectorStoreComponent": "pgvector", + "PineconeVectorStoreComponent": "pinecone", + "QdrantVectorStoreComponent": "qdrant", + "RedisVectorStoreComponent": "redis", + "SupabaseVectorStoreComponent": "supabase", + "UpstashVectorStoreComponent": "upstash", + "VectaraVectorStoreComponent": "vectara", + "VectaraRagComponent": "vectara_rag", + "WeaviateVectorStoreComponent": "weaviate", +} __all__ = [ "AstraDBGraphVectorStoreComponent", @@ -49,3 +83,21 @@ __all__ = [ "VectaraVectorStoreComponent", "WeaviateVectorStoreComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import vectorstore components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/vertexai/__init__.py b/src/backend/base/langflow/components/vertexai/__init__.py index 6311eaa81..25edc054c 100644 --- a/src/backend/base/langflow/components/vertexai/__init__.py +++ b/src/backend/base/langflow/components/vertexai/__init__.py @@ -1,7 +1,37 @@ -from .vertexai import ChatVertexAIComponent -from .vertexai_embeddings import VertexAIEmbeddingsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .vertexai import ChatVertexAIComponent + from .vertexai_embeddings import 
VertexAIEmbeddingsComponent + +_dynamic_imports = { + "ChatVertexAIComponent": "vertexai", + "VertexAIEmbeddingsComponent": "vertexai_embeddings", +} __all__ = [ "ChatVertexAIComponent", "VertexAIEmbeddingsComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import vertexai components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/xai/__init__.py b/src/backend/base/langflow/components/xai/__init__.py index aa31ad183..7fc8f572b 100644 --- a/src/backend/base/langflow/components/xai/__init__.py +++ b/src/backend/base/langflow/components/xai/__init__.py @@ -1,3 +1,32 @@ -from .xai import XAIModelComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from langflow.components.xai.xai import XAIModelComponent + +_dynamic_imports = { + "XAIModelComponent": "xai", +} __all__ = ["XAIModelComponent"] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import xai components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> 
list[str]: + return list(__all__) diff --git a/src/backend/base/langflow/components/youtube/__init__.py b/src/backend/base/langflow/components/youtube/__init__.py index 4c4ab6f33..a1aea34be 100644 --- a/src/backend/base/langflow/components/youtube/__init__.py +++ b/src/backend/base/langflow/components/youtube/__init__.py @@ -1,10 +1,27 @@ -from .channel import YouTubeChannelComponent -from .comments import YouTubeCommentsComponent -from .playlist import YouTubePlaylistComponent -from .search import YouTubeSearchComponent -from .trending import YouTubeTrendingComponent -from .video_details import YouTubeVideoDetailsComponent -from .youtube_transcripts import YouTubeTranscriptsComponent +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from langflow.components._importing import import_mod + +if TYPE_CHECKING: + from .channel import YouTubeChannelComponent + from .comments import YouTubeCommentsComponent + from .playlist import YouTubePlaylistComponent + from .search import YouTubeSearchComponent + from .trending import YouTubeTrendingComponent + from .video_details import YouTubeVideoDetailsComponent + from .youtube_transcripts import YouTubeTranscriptsComponent + +_dynamic_imports = { + "YouTubeChannelComponent": "channel", + "YouTubeCommentsComponent": "comments", + "YouTubePlaylistComponent": "playlist", + "YouTubeSearchComponent": "search", + "YouTubeTranscriptsComponent": "youtube_transcripts", + "YouTubeTrendingComponent": "trending", + "YouTubeVideoDetailsComponent": "video_details", +} __all__ = [ "YouTubeChannelComponent", @@ -15,3 +32,21 @@ __all__ = [ "YouTubeTrendingComponent", "YouTubeVideoDetailsComponent", ] + + +def __getattr__(attr_name: str) -> Any: + """Lazily import youtube components on attribute access.""" + if attr_name not in _dynamic_imports: + msg = f"module '{__name__}' has no attribute '{attr_name}'" + raise AttributeError(msg) + try: + result = import_mod(attr_name, _dynamic_imports[attr_name], 
__spec__.parent) + except (ModuleNotFoundError, ImportError, AttributeError) as e: + msg = f"Could not import '{attr_name}' from '{__name__}': {e}" + raise AttributeError(msg) from e + globals()[attr_name] = result + return result + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/backend/tests/integration/test_dynamic_import_integration.py b/src/backend/tests/integration/test_dynamic_import_integration.py new file mode 100644 index 000000000..b5c2e3317 --- /dev/null +++ b/src/backend/tests/integration/test_dynamic_import_integration.py @@ -0,0 +1,299 @@ +"""Integration tests for dynamic import refactor. + +Tests the dynamic import system in realistic usage scenarios to ensure +the refactor doesn't break existing functionality. +""" + +import sys +import time + +import pytest +from langflow.components.agents import AgentComponent +from langflow.components.data import APIRequestComponent +from langflow.components.openai import OpenAIModelComponent + + +class TestDynamicImportIntegration: + """Integration tests for the dynamic import system.""" + + def test_component_discovery_still_works(self): + """Test that component discovery mechanisms still work after refactor.""" + # This tests that the existing component discovery logic + # can still find and load components + from langflow import components + + # Test that we can discover components through the main module + openai_module = components.openai + assert hasattr(openai_module, "OpenAIModelComponent") + + data_module = components.data + assert hasattr(data_module, "APIRequestComponent") + + def test_existing_import_patterns_work(self): + """Test that all existing import patterns continue to work.""" + # Test direct imports + import langflow.components.data as data_comp + + # Test module imports + import langflow.components.openai as openai_comp + + # All should work + assert OpenAIModelComponent is not None + assert APIRequestComponent is not None + assert AgentComponent is not None + assert 
openai_comp.OpenAIModelComponent is not None + assert data_comp.APIRequestComponent is not None + + def test_component_instantiation_works(self): + """Test that components can still be instantiated normally.""" + # Test that we can create component instances + # (Note: Some components may require specific initialization parameters) + + from langflow.components.helpers import CalculatorComponent + + # Should be able to access the class + assert CalculatorComponent is not None + assert callable(CalculatorComponent) + + def test_template_creation_compatibility(self): + """Test that template creation still works with dynamic imports.""" + # Test accessing component attributes needed for templates + + # Components should have all necessary attributes for template creation + assert hasattr(OpenAIModelComponent, "__name__") + assert hasattr(OpenAIModelComponent, "__module__") + assert hasattr(OpenAIModelComponent, "display_name") + assert isinstance(OpenAIModelComponent.display_name, str) + assert OpenAIModelComponent.display_name + assert hasattr(OpenAIModelComponent, "description") + assert isinstance(OpenAIModelComponent.description, str) + assert OpenAIModelComponent.description + assert hasattr(OpenAIModelComponent, "icon") + assert isinstance(OpenAIModelComponent.icon, str) + assert OpenAIModelComponent.icon + assert hasattr(OpenAIModelComponent, "inputs") + assert isinstance(OpenAIModelComponent.inputs, list) + assert len(OpenAIModelComponent.inputs) > 0 + # Check that each input has required attributes + for input_field in OpenAIModelComponent.inputs: + assert hasattr(input_field, "name"), f"Input {input_field} missing 'name' attribute" + assert hasattr(input_field, "display_name"), f"Input {input_field} missing 'display_name' attribute" + + def test_multiple_import_styles_same_result(self): + """Test that different import styles yield the same component.""" + # Import the same component in different ways + from langflow import components + from 
langflow.components.openai import OpenAIModelComponent as DirectImport + + dynamic_import = components.openai.OpenAIModelComponent + + import langflow.components.openai as openai_module + + module_import = openai_module.OpenAIModelComponent + + # All three should be the exact same class object + assert DirectImport is dynamic_import + assert dynamic_import is module_import + assert DirectImport is module_import + + def test_startup_performance_improvement(self): + """Test that startup time is improved with lazy loading.""" + # This test measures the difference in import time + # Fresh modules to test startup behavior + modules_to_clean = [ + "langflow.components.vectorstores", + "langflow.components.tools", + "langflow.components.langchain_utilities", + ] + + for module_name in modules_to_clean: + if module_name in sys.modules: + del sys.modules[module_name] + + # Time the import of a large module + start_time = time.time() + from langflow.components import vectorstores + + import_time = time.time() - start_time + + # Import time should be very fast (just loading the __init__.py) + assert import_time < 0.1 # Should be well under 100ms + + # Test that we can access a component (it may already be cached from previous tests) + # This is expected behavior in a test suite where components get cached + + # Now access a component - this should trigger loading + start_time = time.time() + chroma_component = vectorstores.ChromaVectorStoreComponent + access_time = time.time() - start_time + + assert chroma_component is not None + # Access time should still be reasonable + assert access_time < 2.0 # Should be under 2 seconds + + def test_memory_usage_efficiency(self): + """Test that memory usage is more efficient with lazy loading.""" + from langflow.components import processing + + # Count currently loaded components + initial_component_count = len([k for k in processing.__dict__ if k.endswith("Component")]) + + # Access just one component + combine_text = 
processing.CombineTextComponent + assert combine_text is not None + + # At least one more component should be loaded now + after_one_access = len([k for k in processing.__dict__ if k.endswith("Component")]) + assert after_one_access >= initial_component_count + + # Access another component + split_text = processing.SplitTextComponent + assert split_text is not None + + # Should have at least one more component loaded + after_two_access = len([k for k in processing.__dict__ if k.endswith("Component")]) + assert after_two_access >= after_one_access + + def test_error_handling_in_realistic_scenarios(self): + """Test error handling in realistic usage scenarios.""" + from langflow import components + + # Test accessing non-existent component category + with pytest.raises(AttributeError): + _ = components.nonexistent_category + + # Test accessing non-existent component in valid category + with pytest.raises(AttributeError): + _ = components.openai.NonExistentComponent + + def test_ide_autocomplete_support(self): + """Test that IDE autocomplete support still works.""" + import langflow.components.openai as openai_components + from langflow import components + + # __dir__ should return all available components/modules + main_dir = dir(components) + assert "openai" in main_dir + assert "data" in main_dir + assert "agents" in main_dir + + openai_dir = dir(openai_components) + assert "OpenAIModelComponent" in openai_dir + assert "OpenAIEmbeddingsComponent" in openai_dir + + def test_concurrent_access(self): + """Test that concurrent access to components works correctly.""" + import threading + + from langflow.components import helpers + + results = [] + errors = [] + + def access_component(): + try: + component = helpers.CalculatorComponent + results.append(component) + except Exception as e: + errors.append(e) + + # Create multiple threads accessing the same component + threads = [] + for _ in range(5): + thread = threading.Thread(target=access_component) + 
threads.append(thread) + thread.start() + + # Wait for all threads to complete + for thread in threads: + thread.join() + + # Should have no errors + assert len(errors) == 0 + assert len(results) == 5 + + # All results should be the same component class + first_result = results[0] + for result in results[1:]: + assert result is first_result + + def test_circular_import_prevention(self): + """Test that the refactor doesn't introduce circular imports.""" + # This test ensures that importing components doesn't create + # circular dependency issues + + # These imports should work without circular import errors + from langflow import components + from langflow.components import openai + + # Access components in different orders + model1 = components.openai.OpenAIModelComponent + model2 = openai.OpenAIModelComponent + model3 = OpenAIModelComponent + + # All should be the same + assert model1 is model2 is model3 + + def test_large_scale_component_access(self): + """Test accessing many components doesn't cause issues.""" + from langflow.components import vectorstores + + # Access multiple components rapidly + components_accessed = [] + component_names = [ + "ChromaVectorStoreComponent", + "PineconeVectorStoreComponent", + "FaissVectorStoreComponent", + "WeaviateVectorStoreComponent", + "QdrantVectorStoreComponent", + ] + + for name in component_names: + if hasattr(vectorstores, name): + component = getattr(vectorstores, name) + components_accessed.append(component) + + # Should have accessed multiple components without issues + assert len(components_accessed) > 0 + + # All should be different classes + assert len(set(components_accessed)) == len(components_accessed) + + def test_component_metadata_preservation(self): + """Test that component metadata is preserved after dynamic loading.""" + # Component should have all expected metadata + assert hasattr(OpenAIModelComponent, "__name__") + assert hasattr(OpenAIModelComponent, "__module__") + assert 
hasattr(OpenAIModelComponent, "__doc__") + + # Module path should be correct + assert "openai" in OpenAIModelComponent.__module__ + + def test_backwards_compatibility_comprehensive(self): + """Comprehensive test of backwards compatibility.""" + # Test all major import patterns that should still work + + # 1. Direct component imports + from langflow.components.data import APIRequestComponent + + assert AgentComponent is not None + assert APIRequestComponent is not None + + # 2. Module imports + # 3. Main module access + import langflow.components as comp + import langflow.components.helpers as helpers_mod + import langflow.components.openai as openai_mod + + # 4. Nested access + nested_component = comp.openai.OpenAIModelComponent + direct_component = openai_mod.OpenAIModelComponent + + # All patterns should work and yield consistent results + assert openai_mod.OpenAIModelComponent is not None + assert helpers_mod.CalculatorComponent is not None + assert nested_component is direct_component + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/src/backend/tests/unit/components/test_all_modules_importable.py b/src/backend/tests/unit/components/test_all_modules_importable.py new file mode 100644 index 000000000..876e88498 --- /dev/null +++ b/src/backend/tests/unit/components/test_all_modules_importable.py @@ -0,0 +1,307 @@ +"""Test to ensure all component modules are importable after dynamic import refactor. + +This test validates that every component module can be imported successfully +and that all components listed in __all__ can be accessed. 
+""" + +import importlib + +import pytest +from langflow import components + + +class TestAllModulesImportable: + """Test that all component modules are importable.""" + + def test_all_component_categories_importable(self): + """Test that all component categories in __all__ can be imported.""" + failed_imports = [] + + for category_name in components.__all__: + try: + category_module = getattr(components, category_name) + assert category_module is not None, f"Category {category_name} is None" + + # Verify it's actually a module + assert hasattr(category_module, "__name__"), f"Category {category_name} is not a module" + + except Exception as e: + failed_imports.append(f"{category_name}: {e!s}") + + if failed_imports: + pytest.fail(f"Failed to import categories: {failed_imports}") + + def test_all_components_in_categories_importable(self): + """Test that all components in each category's __all__ can be imported.""" + failed_imports = [] + successful_imports = 0 + + for category_name in components.__all__: + try: + category_module = getattr(components, category_name) + + if hasattr(category_module, "__all__"): + for component_name in category_module.__all__: + try: + component = getattr(category_module, component_name) + assert component is not None, f"Component {component_name} is None" + assert callable(component), f"Component {component_name} is not callable" + successful_imports += 1 + + except Exception as e: + failed_imports.append(f"{category_name}.{component_name}: {e!s}") + else: + # Category doesn't have __all__, skip + continue + + except Exception as e: + failed_imports.append(f"Category {category_name}: {e!s}") + + print(f"Successfully imported {successful_imports} components") # noqa: T201 + + if failed_imports: + print(f"Failed imports ({len(failed_imports)}):") # noqa: T201 + for failure in failed_imports[:10]: # Show first 10 failures + print(f" - {failure}") # noqa: T201 + if len(failed_imports) > 10: + print(f" ... 
and {len(failed_imports) - 10} more") # noqa: T201 + + pytest.fail(f"Failed to import {len(failed_imports)} components") + + def test_dynamic_imports_mapping_complete(self): + """Test that _dynamic_imports mapping is complete for all categories.""" + failed_mappings = [] + + for category_name in components.__all__: + try: + category_module = getattr(components, category_name) + + if hasattr(category_module, "__all__") and hasattr(category_module, "_dynamic_imports"): + category_all = set(category_module.__all__) + dynamic_imports_keys = set(category_module._dynamic_imports.keys()) + + # Check that all items in __all__ have corresponding _dynamic_imports entries + missing_in_dynamic = category_all - dynamic_imports_keys + if missing_in_dynamic: + failed_mappings.append(f"{category_name}: Missing in _dynamic_imports: {missing_in_dynamic}") + + # Check that all _dynamic_imports keys are in __all__ + missing_in_all = dynamic_imports_keys - category_all + if missing_in_all: + failed_mappings.append(f"{category_name}: Missing in __all__: {missing_in_all}") + + except Exception as e: + failed_mappings.append(f"{category_name}: Error checking mappings: {e!s}") + + if failed_mappings: + pytest.fail(f"Inconsistent mappings: {failed_mappings}") + + def test_backward_compatibility_imports(self): + """Test that traditional import patterns still work.""" + # Test some key imports that should always work + traditional_imports = [ + ("langflow.components.openai", "OpenAIModelComponent"), + ("langflow.components.anthropic", "AnthropicModelComponent"), + ("langflow.components.data", "APIRequestComponent"), + ("langflow.components.agents", "AgentComponent"), + ("langflow.components.helpers", "CalculatorComponent"), + ] + + failed_imports = [] + + for module_name, component_name in traditional_imports: + try: + module = importlib.import_module(module_name) + component = getattr(module, component_name) + assert component is not None + assert callable(component) + + except Exception as 
e: + failed_imports.append(f"{module_name}.{component_name}: {e!s}") + + if failed_imports: + pytest.fail(f"Traditional imports failed: {failed_imports}") + + def test_component_modules_have_required_attributes(self): + """Test that component modules have required attributes for dynamic loading.""" + failed_modules = [] + + for category_name in components.__all__: + try: + category_module = getattr(components, category_name) + + # Check for required attributes + required_attrs = ["__all__"] + + failed_modules.extend( + f"{category_name}: Missing required attribute {attr}" + for attr in required_attrs + if not hasattr(category_module, attr) + ) + + # Check that if it has dynamic imports, it has the pattern + if hasattr(category_module, "_dynamic_imports"): + if not hasattr(category_module, "__getattr__"): + failed_modules.append(f"{category_name}: Has _dynamic_imports but no __getattr__") + if not hasattr(category_module, "__dir__"): + failed_modules.append(f"{category_name}: Has _dynamic_imports but no __dir__") + + except Exception as e: + failed_modules.append(f"{category_name}: Error checking attributes: {e!s}") + + if failed_modules: + pytest.fail(f"Module attribute issues: {failed_modules}") + + def test_no_circular_imports(self): + """Test that there are no circular import issues.""" + # Test importing in different orders to catch circular imports + import_orders = [ + ["agents", "data", "openai"], + ["openai", "agents", "data"], + ["data", "openai", "agents"], + ] + + for order in import_orders: + try: + for category_name in order: + category_module = getattr(components, category_name) + # Access a component to trigger dynamic import + if hasattr(category_module, "__all__") and category_module.__all__: + first_component_name = category_module.__all__[0] + getattr(category_module, first_component_name) + + except Exception as e: + pytest.fail(f"Circular import issue with order {order}: {e!s}") + + def test_component_access_caching(self): + """Test that 
component access caching works correctly.""" + # Access the same component multiple times and ensure caching works + test_cases = [ + ("openai", "OpenAIModelComponent"), + ("data", "APIRequestComponent"), + ("helpers", "CalculatorComponent"), + ] + + for category_name, component_name in test_cases: + category_module = getattr(components, category_name) + + # First access + component1 = getattr(category_module, component_name) + + # Component should now be cached in module globals + assert component_name in category_module.__dict__ + + # Second access should return the same object + component2 = getattr(category_module, component_name) + assert component1 is component2, f"Caching failed for {category_name}.{component_name}" + + def test_error_handling_for_missing_components(self): + """Test that appropriate errors are raised for missing components.""" + test_cases = [ + ("openai", "NonExistentComponent"), + ("data", "AnotherNonExistentComponent"), + ] + + for category_name, component_name in test_cases: + category_module = getattr(components, category_name) + + with pytest.raises(AttributeError, match=f"has no attribute '{component_name}'"): + getattr(category_module, component_name) + + def test_dir_functionality(self): + """Test that __dir__ functionality works for all modules.""" + # Test main components module + main_dir = dir(components) + assert "openai" in main_dir + assert "data" in main_dir + assert "agents" in main_dir + + # Test category modules + for category_name in ["openai", "data", "helpers"]: + category_module = getattr(components, category_name) + category_dir = dir(category_module) + + # Should include all components from __all__ + if hasattr(category_module, "__all__"): + for component_name in category_module.__all__: + assert component_name in category_dir, f"{component_name} missing from dir({category_name})" + + def test_module_metadata_preservation(self): + """Test that module metadata is preserved after dynamic loading.""" + test_components 
= [ + ("openai", "OpenAIModelComponent"), + ("anthropic", "AnthropicModelComponent"), + ("data", "APIRequestComponent"), + ] + + for category_name, component_name in test_components: + category_module = getattr(components, category_name) + component = getattr(category_module, component_name) + + # Check that component has expected metadata + assert hasattr(component, "__name__") + assert hasattr(component, "__module__") + assert component.__name__ == component_name + assert category_name in component.__module__ + + +class TestSpecificModulePatterns: + """Test specific module patterns and edge cases.""" + + def test_empty_init_modules(self): + """Test modules that might have empty __init__.py files.""" + # These modules might have empty __init__.py files in the original structure + potentially_empty_modules = [ + "chains", + "output_parsers", + "textsplitters", + "toolkits", + "link_extractors", + "documentloaders", + ] + + for module_name in potentially_empty_modules: + if module_name in components.__all__: + try: + module = getattr(components, module_name) + # Should be able to import even if empty + assert module is not None + except Exception as e: + pytest.fail(f"Failed to import potentially empty module {module_name}: {e}") + + def test_platform_specific_imports(self): + """Test platform-specific imports like NVIDIA Windows components.""" + # Test NVIDIA module which has platform-specific logic + nvidia_module = components.nvidia + assert nvidia_module is not None + + # Should have basic components regardless of platform + assert "NVIDIAModelComponent" in nvidia_module.__all__ + + # Should be able to access components + nvidia_model = nvidia_module.NVIDIAModelComponent + assert nvidia_model is not None + + def test_large_modules_import_efficiently(self): + """Test that large modules with many components import efficiently.""" + import time + + # Test large modules + large_modules = ["vectorstores", "processing", "langchain_utilities"] + + for module_name in 
large_modules: + if module_name in components.__all__: + start_time = time.time() + module = getattr(components, module_name) + import_time = time.time() - start_time + + # Initial import should be fast (just loading __init__.py) + assert import_time < 0.5, f"Module {module_name} took too long to import: {import_time}s" + + # Should have components available + assert hasattr(module, "__all__") + assert len(module.__all__) > 0 + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/src/backend/tests/unit/components/test_dynamic_imports.py b/src/backend/tests/unit/components/test_dynamic_imports.py new file mode 100644 index 000000000..e51016c81 --- /dev/null +++ b/src/backend/tests/unit/components/test_dynamic_imports.py @@ -0,0 +1,297 @@ +"""Tests for dynamic import refactor in langflow components. + +This module tests the new langchain-style dynamic import system to ensure: +1. Lazy loading works correctly +2. Components are imported only when accessed +3. Caching works properly +4. Error handling for missing components +5. __dir__ functionality for IDE autocomplete +6. 
Backward compatibility with existing imports +""" + +from unittest.mock import patch + +import pytest +from langflow.components._importing import import_mod + + +class TestImportUtils: + """Test the import_mod utility function.""" + + def test_import_mod_with_module_name(self): + """Test importing specific attribute from a module.""" + # Test importing a specific class from a module + result = import_mod("OpenAIModelComponent", "openai_chat_model", "langflow.components.openai") + assert result is not None + assert hasattr(result, "__name__") + assert "OpenAI" in result.__name__ + + def test_import_mod_without_module_name(self): + """Test importing entire module when module_name is None.""" + result = import_mod("agents", "__module__", "langflow.components") + assert result is not None + # Should return the agents module + assert hasattr(result, "__all__") + + def test_import_mod_module_not_found(self): + """Test error handling when module doesn't exist.""" + with pytest.raises(ImportError, match="not found"): + import_mod("NonExistentComponent", "nonexistent_module", "langflow.components.openai") + + def test_import_mod_attribute_not_found(self): + """Test error handling when attribute doesn't exist in module.""" + with pytest.raises(AttributeError): + import_mod("NonExistentComponent", "openai_chat_model", "langflow.components.openai") + + +class TestComponentDynamicImports: + """Test dynamic import behavior in component modules.""" + + def test_main_components_module_dynamic_import(self): + """Test that main components module imports submodules dynamically.""" + # Import the main components module + from langflow import components + + # Test that submodules are in __all__ + assert "agents" in components.__all__ + assert "data" in components.__all__ + assert "openai" in components.__all__ + + # Access agents module - this should work via dynamic import + agents_module = components.agents + assert agents_module is not None + + # Should be cached in globals after 
access + assert "agents" in components.__dict__ + assert components.__dict__["agents"] is agents_module + + # Second access should return cached version + agents_module_2 = components.agents + assert agents_module_2 is agents_module + + def test_main_components_module_dir(self): + """Test __dir__ functionality for main components module.""" + from langflow import components + + dir_result = dir(components) + # Should include all component categories + assert "agents" in dir_result + assert "data" in dir_result + assert "openai" in dir_result + assert "vectorstores" in dir_result + + def test_main_components_module_missing_attribute(self): + """Test error handling for non-existent component category.""" + from langflow import components + + with pytest.raises(AttributeError, match="has no attribute 'nonexistent_category'"): + _ = components.nonexistent_category + + def test_category_module_dynamic_import(self): + """Test dynamic import behavior in category modules like openai.""" + import langflow.components.openai as openai_components + + # Test that components are in __all__ + assert "OpenAIModelComponent" in openai_components.__all__ + assert "OpenAIEmbeddingsComponent" in openai_components.__all__ + + # Access component - this should work via dynamic import + openai_model = openai_components.OpenAIModelComponent + assert openai_model is not None + + # Should be cached in globals after access + assert "OpenAIModelComponent" in openai_components.__dict__ + assert openai_components.__dict__["OpenAIModelComponent"] is openai_model + + # Second access should return cached version + openai_model_2 = openai_components.OpenAIModelComponent + assert openai_model_2 is openai_model + + def test_category_module_dir(self): + """Test __dir__ functionality for category modules.""" + import langflow.components.openai as openai_components + + dir_result = dir(openai_components) + assert "OpenAIModelComponent" in dir_result + assert "OpenAIEmbeddingsComponent" in dir_result + + 
def test_category_module_missing_component(self): + """Test error handling for non-existent component in category.""" + import langflow.components.openai as openai_components + + with pytest.raises(AttributeError, match="has no attribute 'NonExistentComponent'"): + _ = openai_components.NonExistentComponent + + def test_multiple_category_modules(self): + """Test dynamic imports work across multiple category modules.""" + import langflow.components.anthropic as anthropic_components + import langflow.components.data as data_components + + # Test different categories work independently + anthropic_model = anthropic_components.AnthropicModelComponent + api_request = data_components.APIRequestComponent + + assert anthropic_model is not None + assert api_request is not None + + # Test they're cached in their respective modules + assert "AnthropicModelComponent" in anthropic_components.__dict__ + assert "APIRequestComponent" in data_components.__dict__ + + def test_backward_compatibility(self): + """Test that existing import patterns still work.""" + # These imports should work the same as before + from langflow.components.agents import AgentComponent + from langflow.components.data import APIRequestComponent + from langflow.components.openai import OpenAIModelComponent + + assert OpenAIModelComponent is not None + assert APIRequestComponent is not None + assert AgentComponent is not None + + def test_component_instantiation(self): + """Test that dynamically imported components can be instantiated.""" + from langflow.components import helpers + + # Import component dynamically + calculator_class = helpers.CalculatorComponent + + # Should be able to instantiate (even if it requires parameters) + assert callable(calculator_class) + assert hasattr(calculator_class, "__init__") + + def test_import_error_handling(self): + """Test error handling when import fails.""" + import langflow.components.notdiamond as notdiamond_components + + # Patch the import_mod function directly + 
with patch("langflow.components.notdiamond.import_mod") as mock_import_mod: + # Mock import_mod to raise ImportError + mock_import_mod.side_effect = ImportError("Module not found") + + # Clear any cached attribute + if "NotDiamondComponent" in notdiamond_components.__dict__: + del notdiamond_components.__dict__["NotDiamondComponent"] + + with pytest.raises(AttributeError, match="Could not import"): + _ = notdiamond_components.NotDiamondComponent + + def test_consistency_check(self): + """Test that __all__ and _dynamic_imports are consistent.""" + import langflow.components.openai as openai_components + + # All items in __all__ should have corresponding entries in _dynamic_imports + for component_name in openai_components.__all__: + assert component_name in openai_components._dynamic_imports + + # All keys in _dynamic_imports should be in __all__ + for component_name in openai_components._dynamic_imports: + assert component_name in openai_components.__all__ + + def test_type_checking_imports(self): + """Test that TYPE_CHECKING imports work correctly with dynamic loading.""" + # This test ensures that imports in TYPE_CHECKING blocks + # work correctly with the dynamic import system + import langflow.components.searchapi as searchapi_components + + # Components should be available for dynamic loading + assert "SearchComponent" in searchapi_components.__all__ + assert "SearchComponent" in searchapi_components._dynamic_imports + + # Accessing should trigger dynamic import and caching + component = searchapi_components.SearchComponent + assert component is not None + assert "SearchComponent" in searchapi_components.__dict__ + + +class TestPerformanceCharacteristics: + """Test performance characteristics of dynamic imports.""" + + def test_lazy_loading_performance(self): + """Test that components can be accessed and cached properly.""" + from langflow.components import vectorstores + + # Test that we can access a component + chroma = 
vectorstores.ChromaVectorStoreComponent + assert chroma is not None + + # After access, it should be cached in the module's globals + assert "ChromaVectorStoreComponent" in vectorstores.__dict__ + + # Subsequent access should return the same cached object + chroma_2 = vectorstores.ChromaVectorStoreComponent + assert chroma_2 is chroma + + def test_caching_behavior(self): + """Test that components are cached after first access.""" + from langflow.components import models + + # First access + embedding_model_1 = models.EmbeddingModelComponent + + # Second access should return the exact same object (cached) + embedding_model_2 = models.EmbeddingModelComponent + + assert embedding_model_1 is embedding_model_2 + + def test_memory_usage_multiple_accesses(self): + """Test memory behavior with multiple component accesses.""" + from langflow.components import processing + + # Access multiple components + components = [] + component_names = ["CombineTextComponent", "SplitTextComponent", "JSONCleaner", "RegexExtractorComponent"] + + for name in component_names: + component = getattr(processing, name) + components.append(component) + # Each should be cached + assert name in processing.__dict__ + + # All should be different classes + assert len(set(components)) == len(components) + + +class TestSpecialCases: + """Test special cases and edge conditions.""" + + def test_empty_init_files(self): + """Test that empty __init__.py files are handled gracefully.""" + # Test accessing components from categories that might have empty __init__.py + from langflow import components + + # These should work even if some categories have empty __init__.py files + agents = components.agents + assert agents is not None + + def test_platform_specific_components(self): + """Test platform-specific component handling (like NVIDIA Windows components).""" + import langflow.components.nvidia as nvidia_components + + # NVIDIA components should be available + nvidia_model = 
nvidia_components.NVIDIAModelComponent + assert nvidia_model is not None + + # Platform-specific components should be handled correctly + # (This test will pass regardless of platform since the import structure handles it) + assert "NVIDIAModelComponent" in nvidia_components.__all__ + + def test_import_structure_integrity(self): + """Test that the import structure maintains integrity.""" + from langflow import components + + # Test that we can access nested components through the hierarchy + openai_model = components.openai.OpenAIModelComponent + data_api = components.data.APIRequestComponent + + assert openai_model is not None + assert data_api is not None + + # Test that both main module and submodules are properly cached + assert "openai" in components.__dict__ + assert "data" in components.__dict__ + + +if __name__ == "__main__": + # Run tests + pytest.main([__file__, "-v"]) diff --git a/src/backend/tests/unit/test_import_utils.py b/src/backend/tests/unit/test_import_utils.py new file mode 100644 index 000000000..3c9cc63bf --- /dev/null +++ b/src/backend/tests/unit/test_import_utils.py @@ -0,0 +1,176 @@ +"""Unit tests for the _import_utils module. + +Tests the core import_mod function used throughout the dynamic import system. 
+""" + +from unittest.mock import patch + +import pytest +from langflow.components._importing import import_mod + + +class TestImportAttr: + """Test the import_mod utility function in detail.""" + + def test_import_module_with_none_module_name(self): + """Test importing a module when module_name is None.""" + # This should import the module directly using the attr_name + result = import_mod("agents", None, "langflow.components") + + # Should return the agents module + assert result is not None + assert hasattr(result, "__all__") + + def test_import_module_with_module_name(self): + """Test importing a module when module_name is __module__.""" + # This should import the module directly using the attr_name + result = import_mod("agents", "__module__", "langflow.components") + + # Should return the agents module + assert result is not None + assert hasattr(result, "__all__") + + def test_import_modibute_from_module(self): + """Test importing a specific attribute from a module.""" + # Test importing a class from a specific module + result = import_mod("AnthropicModelComponent", "anthropic", "langflow.components.anthropic") + + assert result is not None + assert hasattr(result, "__name__") + assert "Component" in result.__name__ + + def test_import_nonexistent_module(self): + """Test error handling when module doesn't exist.""" + with pytest.raises(ImportError, match="not found"): + import_mod("SomeComponent", "nonexistent_module", "langflow.components.openai") + + def test_module_not_found_with_none_module_name(self): + """Test ModuleNotFoundError handling when module_name is None.""" + with pytest.raises(AttributeError, match="has no attribute"): + import_mod("nonexistent_module", None, "langflow.components") + + def test_module_not_found_with_module_special_name(self): + """Test ModuleNotFoundError handling when module_name is '__module__'.""" + with pytest.raises(AttributeError, match="has no attribute"): + import_mod("nonexistent_module", "__module__", 
"langflow.components") + + def test_import_nonexistent_attribute(self): + """Test error handling when attribute doesn't exist in module.""" + with pytest.raises(AttributeError): + import_mod("NonExistentComponent", "anthropic", "langflow.components.anthropic") + + def test_import_with_none_package(self): + """Test behavior when package is None.""" + # This should raise TypeError because relative imports require a package + with pytest.raises(TypeError, match="package.*required"): + import_mod("something", "some_module", None) + + def test_module_not_found_error_handling(self): + """Test specific ModuleNotFoundError handling.""" + with patch("importlib.import_module") as mock_import_module: + mock_import_module.side_effect = ModuleNotFoundError("No module named 'test'") + + with pytest.raises(ImportError, match="not found"): + import_mod("TestComponent", "test_module", "test.package") + + def test_getattr_error_handling(self): + """Test AttributeError handling when getting attribute from module.""" + # Test the case where the module exists but doesn't have the attribute + # Use a real module that exists + with pytest.raises(AttributeError): + # os module exists but doesn't have 'NonExistentAttribute' + import_mod("NonExistentAttribute", "path", "os") + + def test_relative_import_behavior(self): + """Test that relative imports are constructed correctly.""" + # This test verifies the relative import logic + result = import_mod("helpers", "__module__", "langflow.components") + assert result is not None + + def test_package_resolution(self): + """Test that package parameter is used correctly.""" + # Test with a known working package and module + result = import_mod("CalculatorComponent", "calculator_core", "langflow.components.helpers") + assert result is not None + assert callable(result) + + def test_import_mod_with_special_module_name(self): + """Test behavior with special module_name values.""" + # Test with "__module__" - should import the attr_name as a module + 
result = import_mod("data", "__module__", "langflow.components") + assert result is not None + + # Test with None - should also import the attr_name as a module + result2 = import_mod("data", None, "langflow.components") + assert result2 is not None + + def test_error_message_formatting(self): + """Test that error messages are properly formatted.""" + with pytest.raises(ImportError) as exc_info: + import_mod("NonExistent", "nonexistent", "langflow.components") + + error_msg = str(exc_info.value) + assert "langflow.components" in error_msg + assert "nonexistent" in error_msg + + def test_return_value_types(self): + """Test that import_mod returns appropriate types.""" + # Test module import + module_result = import_mod("openai", "__module__", "langflow.components") + assert hasattr(module_result, "__name__") + + # Test class import + class_result = import_mod("OpenAIModelComponent", "openai_chat_model", "langflow.components.openai") + assert callable(class_result) + assert hasattr(class_result, "__name__") + + def test_caching_independence(self): + """Test that import_mod doesn't interfere with Python's module caching.""" + # Multiple calls should work consistently + result1 = import_mod("agents", "__module__", "langflow.components") + result2 = import_mod("agents", "__module__", "langflow.components") + + # Should return the same module object (Python's import caching) + assert result1 is result2 + + +class TestImportAttrEdgeCases: + """Test edge cases and boundary conditions for import_mod.""" + + def test_empty_strings(self): + """Test behavior with empty strings.""" + with pytest.raises((ImportError, ValueError)): + import_mod("", "module", "package") + + with pytest.raises((ImportError, ValueError)): + import_mod("attr", "", "package") + + def test_whitespace_handling(self): + """Test that whitespace in names is handled appropriately.""" + with pytest.raises(ImportError): + import_mod("attr name", "module", "package") + + def test_special_characters(self): + 
"""Test handling of special characters in names.""" + with pytest.raises((ImportError, ValueError)): + import_mod("attr-name", "module", "package") + + def test_unicode_names(self): + """Test handling of unicode characters in names.""" + with pytest.raises(ImportError): + import_mod("attß", "module", "package") + + def test_very_long_names(self): + """Test handling of very long module/attribute names.""" + long_name = "a" * 1000 + with pytest.raises(ImportError): + import_mod(long_name, "module", "package") + + def test_numeric_names(self): + """Test handling of numeric names.""" + with pytest.raises(ImportError): + import_mod("123", "module", "package") + + +if __name__ == "__main__": + pytest.main([__file__, "-v"])