diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py
index ff89e92bf..1ab2b4ce5 100644
--- a/src/backend/langflow/interface/utils.py
+++ b/src/backend/langflow/interface/utils.py
@@ -66,17 +66,26 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]:
 
 def setup_llm_caching():
     """Setup LLM caching."""
+    from langflow.settings import settings
+
     try:
-        import langchain
-        from langflow.settings import settings
-        from langflow.interface.importing.utils import import_class
-
-        cache_class = import_class(f"langchain.cache.{settings.cache}")
-
-        logger.debug(f"Setting up LLM caching with {cache_class.__name__}")
-        langchain.llm_cache = cache_class()
-        logger.info(f"LLM caching setup with {cache_class.__name__}")
+        set_langchain_cache(settings)
     except ImportError:
         logger.warning(f"Could not import {settings.cache}. ")
     except Exception as exc:
         logger.warning(f"Could not setup LLM caching. Error: {exc}")
+
+
+# TODO Rename this here and in `setup_llm_caching`
+def set_langchain_cache(settings):
+    import os
+
+    import langchain
+    from langflow.interface.importing.utils import import_class
+
+    # LANGFLOW_LANGCHAIN_CACHE env var overrides settings.cache when set.
+    cache_type = os.getenv("LANGFLOW_LANGCHAIN_CACHE")
+    cache_class = import_class(f"langchain.cache.{cache_type or settings.cache}")
+
+    logger.debug(f"Setting up LLM caching with {cache_class.__name__}")
+    langchain.llm_cache = cache_class()
+    logger.info(f"LLM caching setup with {cache_class.__name__}")