🐛 fix(utils.py): rename cache_class variable to cache_type for clarity and consistency

🔧 chore(utils.py): refactor `setup_llm_caching` to extract cache setup logic into a separate function for better modularity and readability
The variable `cache_class` has been renamed to `cache_type` to improve clarity and consistency with the naming conventions. The `setup_llm_caching` function has been refactored to extract the cache setup logic into a separate function called `set_langchain_cache`. This improves modularity and readability of the code.
This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-07-05 12:09:53 -03:00
commit 4b1fb4a49e

View file

@@ -66,17 +66,24 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]:
def setup_llm_caching():
    """Set up LangChain LLM caching from the application settings.

    Delegates the actual cache installation to `set_langchain_cache` and
    downgrades any failure to a warning, so a bad or missing cache
    configuration never prevents the application from starting.
    """
    from langflow.settings import settings

    try:
        set_langchain_cache(settings)
    except ImportError:
        # The configured cache class does not exist under langchain.cache.
        logger.warning(f"Could not import {settings.cache}. ")
    except Exception as exc:
        # Caching is an optimization, not a requirement — log and continue.
        logger.warning(f"Could not setup LLM caching. Error: {exc}")
def set_langchain_cache(settings):
    """Install the configured cache class as LangChain's global LLM cache.

    The ``LANGFLOW_LANGCHAIN_CACHE`` environment variable, when set, takes
    precedence over ``settings.cache`` as the cache class name; the class is
    resolved dynamically from the ``langchain.cache`` module.
    """
    import langchain
    from langflow.interface.importing.utils import import_class

    # Environment override wins; otherwise fall back to the settings value.
    env_cache = os.getenv("LANGFLOW_LANGCHAIN_CACHE")
    cache_name = env_cache if env_cache else settings.cache
    cache_class = import_class(f"langchain.cache.{cache_name}")

    logger.debug(f"Setting up LLM caching with {cache_class.__name__}")
    langchain.llm_cache = cache_class()
    logger.info(f"LLM caching setup with {cache_class.__name__}")