diff --git a/src/backend/base/langflow/services/cache/factory.py b/src/backend/base/langflow/services/cache/factory.py index 638d5ff73..5cc6b12af 100644 --- a/src/backend/base/langflow/services/cache/factory.py +++ b/src/backend/base/langflow/services/cache/factory.py @@ -28,10 +28,11 @@ class CacheServiceFactory(ServiceFactory): if redis_cache.is_connected(): logger.debug("Redis cache is connected") return redis_cache - logger.warning("Redis cache is not connected, falling back to in-memory cache") - return AsyncInMemoryCache() + else: + # do not attempt to fall back to another cache type + raise ConnectionError("Failed to connect to Redis cache") elif settings_service.settings.cache_type == "memory": - return ThreadingInMemoryCache() + return ThreadingInMemoryCache(expiration_time=settings_service.settings.cache_expire) elif settings_service.settings.cache_type == "async": - return AsyncInMemoryCache() + return AsyncInMemoryCache(expiration_time=settings_service.settings.cache_expire) diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py index 46aee5c1f..4575069b7 100644 --- a/src/backend/base/langflow/services/settings/base.py +++ b/src/backend/base/langflow/services/settings/base.py @@ -74,6 +74,8 @@ class Settings(BaseSettings): """The number of connections to allow that can be opened beyond the pool size. If not provided, the default is 10.""" cache_type: str = "async" """The cache type can be 'async' or 'redis'.""" + cache_expire: int = 3600 + """The cache expiration time in seconds.""" variable_store: str = "db" """The store can be 'db' or 'kubernetes'."""