diff --git a/src/backend/langflow/services/cache/manager.py b/src/backend/langflow/services/cache/manager.py
index fc4a590fb..b73908e51 100644
--- a/src/backend/langflow/services/cache/manager.py
+++ b/src/backend/langflow/services/cache/manager.py
@@ -72,7 +72,8 @@ class InMemoryCache(BaseCacheService, Service):
             ):
                 # Move the key to the end to make it recently used
                 self._cache.move_to_end(key)
-                return item["value"]
+                unpickled = pickle.loads(item["value"])
+                return unpickled
             else:
                 self.delete(key)
                 return None
@@ -94,7 +95,9 @@ class InMemoryCache(BaseCacheService, Service):
             elif self.max_size and len(self._cache) >= self.max_size:
                 # Remove least recently used item
                 self._cache.popitem(last=False)
-            self._cache[key] = {"value": value, "time": time.time()}
+            # pickle locally to mimic Redis
+            pickled = pickle.dumps(value)
+            self._cache[key] = {"value": pickled, "time": time.time()}
 
     def upsert(self, key, value):
         """
@@ -257,7 +260,10 @@ class RedisCache(BaseCacheService, Service):
             value: The value to cache.
         """
         try:
-            self._client.setex(key, self.expiration_time, pickle.dumps(value))
+            if pickled := pickle.dumps(value):
+                result = self._client.setex(key, self.expiration_time, pickled)
+                if not result:
+                    raise ValueError("RedisCache could not set the value.")
         except TypeError as exc:
             raise TypeError(
                 "RedisCache only accepts values that can be pickled. "
diff --git a/src/backend/langflow/services/session/manager.py b/src/backend/langflow/services/session/manager.py
index 44108b14f..e0ddeb0c2 100644
--- a/src/backend/langflow/services/session/manager.py
+++ b/src/backend/langflow/services/session/manager.py
@@ -2,7 +2,7 @@ from typing import TYPE_CHECKING
 from langflow.interface.run import build_sorted_vertices
 from langflow.services.base import Service
 from langflow.services.cache.utils import compute_dict_hash
-
+from loguru import logger
 from langflow.services.session.utils import session_id_generator
 
 if TYPE_CHECKING:
@@ -20,8 +20,12 @@ class SessionService(Service):
         if key in self.cache_service:
             return self.cache_service.get(key)
 
+        if key is None:
+            key = self.generate_key(session_id=None, data_graph=data_graph)
+
         # If not cached, build the graph and cache it
         graph, artifacts = build_sorted_vertices(data_graph)
+        self.cache_service.set(key, (graph, artifacts))
 
         return graph, artifacts
 