🐛 fix(cache/manager.py): unpickle cached value before returning it to fix deserialization issue

🐛 fix(cache/manager.py): pickle value before caching it to mimic Redis behavior
🐛 fix(cache/manager.py): raise ValueError if RedisCache fails to set the value
🐛 fix(session/manager.py): generate key if it is None before checking cache
✨ feat(session/manager.py): add logging import to enable logging in the session manager
This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-09-22 10:48:13 -03:00
commit df19298637
2 changed files with 14 additions and 4 deletions

View file

@@ -72,7 +72,8 @@ class InMemoryCache(BaseCacheService, Service):
):
# Move the key to the end to make it recently used
self._cache.move_to_end(key)
return item["value"]
unpickled = pickle.loads(item["value"])
return unpickled
else:
self.delete(key)
return None
@@ -94,7 +95,9 @@ class InMemoryCache(BaseCacheService, Service):
elif self.max_size and len(self._cache) >= self.max_size:
# Remove least recently used item
self._cache.popitem(last=False)
self._cache[key] = {"value": value, "time": time.time()}
# pickle locally to mimic Redis
pickled = pickle.dumps(value)
self._cache[key] = {"value": pickled, "time": time.time()}
def upsert(self, key, value):
"""
@@ -257,7 +260,10 @@ class RedisCache(BaseCacheService, Service):
value: The value to cache.
"""
try:
self._client.setex(key, self.expiration_time, pickle.dumps(value))
if pickled := pickle.dumps(value):
result = self._client.setex(key, self.expiration_time, pickled)
if not result:
raise ValueError("RedisCache could not set the value.")
except TypeError as exc:
raise TypeError(
"RedisCache only accepts values that can be pickled. "

View file

@@ -2,7 +2,7 @@ from typing import TYPE_CHECKING
from langflow.interface.run import build_sorted_vertices
from langflow.services.base import Service
from langflow.services.cache.utils import compute_dict_hash
from loguru import logger
from langflow.services.session.utils import session_id_generator
if TYPE_CHECKING:
@@ -20,8 +20,12 @@ class SessionService(Service):
if key in self.cache_service:
return self.cache_service.get(key)
if key is None:
key = self.generate_key(session_id=None, data_graph=data_graph)
# If not cached, build the graph and cache it
graph, artifacts = build_sorted_vertices(data_graph)
self.cache_service.set(key, (graph, artifacts))
return graph, artifacts