🐛 fix(process.py): caching objects does not depend on is_first_message anymore

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-06-17 13:34:35 -03:00
commit b93d5020b3
2 changed files with 1 additions and 24 deletions

View file

@@ -109,23 +109,13 @@ def get_result_and_thought(langchain_object, message: str):
return result, thought
def load_or_build_langchain_object(data_graph, is_first_message=False):
"""
Load langchain object from cache if it exists, otherwise build it.
"""
if is_first_message:
build_langchain_object_with_caching.clear_cache()
return build_langchain_object_with_caching(data_graph)
def process_graph_cached(data_graph: Dict[str, Any], message: str):
"""
Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate,then run the graph and return the result and thought.
"""
# Load langchain object
is_first_message = len(data_graph.get("chatHistory", [])) == 0
langchain_object = load_or_build_langchain_object(data_graph, is_first_message)
langchain_object = build_langchain_object_with_caching(data_graph)
logger.debug("Loaded langchain object")
if langchain_object is None:

View file

@@ -1,6 +1,5 @@
import json
from langflow.graph import Graph
from langflow.processing.process import load_or_build_langchain_object
import pytest
from langflow.interface.run import (
@@ -41,18 +40,6 @@ def langchain_objects_are_equal(obj1, obj2):
return str(obj1) == str(obj2)
# Test load_or_build_langchain_object
def test_load_or_build_langchain_object_first_message_true(basic_data_graph):
build_langchain_object_with_caching.clear_cache()
graph = load_or_build_langchain_object(basic_data_graph, is_first_message=True)
assert graph is not None
def test_load_or_build_langchain_object_first_message_false(basic_data_graph):
graph = load_or_build_langchain_object(basic_data_graph, is_first_message=False)
assert graph is not None
# Test build_langchain_object_with_caching
def test_build_langchain_object_with_caching(basic_data_graph):
build_langchain_object_with_caching.clear_cache()