🐛 fix(endpoints.py): change type annotation of session_id parameter in process_flow function to Union[None, str] to allow for None value

🐛 fix(schemas.py): change type annotation of session_id field in ProcessResponse schema to Optional[str] to allow for None value
🐛 fix(run.py): change return type annotation of build_sorted_vertices_with_caching function to Tuple[Any, Dict] to specify the return types
🐛 fix(process.py): change return type annotation of load_langchain_object function to Tuple[Union[Chain, VectorStore], Dict[str, Any], str] to include the session_id value in the return tuple
This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-08-16 21:43:35 -03:00
commit 87958c255e
4 changed files with 6 additions and 5 deletions

View file

@@ -1,5 +1,5 @@
from http import HTTPStatus
from typing import Annotated, Optional
from typing import Annotated, Optional, Union
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
@@ -75,7 +75,7 @@ async def process_flow(
inputs: Optional[dict] = None,
tweaks: Optional[dict] = None,
clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821
session_id: Annotated[str, Body(embed=True)] = None, # noqa: F821
session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821
session: Session = Depends(get_session),
):
"""

View file

@@ -47,7 +47,7 @@ class ProcessResponse(BaseModel):
"""Process response schema."""
result: dict
session_id: str = None
session_id: Optional[str] = None
class ChatMessage(BaseModel):

View file

@@ -1,3 +1,4 @@
from typing import Any, Dict, Tuple
from langflow.services.cache.utils import memoize_dict
from langflow.graph import Graph
from langflow.utils.logger import logger
@@ -15,7 +16,7 @@ def build_langchain_object_with_caching(data_graph):
@memoize_dict(maxsize=10)
def build_sorted_vertices_with_caching(data_graph):
def build_sorted_vertices_with_caching(data_graph) -> Tuple[Any, Dict]:
"""
Build langchain object from data_graph.
"""

View file

@@ -109,7 +109,7 @@ def clear_caches_if_needed(clear_cache: bool):
def load_langchain_object(
data_graph: Dict[str, Any], session_id: str
) -> Tuple[Union[Chain, VectorStore], Dict[str, Any]]:
) -> Tuple[Union[Chain, VectorStore], Dict[str, Any], str]:
langchain_object, artifacts = get_build_result(data_graph, session_id)
session_id = build_sorted_vertices_with_caching.hash
logger.debug("Loaded LangChain object")