Merge branch 'cz/mergeAll' of https://github.com/langflow-ai/langflow into cz/mergeAll

This commit is contained in:
cristhianzl 2024-06-07 16:22:09 -03:00
commit d7fef29fa3
5 changed files with 8 additions and 41 deletions

View file

@@ -214,7 +214,7 @@ async def build_and_cache_graph_from_db(flow_id: str, session: Session, chat_ser
vertex = graph.get_vertex(vertex_id)
if vertex is None:
raise ValueError(f"Vertex {vertex_id} not found")
if vertex._raw_params.get("session_id") is None:
if not vertex._raw_params.get("session_id"):
vertex.update_raw_params({"session_id": flow_id})
await chat_service.set_cache(flow_id, graph)
return graph

View file

@@ -10,11 +10,11 @@ from loguru import logger
from langflow.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData
from langflow.graph.utils import ArtifactType, UnbuiltObject, UnbuiltResult
from langflow.graph.vertex.utils import log_transaction
from langflow.interface.initialize import loading
from langflow.interface.listing import lazy_load_dict
from langflow.schema.schema import INPUT_FIELD_NAME
from langflow.services.deps import get_storage_service
from langflow.services.monitor.utils import log_transaction
from langflow.utils.constants import DIRECT_TYPES
from langflow.utils.schemas import ChatOutputResponse
from langflow.utils.util import sync_to_async, unescape_string
@@ -530,11 +530,11 @@ class Vertex:
The built result if use_result is True, else the built object.
"""
if not self._built:
log_transaction(source=self, target=requester, flow_id=self.graph.flow_id, status="error")
log_transaction(vertex=self, target=requester, status="error")
raise ValueError(f"Component {self.display_name} has not been built yet")
result = self._built_result if self.use_result else self._built_object
log_transaction(source=self, target=requester, flow_id=self.graph.flow_id, status="success")
log_transaction(vertex=self, target=requester, status="success")
return result
async def _build_vertex_and_update_params(self, key, vertex: "Vertex"):

View file

@@ -1,9 +1,5 @@
from typing import TYPE_CHECKING
from loguru import logger
from langflow.services.deps import get_monitor_service
if TYPE_CHECKING:
from langflow.graph.vertex.base import Vertex
@@ -21,34 +17,3 @@ def build_clean_params(target: "Vertex") -> dict:
if isinstance(value, list):
params[key] = [item for item in value if isinstance(item, (str, int, bool, float, list, dict))]
return params
def log_transaction(source: "Vertex", target: "Vertex", flow_id, status, error=None):
"""
Logs a transaction between two vertices.
Args:
source (Vertex): The source vertex of the transaction.
target (Vertex): The target vertex of the transaction.
status: The status of the transaction.
error (Optional): Any error associated with the transaction.
Raises:
Exception: If there is an error while logging the transaction.
"""
try:
monitor_service = get_monitor_service()
clean_params = build_clean_params(target)
data = {
"source": source.vertex_type,
"target": target.vertex_type,
"target_args": clean_params,
"timestamp": monitor_service.get_timestamp(),
"status": status,
"error": error,
"flow_id": flow_id,
}
monitor_service.add_row(table_name="transactions", data=data)
except Exception as e:
logger.error(f"Error logging transaction: {e}")

View file

@@ -12,6 +12,7 @@ class TransactionModel(BaseModel):
index: Optional[int] = Field(default=None)
timestamp: Optional[datetime] = Field(default_factory=datetime.now, alias="timestamp")
vertex_id: str
target_id: str | None = None
inputs: dict
outputs: dict
status: str

View file

@@ -178,12 +178,13 @@ def build_clean_params(target: "Vertex") -> dict:
return params
def log_transaction(vertex: "Vertex", status, error=None):
def log_transaction(vertex: "Vertex", status, target: Optional["Vertex"] = None, error=None):
try:
monitor_service = get_monitor_service()
clean_params = build_clean_params(vertex)
data = {
"vertex_id": vertex.id,
"vertex_id": str(vertex.id),
"target_id": str(target.id) if target else None,
"inputs": clean_params,
"outputs": vertex.result.model_dump_json(),
"timestamp": monitor_service.get_timestamp(),