From bfee5a508f57b6aeaae687c51d96860b689c6ae8 Mon Sep 17 00:00:00 2001 From: italojohnny Date: Mon, 17 Jun 2024 09:27:49 -0300 Subject: [PATCH 1/2] unified vertex log creation --- src/backend/base/langflow/api/v1/chat.py | 5 +---- src/backend/base/langflow/graph/vertex/base.py | 10 +++++----- src/backend/base/langflow/graph/vertex/types.py | 4 ++-- src/backend/base/langflow/schema/schema.py | 12 ++++-------- 4 files changed, 12 insertions(+), 19 deletions(-) diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py index fb20b8cd3..1affd42b0 100644 --- a/src/backend/base/langflow/api/v1/chat.py +++ b/src/backend/base/langflow/api/v1/chat.py @@ -149,7 +149,6 @@ async def build_vertex( next_runnable_vertices = [] top_level_vertices = [] - logs = {} try: start_time = time.perf_counter() cache = await chat_service.get_cache(flow_id_str) @@ -189,15 +188,13 @@ async def build_vertex( valid = False output_label = vertex.outputs[0]["name"] if vertex.outputs else "output" logs = {output_label: [Log(message=params, type="error")]} - result_data_response = ResultDataResponse(results={}) + result_data_response = ResultDataResponse(results={}, logs=logs) artifacts = {} # If there's an error building the vertex # we need to clear the cache await chat_service.clear_cache(flow_id_str) result_data_response.message = artifacts - if logs: - result_data_response.logs = logs # Log the vertex build if not vertex.will_stream: diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py index bb0ddce23..30c370687 100644 --- a/src/backend/base/langflow/graph/vertex/base.py +++ b/src/backend/base/langflow/graph/vertex/base.py @@ -13,7 +13,7 @@ from langflow.graph.utils import UnbuiltObject, UnbuiltResult from langflow.interface.initialize import loading from langflow.interface.listing import lazy_load_dict from langflow.schema.artifact import ArtifactType -from langflow.schema.schema import 
INPUT_FIELD_NAME, Log, build_log_from_raw_and_type +from langflow.schema.schema import INPUT_FIELD_NAME, Log, build_logs from langflow.services.deps import get_storage_service from langflow.services.monitor.utils import log_transaction from langflow.utils.constants import DIRECT_TYPES @@ -113,6 +113,8 @@ class Vertex: self.build_times.append(time) def set_result(self, result: ResultData) -> None: + from pprint import pprint + pprint(result.model_dump()) self.result = result def get_built_result(self): @@ -658,13 +660,11 @@ class Vertex: if len(result) == 2: self._built_object, self.artifacts = result elif len(result) == 3: + import pdb; pdb.set_trace() self._custom_component, self._built_object, self.artifacts = result self.artifacts_raw = self.artifacts.get("raw", None) self.artifacts_type = self.artifacts.get("type", None) or ArtifactType.UNKNOWN.value - self.logs[self.outputs[0]["name"]] = build_log_from_raw_and_type( self.artifacts_raw, self.artifacts_type ) - + self.logs = build_logs(self) else: self._built_object = result diff --git a/src/backend/base/langflow/graph/vertex/types.py b/src/backend/base/langflow/graph/vertex/types.py index cba31c6bb..14bb7b2bc 100644 --- a/src/backend/base/langflow/graph/vertex/types.py +++ b/src/backend/base/langflow/graph/vertex/types.py @@ -10,7 +10,7 @@ from langflow.graph.utils import UnbuiltObject, serialize_field from langflow.graph.vertex.base import Vertex from langflow.schema import Data from langflow.schema.artifact import ArtifactType -from langflow.schema.schema import INPUT_FIELD_NAME, build_logs_from_artifacts +from langflow.schema.schema import INPUT_FIELD_NAME, build_logs from langflow.services.monitor.utils import log_transaction, log_vertex_build from langflow.utils.schemas import ChatOutputResponse, DataOutputResponse from langflow.utils.util import unescape_string @@ -48,7 +48,7 @@ class ComponentVertex(Vertex): for key in self.artifacts: self.artifacts_raw[key] = self.artifacts[key].get("raw", None) 
self.artifacts_type[key] = self.artifacts[key].get("type", None) or ArtifactType.UNKNOWN.value - self.logs = build_logs_from_artifacts(self.artifacts) + self.logs = build_logs(self) else: self._built_object = result diff --git a/src/backend/base/langflow/schema/schema.py b/src/backend/base/langflow/schema/schema.py index 24dd30e68..65aad288d 100644 --- a/src/backend/base/langflow/schema/schema.py +++ b/src/backend/base/langflow/schema/schema.py @@ -17,11 +17,11 @@ class Log(TypedDict): type: str -def build_logs_from_artifacts(artifacts: dict) -> dict: +def build_logs(vertex) -> dict: logs = {} - for key in artifacts: - message = artifacts[key]["raw"] - _type = artifacts[key]["type"] + for key in vertex.artifacts: + message = vertex.artifacts[key]["raw"] + _type = vertex.artifacts[key]["type"] if "stream_url" in message and "type" in message: stream_url = StreamURL(location=message["stream_url"]) @@ -31,7 +31,3 @@ def build_logs_from_artifacts(artifacts: dict) -> dict: logs[key] = [log] return logs - - -def build_log_from_raw_and_type(raw: Any, log_type: str) -> Log: - return Log(message=raw, type=log_type) From d1ab180040ddfeaefefe81aae0a288dd337313b5 Mon Sep 17 00:00:00 2001 From: ogabrielluiz Date: Mon, 17 Jun 2024 10:08:59 -0300 Subject: [PATCH 2/2] Refactor `utils.py` and `message.py` modules This commit refactors the `utils.py` module in the `base/prompts` directory and the `message.py` module in the `schema` directory. 
The changes include: - Importing the `Message` class from `langflow.schema.message` in `utils.py` - Importing various modules and classes from `langchain_core` in `message.py` - Adding a new method `load_lc_prompt` to the `Message` class in `message.py` - Adding a new class method `from_lc_prompt` to the `Message` class in `message.py` - Adding a new method `format_text` to the `Message` class in `message.py` - Adding a new class method `from_template_and_variables` to the `Message` class in `message.py` These changes improve the organization and functionality of the code in the mentioned modules. --- .../base/langflow/base/prompts/utils.py | 3 +- src/backend/base/langflow/schema/message.py | 37 +++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/src/backend/base/langflow/base/prompts/utils.py b/src/backend/base/langflow/base/prompts/utils.py index 77411a578..948d09954 100644 --- a/src/backend/base/langflow/base/prompts/utils.py +++ b/src/backend/base/langflow/base/prompts/utils.py @@ -3,7 +3,6 @@ from copy import deepcopy from langchain_core.documents import Document from langflow.schema import Data -from langflow.schema.message import Message def data_to_string(record: Data) -> str: @@ -29,6 +28,8 @@ def dict_values_to_string(d: dict) -> dict: Returns: dict: The dictionary with values converted to strings. 
""" + from langflow.schema.message import Message + # Do something similar to the above d_copy = deepcopy(d) for key, value in d_copy.items(): diff --git a/src/backend/base/langflow/schema/message.py b/src/backend/base/langflow/schema/message.py index 706d337aa..4dde3afae 100644 --- a/src/backend/base/langflow/schema/message.py +++ b/src/backend/base/langflow/schema/message.py @@ -1,11 +1,14 @@ from datetime import datetime, timezone from typing import Annotated, Any, AsyncIterator, Iterator, Optional +from langchain_core.load import load from langchain_core.messages import AIMessage, BaseMessage, HumanMessage from langchain_core.prompt_values import ImagePromptValue +from langchain_core.prompts import BaseChatPromptTemplate, ChatPromptTemplate, PromptTemplate from langchain_core.prompts.image import ImagePromptTemplate from pydantic import BeforeValidator, ConfigDict, Field, field_serializer +from langflow.base.prompts.utils import dict_values_to_string from langflow.schema.data import Data from langflow.schema.image import Image, get_file_paths, is_image_file @@ -110,3 +113,37 @@ class Message(Data): image_prompt_value: ImagePromptValue = image_template.invoke(input={"path": file}) content_dicts.append({"type": "image_url", "image_url": image_prompt_value.image_url}) return content_dicts + + def load_lc_prompt(self): + if "prompt" not in self: + raise ValueError("Prompt is required.") + return load(self.prompt) + + @classmethod + def from_lc_prompt( + cls, + prompt: BaseChatPromptTemplate, + ): + prompt_json = prompt.to_json() + return cls(prompt=prompt_json) + + def format_text(self): + prompt_template = PromptTemplate.from_template(self.template) + variables_with_str_values = dict_values_to_string(self.variables) + formatted_prompt = prompt_template.format(**variables_with_str_values) + self.text = formatted_prompt + return formatted_prompt + + @classmethod + async def from_template_and_variables(cls, template: str, variables: dict): + instance = 
cls(template=template, variables=variables) + contents = [{"type": "text", "text": instance.format_text()}] + # Get all Message instances from the kwargs + for value in variables.values(): + if isinstance(value, cls): + content_dicts = await value.get_file_content_dicts() + contents.extend(content_dicts) + prompt_template = ChatPromptTemplate.from_messages([HumanMessage(content=contents)]) + instance.messages = prompt_template.messages + instance.prompt = prompt_template.to_json() + return instance