Merge branch 'two_edges' of https://github.com/langflow-ai/langflow into two_edges

This commit is contained in:
cristhianzl 2024-06-17 10:20:16 -03:00
commit a5b63711db
6 changed files with 51 additions and 20 deletions

View file

@@ -149,7 +149,6 @@ async def build_vertex(
next_runnable_vertices = []
top_level_vertices = []
logs = {}
try:
start_time = time.perf_counter()
cache = await chat_service.get_cache(flow_id_str)
@@ -189,15 +188,13 @@ async def build_vertex(
valid = False
output_label = vertex.outputs[0]["name"] if vertex.outputs else "output"
logs = {output_label: [Log(message=params, type="error")]}
result_data_response = ResultDataResponse(results={})
result_data_response = ResultDataResponse(results={}, logs=logs)
artifacts = {}
# If there's an error building the vertex
# we need to clear the cache
await chat_service.clear_cache(flow_id_str)
result_data_response.message = artifacts
if logs:
result_data_response.logs = logs
# Log the vertex build
if not vertex.will_stream:

View file

@@ -3,7 +3,6 @@ from copy import deepcopy
from langchain_core.documents import Document
from langflow.schema import Data
from langflow.schema.message import Message
def data_to_string(record: Data) -> str:
@@ -29,6 +28,8 @@ def dict_values_to_string(d: dict) -> dict:
Returns:
dict: The dictionary with values converted to strings.
"""
from langflow.schema.message import Message
# Do something similar to the above
d_copy = deepcopy(d)
for key, value in d_copy.items():

View file

@@ -13,7 +13,7 @@ from langflow.graph.utils import UnbuiltObject, UnbuiltResult
from langflow.interface.initialize import loading
from langflow.interface.listing import lazy_load_dict
from langflow.schema.artifact import ArtifactType
from langflow.schema.schema import INPUT_FIELD_NAME, Log, build_log_from_raw_and_type
from langflow.schema.schema import INPUT_FIELD_NAME, Log, build_logs
from langflow.services.deps import get_storage_service
from langflow.services.monitor.utils import log_transaction
from langflow.utils.constants import DIRECT_TYPES
@@ -113,6 +113,8 @@ class Vertex:
self.build_times.append(time)
def set_result(self, result: ResultData) -> None:
from pprint import pprint
pprint(result.model_dump())
self.result = result
def get_built_result(self):
@@ -658,13 +660,11 @@ class Vertex:
if len(result) == 2:
self._built_object, self.artifacts = result
elif len(result) == 3:
import pdb; pdb.set_trace()
self._custom_component, self._built_object, self.artifacts = result
self.artifacts_raw = self.artifacts.get("raw", None)
self.artifacts_type = self.artifacts.get("type", None) or ArtifactType.UNKNOWN.value
self.logs[self.outputs[0]["name"]] = build_log_from_raw_and_type(
self.artifacts_raw, self.artifacts_type
)
self.logs = build_log(self)
else:
self._built_object = result

View file

@@ -10,7 +10,7 @@ from langflow.graph.utils import UnbuiltObject, serialize_field
from langflow.graph.vertex.base import Vertex
from langflow.schema import Data
from langflow.schema.artifact import ArtifactType
from langflow.schema.schema import INPUT_FIELD_NAME, build_logs_from_artifacts
from langflow.schema.schema import INPUT_FIELD_NAME, build_logs
from langflow.services.monitor.utils import log_transaction, log_vertex_build
from langflow.utils.schemas import ChatOutputResponse, DataOutputResponse
from langflow.utils.util import unescape_string
@@ -48,7 +48,7 @@ class ComponentVertex(Vertex):
for key in self.artifacts:
self.artifacts_raw[key] = self.artifacts[key].get("raw", None)
self.artifacts_type[key] = self.artifacts[key].get("type", None) or ArtifactType.UNKNOWN.value
self.logs = build_logs_from_artifacts(self.artifacts)
self.logs = build_logs(self)
else:
self._built_object = result

View file

@@ -1,11 +1,14 @@
from datetime import datetime, timezone
from typing import Annotated, Any, AsyncIterator, Iterator, Optional
from langchain_core.load import load
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.prompt_values import ImagePromptValue
from langchain_core.prompts import BaseChatPromptTemplate, ChatPromptTemplate, PromptTemplate
from langchain_core.prompts.image import ImagePromptTemplate
from pydantic import BeforeValidator, ConfigDict, Field, field_serializer
from langflow.base.prompts.utils import dict_values_to_string
from langflow.schema.data import Data
from langflow.schema.image import Image, get_file_paths, is_image_file
@@ -110,3 +113,37 @@ class Message(Data):
image_prompt_value: ImagePromptValue = image_template.invoke(input={"path": file})
content_dicts.append({"type": "image_url", "image_url": image_prompt_value.image_url})
return content_dicts
def load_lc_prompt(self):
    """Reconstruct the serialized LangChain prompt stored on this message.

    Returns:
        The deserialized prompt object produced by ``langchain_core.load``.

    Raises:
        ValueError: if no prompt has been stored on this message.
    """
    if "prompt" in self:
        return load(self.prompt)
    raise ValueError("Prompt is required.")
@classmethod
def from_lc_prompt(
    cls,
    prompt: BaseChatPromptTemplate,
):
    """Alternate constructor: wrap *prompt* in a Message, storing it in its
    JSON-serialized form (see ``load_lc_prompt`` for the inverse)."""
    return cls(prompt=prompt.to_json())
def format_text(self):
    """Render ``self.template`` using ``self.variables``.

    Variable values are first coerced to strings, the rendered result is
    cached on ``self.text``, and the same string is returned.
    """
    rendered = PromptTemplate.from_template(self.template).format(
        **dict_values_to_string(self.variables)
    )
    self.text = rendered
    return rendered
@classmethod
async def from_template_and_variables(cls, template: str, variables: dict):
    """Build a Message by rendering *template* with *variables*.

    Any variable value that is itself a Message contributes its file
    content dicts (e.g. image payloads) to the resulting chat prompt.
    """
    instance = cls(template=template, variables=variables)
    contents = [{"type": "text", "text": instance.format_text()}]
    # Fold in attachments from Message-valued variables.
    for value in variables.values():
        if not isinstance(value, cls):
            continue
        contents.extend(await value.get_file_content_dicts())
    chat_prompt = ChatPromptTemplate.from_messages([HumanMessage(content=contents)])
    instance.messages = chat_prompt.messages
    instance.prompt = chat_prompt.to_json()
    return instance

View file

@@ -17,11 +17,11 @@ class Log(TypedDict):
type: str
def build_logs_from_artifacts(artifacts: dict) -> dict:
def build_logs(vertex) -> dict:
logs = {}
for key in artifacts:
message = artifacts[key]["raw"]
_type = artifacts[key]["type"]
for key in vertex.artifacts:
message = vertex.artifacts[key]["raw"]
_type = vertex.artifacts[key]["type"]
if "stream_url" in message and "type" in message:
stream_url = StreamURL(location=message["stream_url"])
@ -31,7 +31,3 @@ def build_logs_from_artifacts(artifacts: dict) -> dict:
logs[key] = [log]
return logs
def build_log_from_raw_and_type(raw: Any, log_type: str) -> Log:
    """Wrap a raw artifact payload and its artifact-type string into a Log entry."""
    return Log(message=raw, type=log_type)