Fix code formatting and apply tweaks

This commit is contained in:
anovazzi1 2024-01-19 22:18:39 -03:00
commit 5ffb60e90f
14 changed files with 55 additions and 35 deletions

View file

@@ -1,9 +1,17 @@
import time
from fastapi import APIRouter, Depends, HTTPException, Query, WebSocket, WebSocketException, status,Body
from fastapi import APIRouter, Depends, HTTPException, Query, WebSocket, WebSocketException, status, Body
from fastapi.responses import StreamingResponse
from langflow.api.utils import build_input_keys_response, format_elapsed_time
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, ResultDict, StreamData, VertexBuildResponse, VerticesOrderResponse
from langflow.api.v1.schemas import (
BuildStatus,
BuiltResponse,
InitResponse,
ResultDict,
StreamData,
VertexBuildResponse,
VerticesOrderResponse,
)
from langflow.graph.vertex.base import StatelessVertex
from langflow.processing.process import process_tweaks_on_graph
from langflow.services.database.models.flow.flow import Flow
@@ -237,6 +245,7 @@ async def try_running_celery_task(vertex, user_id):
await vertex.build(user_id=user_id)
return vertex
@router.get("/build/{flow_id}/vertices", response_model=VerticesOrderResponse)
async def get_vertices(
flow_id: str,
@@ -260,6 +269,7 @@ async def get_vertices(
logger.error(f"Error checking build status: {exc}")
raise HTTPException(status_code=500, detail=str(exc)) from exc
@router.post("/build/{flow_id}/vertices/{vertex_id}")
async def build_vertex(
flow_id: str,

View file

@@ -214,6 +214,7 @@ class Token(BaseModel):
class ApiKeyCreateRequest(BaseModel):
api_key: str
class VerticesOrderResponse(BaseModel):
ids: List[List[str]]
@@ -223,6 +224,7 @@ class ResultDict(BaseModel):
artifacts: Optional[Any] = Field(default_factory=dict)
"""Outputs of the vertex build process."""
class VertexBuildResponse(BaseModel):
id: str
valid: bool
@@ -235,4 +237,4 @@ class VertexBuildResponse(BaseModel):
class VerticesBuiltResponse(BaseModel):
vertices: List[VertexBuildResponse]
vertices: List[VertexBuildResponse]

View file

@@ -26,7 +26,7 @@ class AzureChatOpenAIComponent(CustomComponent):
"2023-07-01-preview",
"2023-08-01-preview",
"2023-09-01-preview",
"2023-12-01-preview"
"2023-12-01-preview",
]
def build_config(self):

View file

@@ -98,6 +98,7 @@ class Edge:
def __eq__(self, __value: object) -> bool:
return self.__repr__() == __value.__repr__() if isinstance(__value, Edge) else False
class ContractEdge(Edge):
def __init__(self, source: "Vertex", target: "Vertex", raw_edge: dict):
super().__init__(source, target, raw_edge)
@@ -132,9 +133,7 @@ class ContractEdge(Edge):
"""
# Removes all keys that the values aren't python types like str, int, bool, etc.
params = {
key: value
for key, value in target.params.items()
if isinstance(value, (str, int, bool, float, list, dict))
key: value for key, value in target.params.items() if isinstance(value, (str, int, bool, float, list, dict))
}
# if it is a list we need to check if the contents are python types
for key, value in params.items():
@@ -195,7 +194,7 @@ async def log_message(
from langflow.graph.vertex.base import Vertex
if isinstance(session_id, Vertex):
session_id = await session_id.build() # type: ignore
session_id = await session_id.build() # type: ignore
monitor_service = get_monitor_service()
row = {

View file

@@ -254,7 +254,7 @@ class Graph:
vertex_ids = [vertex.id for vertex in self.vertices]
edges_repr = "\n".join([f"{edge.source_id} --> {edge.target_id}" for edge in self.edges])
return f"Graph:\nNodes: {vertex_ids}\nConnections:\n{edges_repr}"
def layered_topological_sort(self):
in_degree = {vertex: 0 for vertex in self.vertices} # Initialize in-degrees
graph = defaultdict(list) # Adjacency list representation

View file

@@ -6,6 +6,7 @@ from langflow.interface.utils import extract_input_variables_from_prompt
class UnbuiltObject:
pass
class UnbuiltResult:
pass

View file

@@ -2,8 +2,7 @@ import ast
import inspect
import types
from enum import Enum
from typing import (TYPE_CHECKING, Any, Callable, Coroutine, Dict, List,
Optional)
from typing import TYPE_CHECKING, Any, Callable, Coroutine, Dict, List, Optional
from langflow.graph.utils import UnbuiltObject, UnbuiltResult
from langflow.interface.initialize import loading
@@ -64,7 +63,13 @@ class Vertex:
edge_results = {}
for edge in self.edges:
target = self.graph.get_vertex(edge.target_id)
if edge.is_fulfilled and isinstance(await edge.get_result(source=self, target=target, ), str):
if edge.is_fulfilled and isinstance(
await edge.get_result(
source=self,
target=target,
),
str,
):
if edge.target_id not in edge_results:
edge_results[edge.target_id] = {}
edge_results[edge.target_id][edge.target_param] = await edge.get_result(source=self, target=target)
@@ -321,7 +326,7 @@ class Vertex:
# Check if the Vertex was built already
if self._built:
return self._built_object
if self.is_task and self.task_id is not None:
task = self.get_task()

View file

@@ -67,7 +67,9 @@ Human: {input}
class MidJourneyPromptChain(BaseCustomConversationChain):
"""MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts."""
template: Optional[str] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program.
template: Optional[
str
] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program.
Your job is to provide detailed and creative descriptions that will inspire unique and interesting images from the AI.
Keep in mind that the AI is capable of understanding a wide range of language and can interpret abstract concepts, so feel free to be as imaginative and descriptive as possible.
For example, you could describe a scene from a futuristic city, or a surreal landscape filled with strange creatures.
@@ -81,7 +83,9 @@ class MidJourneyPromptChain(BaseCustomConversationChain):
class TimeTravelGuideChain(BaseCustomConversationChain):
template: Optional[str] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information.
template: Optional[
str
] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information.
Current conversation:
{history}
Human: {input}

View file

@@ -273,13 +273,13 @@ def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None:
key = tweak_name if tweak_name == "file_path" else "value"
template_data[tweak_name][key] = tweak_value
def apply_tweaks_on_vertex(vertex: Vertex, node_tweaks: Dict[str, Any]) -> None:
for tweak_name, tweak_value in node_tweaks.items():
if tweak_name and tweak_value and tweak_name in vertex.params:
vertex.params[tweak_name] = tweak_value
def process_tweaks(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]) -> Dict[str, Any]:
"""
This function is used to tweak the graph data using the node id and the tweaks dict.
@@ -305,6 +305,7 @@ def process_tweaks(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]
return graph_data
def process_tweaks_on_graph(graph: Graph, tweaks: Dict[str, Dict[str, Any]]):
for vertex in graph.vertices:
if isinstance(vertex, Vertex) and isinstance(vertex.id, str):
@@ -314,4 +315,4 @@ def process_tweaks_on_graph(graph: Graph, tweaks: Dict[str, Dict[str, Any]]):
else:
logger.warning("Each node should be a Vertex with an 'id' attribute of type str")
return graph
return graph

View file

@@ -235,7 +235,7 @@ class ChatService(Service):
except Exception as exc:
logger.error(f"Error closing connection: {exc}")
self.disconnect(client_id)
def get_cache(self, client_id: str) -> Any:
"""
Get the cache for a client.
@@ -263,4 +263,4 @@ def list_of_dicts_to_markdown_table(dict_list):
row = [str(row_dict.get(header, "")) for header in headers]
markdown_table += "| " + " | ".join(row) + " |\n"
return markdown_table
return markdown_table

View file

@@ -51,4 +51,4 @@ class FlowRead(FlowBase):
class FlowUpdate(SQLModelSerializable):
name: Optional[str] = None
description: Optional[str] = None
data: Optional[Dict] = None
data: Optional[Dict] = None

View file

@@ -5,8 +5,7 @@ from typing import TYPE_CHECKING
import duckdb
from langflow.services.base import Service
from langflow.services.monitor.schema import MessageModel, TransactionModel
from langflow.services.monitor.utils import (
add_row_to_table, drop_and_create_table_if_schema_mismatch)
from langflow.services.monitor.utils import add_row_to_table, drop_and_create_table_if_schema_mismatch
from loguru import logger
from platformdirs import user_cache_dir

View file

@@ -30,9 +30,7 @@ class SettingsService(Service):
for key in settings_dict:
if key not in Settings.__fields__.keys():
raise KeyError(f"Key {key} not found in settings")
logger.debug(
f"Loading {len(settings_dict[key])} {key} from {file_path}"
)
logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")
settings = Settings(**settings_dict)
if not settings.CONFIG_DIR:

View file

@@ -25,7 +25,7 @@ export async function buildVertices({
let orderResponse = await getVerticesOrder(flow.id);
console.log(orderResponse);
let verticesOrder: Array<Array<string>> = orderResponse.data.ids;
console.log('order', verticesOrder);
console.log("order", verticesOrder);
// Determine the range of vertices to build
let vertexIndex: number | null = null;
@@ -33,7 +33,9 @@ export async function buildVertices({
vertexIndex = verticesOrder.findIndex((ids) => ids.includes(nodeId));
}
let buildRange =
vertexIndex !== null ? verticesOrder.slice(0, vertexIndex + 1) : verticesOrder;
vertexIndex !== null
? verticesOrder.slice(0, vertexIndex + 1)
: verticesOrder;
console.log(buildRange);
const buildResults: boolean[] = [];
@@ -67,10 +69,10 @@ export async function buildVertices({
} catch (error) {
if (onBuildError) {
console.log(error);
onBuildError(
"Error Building Component",
[(error as AxiosError<any>).response?.data?.detail ?? "Unknown Error"]
);
onBuildError("Error Building Component", [
(error as AxiosError<any>).response?.data?.detail ??
"Unknown Error",
]);
}
}
})
@@ -85,10 +87,9 @@ export async function buildVertices({
} catch (error) {
// Callback for handling errors
if (onBuildError) {
onBuildError(
"Error Building Component",
[(error as AxiosError<any>).response?.data?.detail ?? "Unknown Error"]
);
onBuildError("Error Building Component", [
(error as AxiosError<any>).response?.data?.detail ?? "Unknown Error",
]);
}
}
}