Add new langflow helper functions and remove base model component
This commit is contained in:
parent
d54e4504f9
commit
827d6befec
25 changed files with 84 additions and 296 deletions
|
|
@ -1,201 +0,0 @@
|
|||
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Coroutine, List, Optional, Tuple, Type, Union
|
||||
|
||||
from pydantic.v1 import BaseModel, Field, create_model
|
||||
from sqlmodel import select
|
||||
|
||||
from langflow.schema.schema import INPUT_FIELD_NAME, Record
|
||||
from langflow.services.database.models.flow.model import Flow
|
||||
from langflow.services.deps import session_scope
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.graph.vertex.base import Vertex
|
||||
|
||||
# Maps input-component class names to the type hint and default-value source
# text used when generating keyword arguments for dynamic flow functions.
INPUT_TYPE_MAP = {
    "ChatInput": {"type_hint": "Optional[str]", "default": '""'},
    "TextInput": {"type_hint": "Optional[str]", "default": '""'},
    "JSONInput": {"type_hint": "Optional[dict]", "default": "{}"},
}
|
||||
|
||||
|
||||
def list_flows(*, user_id: Optional[str] = None) -> List["Record"]:
    """List all non-component flows owned by the given user.

    Args:
        user_id: ID of the owning user. Required; a falsy value raises.

    Returns:
        List[Record]: One record per flow, built via ``Flow.to_record()``.

    Raises:
        ValueError: If ``user_id`` is missing or the database query fails.
    """
    if not user_id:
        raise ValueError("Session is invalid")
    try:
        with session_scope() as session:
            flows = session.exec(
                select(Flow).where(Flow.user_id == user_id).where(Flow.is_component == False)  # noqa
            ).all()

            flows_records = [flow.to_record() for flow in flows]
            return flows_records
    except Exception as e:
        # Chain the original exception so the root cause is preserved
        # in the traceback instead of being silently swallowed.
        raise ValueError(f"Error listing flows: {e}") from e
|
||||
|
||||
|
||||
async def load_flow(
    user_id: str, flow_id: Optional[str] = None, flow_name: Optional[str] = None, tweaks: Optional[dict] = None
) -> "Graph":
    """Load a flow's graph by id or by name, optionally applying tweaks.

    Either ``flow_id`` or ``flow_name`` must be supplied; a name is resolved
    to an id through ``find_flow`` before the flow data is fetched.

    Raises:
        ValueError: If neither identifier is given, or the flow is not found.
    """
    from langflow.graph.graph.base import Graph
    from langflow.processing.process import process_tweaks

    if not flow_id and not flow_name:
        raise ValueError("Flow ID or Flow Name is required")
    if not flow_id and flow_name:
        flow_id = find_flow(flow_name, user_id)
        if not flow_id:
            raise ValueError(f"Flow {flow_name} not found")

    with session_scope() as session:
        flow = session.get(Flow, flow_id)
        graph_data = flow.data if flow else None
        if not graph_data:
            raise ValueError(f"Flow {flow_id} not found")
        if tweaks:
            graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks)
        return Graph.from_payload(graph_data, flow_id=flow_id)
|
||||
|
||||
|
||||
def find_flow(flow_name: str, user_id: str) -> Optional[str]:
    """Return the id of the user's flow named ``flow_name``, or None if absent."""
    with session_scope() as session:
        stmt = select(Flow).where(Flow.name == flow_name).where(Flow.user_id == user_id)
        match = session.exec(stmt).first()
        return match.id if match else None
|
||||
|
||||
|
||||
async def run_flow(
    inputs: Optional[Union[dict, List[dict]]] = None,
    tweaks: Optional[dict] = None,
    flow_id: Optional[str] = None,
    flow_name: Optional[str] = None,
    user_id: Optional[str] = None,
) -> Any:
    """Load a flow and run it with the given inputs.

    Args:
        inputs: One input dict, or a list of them. Each dict may carry
            ``input_value``, ``components`` and ``type`` keys.
        tweaks: Optional tweaks applied to the graph before running.
        flow_id: ID of the flow to run (or use ``flow_name``).
        flow_name: Name of the flow to run (resolved to an id).
        user_id: ID of the requesting user. Required.

    Returns:
        Whatever ``graph.arun`` returns for the assembled inputs.

    Raises:
        ValueError: If ``user_id`` is missing, or the flow cannot be found.
    """
    if not user_id:
        raise ValueError("Session is invalid")
    graph = await load_flow(user_id, flow_id, flow_name, tweaks)

    if inputs is None:
        inputs = []
    # NOTE(review): a bare dict is iterated by keys here — presumably callers
    # always pass a list of dicts; confirm before tightening the annotation.
    inputs_list = []
    inputs_components = []
    types = []
    for input_dict in inputs:
        # Default to "" so a missing input_value never injects None downstream.
        inputs_list.append({INPUT_FIELD_NAME: input_dict.get("input_value", "")})
        inputs_components.append(input_dict.get("components", []))
        types.append(input_dict.get("type", []))

    return await graph.arun(inputs_list, inputs_components=inputs_components, types=types)
|
||||
|
||||
|
||||
def generate_function_for_flow(inputs: List["Vertex"], flow_id: str) -> Callable[..., Awaitable[Any]]:
    """
    Generate a dynamic flow function based on the given inputs and flow ID.

    Args:
        inputs (List[Vertex]): The list of input vertices for the flow.
        flow_id (str): The ID of the flow.

    Returns:
        Callable[..., Awaitable[Any]]: An async function whose keyword
        arguments mirror the flow's inputs and which runs the flow when
        awaited. (The previous ``Coroutine`` annotation was wrong: this
        returns a function, not a coroutine object.)

    Example:
        inputs = [vertex1, vertex2]
        flow_id = "my_flow"
        function = generate_function_for_flow(inputs, flow_id)
        result = await function(input1, input2)
    """
    # Prepare function arguments with type hints and default values,
    # e.g. 'chat_input: Optional[str] = ""'.
    args = [
        f"{input_.display_name.lower().replace(' ', '_')}: "
        f"{INPUT_TYPE_MAP[input_.base_name]['type_hint']} = {INPUT_TYPE_MAP[input_.base_name]['default']}"
        for input_ in inputs
    ]

    # Maintain original argument names for constructing the tweaks dictionary.
    original_arg_names = [input_.display_name for input_ in inputs]

    # Prepare a Pythonic, valid function argument string.
    func_args = ", ".join(args)

    # Map original argument names to their corresponding Pythonic variable
    # names in the generated function.
    arg_mappings = ", ".join(
        f'"{original_name}": {name}'
        for original_name, name in zip(original_arg_names, [arg.split(":")[0] for arg in args])
    )

    func_body = f"""
from typing import Optional
async def flow_function({func_args}):
    tweaks = {{ {arg_mappings} }}
    from langflow.helpers.flow import run_flow
    from langchain_core.tools import ToolException
    try:
        return await run_flow(
            tweaks={{key: {{'input_value': value}} for key, value in tweaks.items()}},
            flow_id="{flow_id}",
        )
    except Exception as e:
        # str(e) is required: concatenating the exception object itself
        # ('...' + e) raises TypeError and masks the real error.
        raise ToolException('Error running flow: ' + str(e))
"""

    compiled_func = compile(func_body, "<string>", "exec")
    local_scope: dict = {}
    exec(compiled_func, globals(), local_scope)
    return local_scope["flow_function"]
|
||||
|
||||
|
||||
def build_function_and_schema(flow_record: Record, graph: "Graph") -> Tuple[Callable, Type[BaseModel]]:
    """
    Builds a dynamic function and schema for a given flow.

    Args:
        flow_record (Record): The flow record containing information about the flow.
        graph (Graph): The graph representing the flow.

    Returns:
        Tuple[Callable, Type[BaseModel]]: The dynamic flow function and the
        pydantic model *class* describing its arguments (``create_model``
        returns a class, not an instance — hence ``Type[BaseModel]``).
    """
    flow_id = flow_record.id
    inputs = get_flow_inputs(graph)
    dynamic_flow_function = generate_function_for_flow(inputs, flow_id)
    schema = build_schema_from_inputs(flow_record.name, inputs)
    return dynamic_flow_function, schema
|
||||
|
||||
|
||||
def get_flow_inputs(graph: "Graph") -> List["Vertex"]:
    """
    Collect the input vertices of a flow graph.

    Args:
        graph (Graph): The graph object representing the flow.

    Returns:
        List[Vertex]: Every vertex whose ``is_input`` flag is set, in
        graph order.
    """
    return [vertex for vertex in graph.vertices if vertex.is_input]
|
||||
|
||||
|
||||
def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> Type[BaseModel]:
    """
    Build a pydantic schema class from the given input vertices.

    Args:
        name (str): The name of the schema.
        inputs (List[Vertex]): Input vertices; each contributes one string
            field named after its (normalized) display name and described by
            its description. (The old docstring wrongly documented this as a
            list of tuples.)

    Returns:
        Type[BaseModel]: The generated schema model class — ``create_model``
        returns a class, not an instance.
    """
    fields = {}
    for input_ in inputs:
        # Display names may contain spaces; normalize to valid identifiers.
        field_name = input_.display_name.lower().replace(" ", "_")
        description = input_.description
        fields[field_name] = (str, Field(default="", description=description))
    return create_model(name, **fields)
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
from langchain_core.documents import Document
|
||||
|
||||
from langflow.schema import Record
|
||||
|
||||
|
||||
def docs_to_records(documents: list[Document]) -> list[Record]:
    """
    Convert langchain Documents into langflow Records.

    Args:
        documents (list[Document]): The Documents to convert.

    Returns:
        list[Record]: One Record per Document, via ``Record.from_document``.
    """
    return [Record.from_document(doc) for doc in documents]
|
||||
|
||||
|
||||
def records_to_text(template: str, records: list[Record]) -> str:
    """
    Format each Record with a template and join the results into one string.

    (The old docstring claimed a ``list[str]`` return; the function has
    always returned a single newline-joined string.)

    Args:
        template (str): A format string; ``{data}`` receives the record's
            data dict, and each key of ``record.data`` is also available
            as a named placeholder.
        records (list[Record]): The Records to format. A single Record is
            accepted and treated as a one-element list.

    Returns:
        str: The formatted records joined by newlines.
    """
    if isinstance(records, Record):
        records = [records]

    formatted_records = [template.format(data=record.data, **record.data) for record in records]
    return "\n".join(formatted_records)
|
||||
|
|
@ -164,7 +164,7 @@ def get_is_component_from_data(data: dict):
|
|||
|
||||
|
||||
async def check_langflow_version(component: StoreComponentCreate):
|
||||
from langflow import __version__ as current_version
|
||||
from langflow.version import __version__ as current_version
|
||||
|
||||
if not component.last_tested_version:
|
||||
component.last_tested_version = current_version
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import time
|
||||
import uuid
|
||||
from functools import partial
|
||||
from typing import TYPE_CHECKING, Annotated, Optional
|
||||
from typing import TYPE_CHECKING, Annotated, Callable, Optional
|
||||
|
||||
from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from typing import List, Optional, Union
|
||||
|
||||
from langchain.agents.agent import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent, RunnableMultiActionAgent
|
||||
from langchain.agents.agent import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent
|
||||
from langchain_core.runnables import Runnable
|
||||
|
||||
from langflow.field_typing import BaseMemory, Text, Tool
|
||||
|
|
@ -50,11 +50,13 @@ class LCAgentComponent(CustomComponent):
|
|||
) -> Text:
|
||||
if isinstance(agent, AgentExecutor):
|
||||
runnable = agent
|
||||
elif isinstance(agent, Runnable):
|
||||
runnable = RunnableMultiActionAgent(runnable=agent, stream_runnable=False)
|
||||
else:
|
||||
runnable = AgentExecutor.from_agent_and_tools(
|
||||
agent=agent, tools=tools, verbose=True, memory=memory, handle_parsing_errors=handle_parsing_errors
|
||||
agent=agent, # type: ignore
|
||||
tools=tools,
|
||||
verbose=True,
|
||||
memory=memory,
|
||||
handle_parsing_errors=handle_parsing_errors,
|
||||
)
|
||||
input_dict = {"input": inputs}
|
||||
for var in input_variables:
|
||||
|
|
@ -70,4 +72,5 @@ class LCAgentComponent(CustomComponent):
|
|||
else:
|
||||
raise ValueError("Output key not found in result. Tried 'output'.")
|
||||
|
||||
return result.get("output")
|
||||
output: str = result.get("output")
|
||||
return output
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from typing import Optional
|
||||
from typing import Optional, Union
|
||||
|
||||
from langchain_core.language_models.chat_models import BaseChatModel
|
||||
from langchain_core.language_models.llms import LLM
|
||||
|
|
@ -34,11 +34,11 @@ class LCModelComponent(CustomComponent):
|
|||
def get_chat_result(
|
||||
self, runnable: BaseChatModel, stream: bool, input_value: str, system_message: Optional[str] = None
|
||||
):
|
||||
messages = []
|
||||
if input_value:
|
||||
messages.append(HumanMessage(input_value))
|
||||
messages: list[Union[HumanMessage, SystemMessage]] = []
|
||||
if system_message:
|
||||
messages.append(SystemMessage(system_message))
|
||||
messages.append(SystemMessage(content=system_message))
|
||||
if input_value:
|
||||
messages.append(HumanMessage(content=input_value))
|
||||
if stream:
|
||||
result = runnable.stream(messages)
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ class CohereEmbeddingsComponent(CustomComponent):
|
|||
self,
|
||||
request_timeout: Optional[float] = None,
|
||||
cohere_api_key: str = "",
|
||||
max_retries: Optional[int] = None,
|
||||
max_retries: int = 3,
|
||||
model: str = "embed-english-v2.0",
|
||||
truncate: Optional[str] = None,
|
||||
user_agent: str = "langchain",
|
||||
|
|
|
|||
|
|
@ -100,8 +100,6 @@ class OpenAIEmbeddingsComponent(CustomComponent):
|
|||
if disallowed_special == ["all"]:
|
||||
disallowed_special = "all" # type: ignore
|
||||
|
||||
api_key = SecretStr(openai_api_key) if openai_api_key else None
|
||||
|
||||
return OpenAIEmbeddings(
|
||||
tiktoken_enabled=tiktoken_enable,
|
||||
default_headers=default_headers,
|
||||
|
|
@ -116,7 +114,7 @@ class OpenAIEmbeddingsComponent(CustomComponent):
|
|||
model=model,
|
||||
model_kwargs=model_kwargs,
|
||||
base_url=openai_api_base,
|
||||
api_key=api_key,
|
||||
api_key=openai_api_key,
|
||||
openai_api_type=openai_api_type,
|
||||
api_version=openai_api_version,
|
||||
organization=openai_organization,
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from typing import Any, List, Optional, Text
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from langchain_core.tools import StructuredTool
|
||||
from loguru import logger
|
||||
|
|
@ -8,6 +8,7 @@ from langflow.field_typing import Tool
|
|||
from langflow.graph.graph.base import Graph
|
||||
from langflow.helpers.flow import build_function_and_schema
|
||||
from langflow.schema.dotdict import dotdict
|
||||
from langflow.schema.schema import Record
|
||||
|
||||
|
||||
class FlowToolComponent(CustomComponent):
|
||||
|
|
@ -19,7 +20,7 @@ class FlowToolComponent(CustomComponent):
|
|||
flow_records = self.list_flows()
|
||||
return [flow_record.data["name"] for flow_record in flow_records]
|
||||
|
||||
def get_flow(self, flow_name: str) -> Optional[Text]:
|
||||
def get_flow(self, flow_name: str) -> Optional[Record]:
|
||||
"""
|
||||
Retrieves a flow by its name.
|
||||
|
||||
|
|
@ -82,4 +83,4 @@ class FlowToolComponent(CustomComponent):
|
|||
description_repr = repr(tool.description).strip("'")
|
||||
args_str = "\n".join([f"- {arg_name}: {arg_data['description']}" for arg_name, arg_data in tool.args.items()])
|
||||
self.status = f"{description_repr}\nArguments:\n{args_str}"
|
||||
return tool
|
||||
return tool # type: ignore
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from typing import Any, List, Optional, Text, Tuple
|
||||
from typing import Any, List, Optional, Tuple
|
||||
|
||||
from loguru import logger
|
||||
|
||||
|
|
@ -20,7 +20,7 @@ class SubFlowComponent(CustomComponent):
|
|||
flow_records = self.list_flows()
|
||||
return [flow_record.data["name"] for flow_record in flow_records]
|
||||
|
||||
def get_flow(self, flow_name: str) -> Optional[Text]:
|
||||
def get_flow(self, flow_name: str) -> Optional[Record]:
|
||||
flow_records = self.list_flows()
|
||||
for flow_record in flow_records:
|
||||
if flow_record.data["name"] == flow_name:
|
||||
|
|
@ -110,12 +110,15 @@ class SubFlowComponent(CustomComponent):
|
|||
tweaks=tweaks,
|
||||
flow_name=flow_name,
|
||||
)
|
||||
if not run_outputs:
|
||||
return []
|
||||
run_output = run_outputs[0]
|
||||
|
||||
records = []
|
||||
for output in run_output.outputs:
|
||||
if output:
|
||||
records.extend(self.build_records_from_result_data(output))
|
||||
if run_outputs is not None:
|
||||
for output in run_output.outputs:
|
||||
if output:
|
||||
records.extend(self.build_records_from_result_data(output))
|
||||
|
||||
self.status = records
|
||||
logger.debug(records)
|
||||
|
|
|
|||
|
|
@ -1,3 +0,0 @@
|
|||
from .model import LCModelComponent
|
||||
|
||||
__all__ = ["LCModelComponent"]
|
||||
|
|
@ -34,4 +34,4 @@ class SearchApiToolComponent(CustomComponent):
|
|||
tool = SearchAPIRun(api_wrapper=search_api_wrapper)
|
||||
|
||||
self.status = tool
|
||||
return tool
|
||||
return tool # type: ignore
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
from typing import List, Optional
|
||||
|
||||
from langchain_astradb import AstraDBVectorStore
|
||||
from langchain_astradb.utils.astradb import SetupMode
|
||||
|
||||
from langflow.custom import CustomComponent
|
||||
from langflow.field_typing import Embeddings, VectorStore
|
||||
|
|
@ -83,6 +84,10 @@ class AstraDBVectorStoreComponent(CustomComponent):
|
|||
metadata_indexing_exclude: Optional[List[str]] = None,
|
||||
collection_indexing_policy: Optional[dict] = None,
|
||||
) -> VectorStore:
|
||||
try:
|
||||
setup_mode_value = SetupMode[setup_mode.upper()]
|
||||
except KeyError:
|
||||
raise ValueError(f"Invalid setup mode: {setup_mode}")
|
||||
if inputs:
|
||||
documents = [_input.to_lc_document() for _input in inputs]
|
||||
|
||||
|
|
@ -98,7 +103,7 @@ class AstraDBVectorStoreComponent(CustomComponent):
|
|||
bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,
|
||||
bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,
|
||||
bulk_delete_concurrency=bulk_delete_concurrency,
|
||||
setup_mode=setup_mode,
|
||||
setup_mode=setup_mode_value,
|
||||
pre_delete_collection=pre_delete_collection,
|
||||
metadata_indexing_include=metadata_indexing_include,
|
||||
metadata_indexing_exclude=metadata_indexing_exclude,
|
||||
|
|
@ -116,7 +121,7 @@ class AstraDBVectorStoreComponent(CustomComponent):
|
|||
bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,
|
||||
bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,
|
||||
bulk_delete_concurrency=bulk_delete_concurrency,
|
||||
setup_mode=setup_mode,
|
||||
setup_mode=setup_mode_value,
|
||||
pre_delete_collection=pre_delete_collection,
|
||||
metadata_indexing_include=metadata_indexing_include,
|
||||
metadata_indexing_exclude=metadata_indexing_exclude,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from langflow.field_typing import Embeddings, Text
|
|||
from langflow.schema import Record
|
||||
|
||||
|
||||
class AstraDBSearchComponent(AstraDBVectorStoreComponent, LCVectorStoreComponent):
|
||||
class AstraDBSearchComponent(LCVectorStoreComponent):
|
||||
display_name = "AstraDB Search"
|
||||
description = "Searches an existing AstraDB Vector Store"
|
||||
|
||||
|
|
@ -90,7 +90,7 @@ class AstraDBSearchComponent(AstraDBVectorStoreComponent, LCVectorStoreComponent
|
|||
metadata_indexing_exclude: Optional[List[str]] = None,
|
||||
collection_indexing_policy: Optional[dict] = None,
|
||||
) -> List[Record]:
|
||||
vector_store = super().build(
|
||||
vector_store = AstraDBVectorStoreComponent().build(
|
||||
embedding=embedding,
|
||||
collection_name=collection_name,
|
||||
token=token,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from langflow.field_typing import Embeddings, NestedDict, Text
|
|||
from langflow.schema import Record
|
||||
|
||||
|
||||
class MongoDBAtlasSearchComponent(MongoDBAtlasComponent, LCVectorStoreComponent):
|
||||
class MongoDBAtlasSearchComponent(LCVectorStoreComponent):
|
||||
display_name = "MongoDB Atlas Search"
|
||||
description = "Search a MongoDB Atlas Vector Store for similar documents."
|
||||
|
||||
|
|
@ -37,9 +37,10 @@ class MongoDBAtlasSearchComponent(MongoDBAtlasComponent, LCVectorStoreComponent)
|
|||
search_kwargs: Optional[NestedDict] = None,
|
||||
) -> List[Record]:
|
||||
search_kwargs = search_kwargs or {}
|
||||
vector_store = super().build(
|
||||
vector_store = MongoDBAtlasComponent().build(
|
||||
mongodb_atlas_cluster_uri=mongodb_atlas_cluster_uri,
|
||||
namespace=f"{db_name}.{collection_name}",
|
||||
collection_name=collection_name,
|
||||
db_name=db_name,
|
||||
embedding=embedding,
|
||||
index_name=index_name,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import asyncio
|
||||
from collections import defaultdict, deque
|
||||
from itertools import chain
|
||||
from typing import TYPE_CHECKING, Coroutine, Dict, Generator, List, Optional, Type, Union
|
||||
from typing import TYPE_CHECKING, Callable, Coroutine, Dict, Generator, List, Literal, Optional, Type, Union
|
||||
|
||||
from loguru import logger
|
||||
|
||||
|
|
@ -269,9 +269,9 @@ class Graph:
|
|||
|
||||
def run(
|
||||
self,
|
||||
inputs: Dict[str, str],
|
||||
input_components: Optional[list[str]] = None,
|
||||
types: Optional[list[str]] = None,
|
||||
inputs: list[Dict[str, str]],
|
||||
input_components: Optional[list[list[str]]] = None,
|
||||
types: Optional[list[Literal["chat", "text", "json", "any"] | None]] = None,
|
||||
outputs: Optional[list[str]] = None,
|
||||
session_id: Optional[str] = None,
|
||||
stream: bool = False,
|
||||
|
|
@ -309,7 +309,7 @@ class Graph:
|
|||
self,
|
||||
inputs: list[Dict[str, str]],
|
||||
inputs_components: Optional[list[list[str]]] = None,
|
||||
types: Optional[list[str]] = None,
|
||||
types: Optional[list[Literal["chat", "text", "json", "any"] | None]] = None,
|
||||
outputs: Optional[list[str]] = None,
|
||||
session_id: Optional[str] = None,
|
||||
stream: bool = False,
|
||||
|
|
@ -338,8 +338,12 @@ class Graph:
|
|||
inputs = [{}]
|
||||
# Length of all should be the as inputs length
|
||||
# just add empty lists to complete the length
|
||||
if inputs_components is None:
|
||||
inputs_components = []
|
||||
for _ in range(len(inputs) - len(inputs_components)):
|
||||
inputs_components.append([])
|
||||
if types is None:
|
||||
types = []
|
||||
for _ in range(len(inputs) - len(types)):
|
||||
types.append("any")
|
||||
for run_inputs, components, input_type in zip(inputs, inputs_components, types):
|
||||
|
|
@ -650,7 +654,7 @@ class Graph:
|
|||
async def build_vertex(
|
||||
self,
|
||||
lock: asyncio.Lock,
|
||||
set_cache_coro: Coroutine,
|
||||
set_cache_coro: Callable[["Graph", asyncio.Lock], Coroutine],
|
||||
vertex_id: str,
|
||||
inputs_dict: Optional[Dict[str, str]] = None,
|
||||
user_id: Optional[str] = None,
|
||||
|
|
@ -693,7 +697,9 @@ class Graph:
|
|||
logger.exception(f"Error building vertex: {exc}")
|
||||
raise exc
|
||||
|
||||
async def get_next_and_top_level_vertices(self, lock: asyncio.Lock, set_cache_coro: Coroutine, vertex: Vertex):
|
||||
async def get_next_and_top_level_vertices(
|
||||
self, lock: asyncio.Lock, set_cache_coro: Callable[["Graph", asyncio.Lock], Coroutine], vertex: Vertex
|
||||
):
|
||||
"""
|
||||
Retrieves the next runnable vertices and the top level vertices for a given vertex.
|
||||
|
||||
|
|
|
|||
|
|
@ -85,7 +85,7 @@ class RunnableVerticesManager:
|
|||
for v_id in set(next_runnable_vertices): # Use set to avoid duplicates
|
||||
self.update_vertex_run_state(v_id, is_runnable=False)
|
||||
self.remove_from_predecessors(v_id)
|
||||
await set_cache_coro(data=graph, lock=lock)
|
||||
await set_cache_coro(graph, lock)
|
||||
return next_runnable_vertices
|
||||
|
||||
@staticmethod
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from typing import TYPE_CHECKING, Any, Coroutine, List, Optional, Tuple, Union
|
||||
from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple, Type, Union, cast
|
||||
|
||||
from pydantic.v1 import BaseModel, Field, create_model
|
||||
from sqlmodel import select
|
||||
|
|
@ -63,7 +63,7 @@ def find_flow(flow_name: str, user_id: str) -> Optional[str]:
|
|||
|
||||
|
||||
async def run_flow(
|
||||
inputs: Union[dict, List[dict]] = None,
|
||||
inputs: Optional[Union[dict, List[dict]]] = None,
|
||||
tweaks: Optional[dict] = None,
|
||||
flow_id: Optional[str] = None,
|
||||
flow_name: Optional[str] = None,
|
||||
|
|
@ -75,18 +75,18 @@ async def run_flow(
|
|||
|
||||
if inputs is None:
|
||||
inputs = []
|
||||
inputs_list = []
|
||||
inputs_list: list[dict[str, str]] = []
|
||||
inputs_components = []
|
||||
types = []
|
||||
for input_dict in inputs:
|
||||
inputs_list.append({INPUT_FIELD_NAME: input_dict.get("input_value", "")})
|
||||
inputs_list.append({INPUT_FIELD_NAME: cast(str, input_dict.get("input_value", ""))})
|
||||
inputs_components.append(input_dict.get("components", []))
|
||||
types.append(input_dict.get("type", []))
|
||||
|
||||
return await graph.arun(inputs_list, inputs_components=inputs_components, types=types)
|
||||
|
||||
|
||||
def generate_function_for_flow(inputs: List["Vertex"], flow_id: str) -> Coroutine:
|
||||
def generate_function_for_flow(inputs: List["Vertex"], flow_id: str) -> Callable[..., Awaitable[Any]]:
|
||||
"""
|
||||
Generate a dynamic flow function based on the given inputs and flow ID.
|
||||
|
||||
|
|
@ -145,7 +145,9 @@ async def flow_function({func_args}):
|
|||
return local_scope["flow_function"]
|
||||
|
||||
|
||||
def build_function_and_schema(flow_record: Record, graph: "Graph") -> Tuple[Coroutine, BaseModel]:
|
||||
def build_function_and_schema(
|
||||
flow_record: Record, graph: "Graph"
|
||||
) -> Tuple[Callable[..., Awaitable[Any]], Type[BaseModel]]:
|
||||
"""
|
||||
Builds a dynamic function and schema for a given flow.
|
||||
|
||||
|
|
@ -180,7 +182,7 @@ def get_flow_inputs(graph: "Graph") -> List["Vertex"]:
|
|||
return inputs
|
||||
|
||||
|
||||
def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> BaseModel:
|
||||
def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> Type[BaseModel]:
|
||||
"""
|
||||
Builds a schema from the given inputs.
|
||||
|
||||
|
|
@ -198,4 +200,4 @@ def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> BaseModel:
|
|||
field_name = input_.display_name.lower().replace(" ", "_")
|
||||
description = input_.description
|
||||
fields[field_name] = (str, Field(default="", description=description))
|
||||
return create_model(name, **fields)
|
||||
return create_model(name, **fields) # type: ignore
|
||||
|
|
|
|||
|
|
@ -423,11 +423,13 @@ class CustomComponent(Component):
|
|||
return validate.create_function(self.code, self.function_entrypoint_name)
|
||||
|
||||
async def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> "Graph":
|
||||
return await load_flow(flow_id, tweaks)
|
||||
if not self._user_id:
|
||||
raise ValueError("Session is invalid")
|
||||
return await load_flow(user_id=self._user_id, flow_id=flow_id, tweaks=tweaks)
|
||||
|
||||
async def run_flow(
|
||||
self,
|
||||
inputs: Union[dict, List[dict]] = None,
|
||||
inputs: Optional[Union[dict, List[dict]]] = None,
|
||||
flow_id: Optional[str] = None,
|
||||
flow_name: Optional[str] = None,
|
||||
tweaks: Optional[dict] = None,
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ async def instantiate_class(
|
|||
user_id=None,
|
||||
) -> Any:
|
||||
"""Instantiate class from module type and key, and params"""
|
||||
from langflow.legacy_custom.customs import CUSTOM_NODES
|
||||
from langflow.interface.custom_lists import CUSTOM_NODES
|
||||
|
||||
vertex_type = vertex.vertex_type
|
||||
base_type = vertex.base_type
|
||||
|
|
@ -50,7 +50,9 @@ async def instantiate_class(
|
|||
if custom_node := CUSTOM_NODES.get(vertex_type):
|
||||
if hasattr(custom_node, "initialize"):
|
||||
return custom_node.initialize(**params)
|
||||
return custom_node(**params)
|
||||
if callable(custom_node):
|
||||
return custom_node(**params)
|
||||
raise ValueError(f"Custom node {vertex_type} is not callable")
|
||||
logger.debug(f"Instantiating {vertex_type} of type {base_type}")
|
||||
if not base_type:
|
||||
raise ValueError("No base type provided for vertex")
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from langflow.template import frontend_node
|
||||
|
||||
# These should always be instantiated
|
||||
CUSTOM_NODES = {
|
||||
CUSTOM_NODES: dict[str, dict[str, frontend_node.base.FrontendNode]] = {
|
||||
# "prompts": {
|
||||
# "ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode(),
|
||||
# },
|
||||
|
|
|
|||
|
|
@ -125,7 +125,7 @@ class Result(BaseModel):
|
|||
|
||||
|
||||
async def run_graph(
|
||||
graph: Union["Graph", dict],
|
||||
graph: "Graph",
|
||||
flow_id: str,
|
||||
stream: bool,
|
||||
session_id: Optional[str] = None,
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ class AuthSettings(BaseSettings):
|
|||
# Login settings
|
||||
CONFIG_DIR: str
|
||||
SECRET_KEY: SecretStr = Field(
|
||||
default="",
|
||||
default=SecretStr(""),
|
||||
description="Secret key for JWT. If not provided, a random one will be generated.",
|
||||
frozen=False,
|
||||
)
|
||||
|
|
@ -86,9 +86,10 @@ class AuthSettings(BaseSettings):
|
|||
|
||||
secret_key_path = Path(config_dir) / "secret_key"
|
||||
|
||||
if value:
|
||||
if value and isinstance(value, SecretStr):
|
||||
logger.debug("Secret key provided")
|
||||
write_secret_to_file(secret_key_path, value)
|
||||
secret_value = value.get_secret_value()
|
||||
write_secret_to_file(secret_key_path, secret_value)
|
||||
else:
|
||||
logger.debug("No secret key provided, generating a random one")
|
||||
|
||||
|
|
@ -104,4 +105,4 @@ class AuthSettings(BaseSettings):
|
|||
write_secret_to_file(secret_key_path, value)
|
||||
logger.debug("Saved secret key")
|
||||
|
||||
return value
|
||||
return value if isinstance(value, SecretStr) else SecretStr(value)
|
||||
|
|
|
|||
|
|
@ -10,10 +10,12 @@ from langflow.template.frontend_node import (
|
|||
textsplitters,
|
||||
tools,
|
||||
vectorstores,
|
||||
base,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"agents",
|
||||
"base",
|
||||
"chains",
|
||||
"embeddings",
|
||||
"memories",
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue