ref: Add ruff rules for boolean trap (FBT) (#4126)

Commit f5ffbc414b by Christophe Bornet, 2024-10-14 18:41:16 +02:00 (committed via GitHub)
56 changed files with 158 additions and 102 deletions
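
The change is mechanical across the diff: ruff's flake8-boolean-trap (FBT) rules flag boolean positional parameters, because a bare True/False at a call site says nothing about what it toggles. The fix applied throughout is to make such flags keyword-only with a bare * separator and to pass them by name. A condensed before/after sketch, based on the stylize_text hunks below (function body simplified for the sketch):

# Before (boolean trap): the call site carries an unexplained literal.
def stylize_text(text: str, to_style: str, is_prerelease: bool) -> str:
    color = "#42a7f5" if is_prerelease else "#6e42f5"
    return text.replace(to_style, f"[{color}]{to_style}[/]")

stylize_text("A new version of langflow is available", "langflow", True)

# After: the bare * makes the flag keyword-only, so every caller must name it.
def stylize_text(text: str, to_style: str, *, is_prerelease: bool) -> str:
    color = "#42a7f5" if is_prerelease else "#6e42f5"
    return text.replace(to_style, f"[{color}]{to_style}[/]")

stylize_text("A new version of langflow is available", "langflow", is_prerelease=True)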


@ -287,7 +287,7 @@ def get_letter_from_version(version: str):
def build_version_notice(current_version: str, package_name: str) -> str:
latest_version = fetch_latest_version(package_name, langflow_is_pre_release(current_version))
latest_version = fetch_latest_version(package_name, include_prerelease=langflow_is_pre_release(current_version))
if latest_version and pkg_version.parse(current_version) < pkg_version.parse(latest_version):
release_type = "pre-release" if langflow_is_pre_release(latest_version) else "version"
return f"A new {release_type} of {package_name} is available: {latest_version}"
@ -302,7 +302,7 @@ def generate_pip_command(package_names, is_pre_release):
return f"{base_command} {' '.join(package_names)} -U"
def stylize_text(text: str, to_style: str, is_prerelease: bool) -> str:
def stylize_text(text: str, to_style: str, *, is_prerelease: bool) -> str:
color = "#42a7f5" if is_prerelease else "#6e42f5"
# return "".join(f"[{color}]{char}[/]" for char in text)
styled_text = f"[{color}]{to_style}[/]"
@ -322,7 +322,7 @@ def print_banner(host: str, port: int):
is_pre_release |= langflow_is_pre_release(langflow_version) # Update pre-release status
notice = build_version_notice(langflow_version, package_name)
notice = stylize_text(notice, package_name, is_pre_release)
notice = stylize_text(notice, package_name, is_prerelease=is_pre_release)
if notice:
notices.append(notice)
package_names.append(package_name)
@ -335,7 +335,9 @@ def print_banner(host: str, port: int):
notices.append(f"Run '{pip_command}' to update.")
styled_notices = [f"[bold]{notice}[/bold]" for notice in notices if notice]
styled_package_name = stylize_text(package_name, package_name, any("pre-release" in notice for notice in notices))
styled_package_name = stylize_text(
package_name, package_name, is_prerelease=any("pre-release" in notice for notice in notices)
)
title = f"[bold]Welcome to :chains: {styled_package_name}[/bold]\n"
info_text = (
@ -445,9 +447,9 @@ def copy_db():
@app.command()
def migration(
test: bool = typer.Option(True, help="Run migrations in test mode."),
fix: bool = typer.Option(
False,
test: bool = typer.Option(default=True, help="Run migrations in test mode."), # noqa: FBT001
fix: bool = typer.Option( # noqa: FBT001
default=False,
help="Fix migrations. This is a destructive operation, and should only be used if you know what you are doing.",
),
):
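
Not every boolean gets the keyword-only treatment: in the Typer command above, the parameters are the CLI options themselves, so the commit silences FBT001 inline and only moves the boolean literal into the default= keyword, while ordinary library code is refactored. A brief contrast, as a sketch (the run_migrations signature appears in a later hunk):

# Library code: the flag becomes keyword-only.
def run_migrations(*, fix: bool = False) -> None:
    ...

# CLI entry point: the boolean parameter stays in the signature and the check
# is suppressed instead:
#     test: bool = typer.Option(default=True, help="Run migrations in test mode."),  # noqa: FBT001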


@ -141,6 +141,7 @@ async def retrieve_vertices_order(
@router.post("/build/{flow_id}/flow")
async def build_flow(
*,
background_tasks: BackgroundTasks,
flow_id: uuid.UUID,
inputs: Annotated[InputValueRequest | None, Body(embed=True)] = None,


@ -59,6 +59,7 @@ router = APIRouter(tags=["Base"])
@router.get("/all", dependencies=[Depends(get_current_active_user)])
async def get_all(
*,
settings_service=Depends(get_settings_service),
):
from langflow.interface.types import get_and_cache_all_types_dict
@ -96,6 +97,7 @@ def validate_input_and_tweaks(input_request: SimplifiedAPIRequest):
async def simple_run_flow(
flow: Flow,
input_request: SimplifiedAPIRequest,
*,
stream: bool = False,
api_key_user: User | None = None,
):
@ -144,6 +146,7 @@ async def simple_run_flow(
async def simple_run_flow_task(
flow: Flow,
input_request: SimplifiedAPIRequest,
*,
stream: bool = False,
api_key_user: User | None = None,
):
@ -162,6 +165,7 @@ async def simple_run_flow_task(
@router.post("/run/{flow_id_or_name}", response_model=RunResponse, response_model_exclude_none=True) # noqa: RUF100, FAST003
async def simplified_run_flow(
*,
background_tasks: BackgroundTasks,
flow: Annotated[FlowRead | None, Depends(get_flow_by_id_or_endpoint_name)],
input_request: SimplifiedAPIRequest | None = None,
@ -361,6 +365,7 @@ async def webhook_run_flow(
@router.post("/run/advanced/{flow_id}", response_model=RunResponse, response_model_exclude_none=True)
async def experimental_run_flow(
*,
session: Annotated[Session, Depends(get_session)],
flow_id: UUID,
inputs: list[InputValueRequest] | None = None,


@ -101,6 +101,7 @@ async def update_shared_component(
@router.get("/components/", response_model=ListComponentResponseModel)
async def get_components(
*,
component_id: Annotated[str | None, Query()] = None,
search: Annotated[str | None, Query()] = None,
private: Annotated[bool | None, Query()] = None,


@ -75,7 +75,7 @@ class LCAgentComponent(Component):
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
def get_agent_kwargs(self, flatten: bool = False) -> dict:
def get_agent_kwargs(self, *, flatten: bool = False) -> dict:
base = {
"handle_parsing_errors": self.handle_parsing_errors,
"verbose": self.verbose,


@ -46,6 +46,7 @@ def is_hidden(path: Path) -> bool:
def retrieve_file_paths(
path: str,
*,
load_hidden: bool,
recursive: bool,
depth: int,
@ -74,7 +75,7 @@ def retrieve_file_paths(
return [str(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)]
def partition_file_to_data(file_path: str, silent_errors: bool) -> Data | None:
def partition_file_to_data(file_path: str, *, silent_errors: bool) -> Data | None:
# Use the partition function to load the file
from unstructured.partition.auto import partition
@ -122,7 +123,7 @@ def parse_pdf_to_text(file_path: str) -> str:
return "\n\n".join([page.extract_text() for page in reader.pages])
def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Data | None:
def parse_text_file_to_data(file_path: str, *, silent_errors: bool) -> Data | None:
try:
if file_path.endswith(".pdf"):
text = parse_pdf_to_text(file_path)
@ -172,13 +173,14 @@ def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Data | None:
def parallel_load_data(
file_paths: list[str],
*,
silent_errors: bool,
max_concurrency: int,
load_function: Callable = parse_text_file_to_data,
) -> list[Data | None]:
with futures.ThreadPoolExecutor(max_workers=max_concurrency) as executor:
loaded_files = executor.map(
lambda file_path: load_function(file_path, silent_errors),
lambda file_path: load_function(file_path, silent_errors=silent_errors),
file_paths,
)
# loaded_files is an iterator, so we need to convert it to a list


@ -74,6 +74,7 @@ class ChatComponent(Component):
def build_with_data(
self,
*,
sender: str | None = "User",
sender_name: str | None = "User",
input_value: str | Data | Message | None = None,


@ -59,11 +59,13 @@ class LCModelComponent(Component):
stream = self.stream
system_message = self.system_message
output = self.build_model()
result = self.get_chat_result(output, stream, input_value, system_message)
result = self.get_chat_result(
runnable=output, stream=stream, input_value=input_value, system_message=system_message
)
self.status = result
return result
def get_result(self, runnable: LLM, stream: bool, input_value: str):
def get_result(self, *, runnable: LLM, stream: bool, input_value: str):
"""Retrieves the result from the output of a Runnable object.
Args:
@ -139,6 +141,7 @@ class LCModelComponent(Component):
def get_chat_result(
self,
*,
runnable: LanguageModel,
stream: bool,
input_value: str | Message,


@ -121,7 +121,7 @@ def _check_input_variables(input_variables):
return fixed_variables
def validate_prompt(prompt_template: str, silent_errors: bool = False) -> list[str]:
def validate_prompt(prompt_template: str, *, silent_errors: bool = False) -> list[str]:
input_variables = extract_input_variables_from_prompt(prompt_template)
# Check if there are invalid characters in the input_variables


@ -54,7 +54,7 @@ class RetrievalQAComponent(LCChainComponent):
result = runnable.invoke({"query": self.input_value}, config={"callbacks": self.get_langchain_callbacks()})
source_docs = self.to_data(result.get("source_documents", []))
source_docs = self.to_data(result.get("source_documents", keys=[]))
result_str = str(result.get("result", ""))
if self.return_source_documents and len(source_docs):
references_str = self.create_references_from_data(source_docs)


@ -79,7 +79,9 @@ class DirectoryComponent(Component):
use_multithreading = self.use_multithreading
resolved_path = self.resolve_path(path)
file_paths = retrieve_file_paths(resolved_path, load_hidden, recursive, depth, types)
file_paths = retrieve_file_paths(
resolved_path, load_hidden=load_hidden, recursive=recursive, depth=depth, types=types
)
if types:
file_paths = [fp for fp in file_paths if any(fp.endswith(ext) for ext in types)]
@ -87,9 +89,9 @@ class DirectoryComponent(Component):
loaded_data = []
if use_multithreading:
loaded_data = parallel_load_data(file_paths, silent_errors, max_concurrency)
loaded_data = parallel_load_data(file_paths, silent_errors=silent_errors, max_concurrency=max_concurrency)
else:
loaded_data = [parse_text_file_to_data(file_path, silent_errors) for file_path in file_paths]
loaded_data = [parse_text_file_to_data(file_path, silent_errors=silent_errors) for file_path in file_paths]
loaded_data = list(filter(None, loaded_data))
self.status = loaded_data
return loaded_data # type: ignore[return-value]


@ -47,6 +47,6 @@ class FileComponent(Component):
msg = f"Unsupported file type: {extension}"
raise ValueError(msg)
data = parse_text_file_to_data(resolved_path, silent_errors)
data = parse_text_file_to_data(resolved_path, silent_errors=silent_errors)
self.status = data or "No data"
return data or Data()


@ -68,7 +68,7 @@ class GmailLoaderComponent(Component):
def load_emails(self) -> Data:
class CustomGMailLoader(GMailLoader):
def __init__(
self, creds: Any, n: int = 100, label_ids: list[str] | None = None, raise_error: bool = False
self, creds: Any, *, n: int = 100, label_ids: list[str] | None = None, raise_error: bool = False
) -> None:
super().__init__(creds, n, raise_error)
self.label_ids = label_ids if label_ids is not None else ["SENT"]


@ -22,7 +22,7 @@ class ExtractKeyFromDataComponent(CustomComponent):
},
}
def build(self, data: Data, keys: list[str], silent_error: bool = True) -> Data:
def build(self, data: Data, keys: list[str], *, silent_error: bool = True) -> Data:
"""Extracts the keys from a data.
Args:


@ -44,7 +44,9 @@ class SelectivePassThroughComponent(Component):
Output(display_name="Passed Output", name="passed_output", method="pass_through"),
]
def evaluate_condition(self, input_value: str, comparison_value: str, operator: str, case_sensitive: bool) -> bool:
def evaluate_condition(
self, input_value: str, comparison_value: str, operator: str, *, case_sensitive: bool
) -> bool:
if not case_sensitive:
input_value = input_value.lower()
comparison_value = comparison_value.lower()
@ -68,7 +70,7 @@ class SelectivePassThroughComponent(Component):
value_to_pass = self.value_to_pass
case_sensitive = self.case_sensitive
if self.evaluate_condition(input_value, comparison_value, operator, case_sensitive):
if self.evaluate_condition(input_value, comparison_value, operator, case_sensitive=case_sensitive):
self.status = value_to_pass
return value_to_pass
self.status = ""


@ -69,13 +69,13 @@ class FirecrawlCrawlApi(CustomComponent):
app = FirecrawlApp(api_key=api_key)
crawl_result = app.crawl_url(
url,
{
params={
"crawlerOptions": crawler_options_dict,
"pageOptions": page_options_dict,
},
True,
int(timeout / 1000),
idempotency_key,
wait_until_done=True,
poll_interval=int(timeout / 1000),
idempotency_key=idempotency_key,
)
return Data(data={"results": crawl_result})


@ -47,7 +47,7 @@ class ConditionalRouterComponent(Component):
Output(display_name="False Route", name="false_result", method="false_response"),
]
def evaluate_condition(self, input_text: str, match_text: str, operator: str, case_sensitive: bool) -> bool:
def evaluate_condition(self, input_text: str, match_text: str, operator: str, *, case_sensitive: bool) -> bool:
if not case_sensitive:
input_text = input_text.lower()
match_text = match_text.lower()
@ -65,7 +65,9 @@ class ConditionalRouterComponent(Component):
return False
def true_response(self) -> Message:
result = self.evaluate_condition(self.input_text, self.match_text, self.operator, self.case_sensitive)
result = self.evaluate_condition(
self.input_text, self.match_text, self.operator, case_sensitive=self.case_sensitive
)
if result:
self.status = self.message
return self.message
@ -73,7 +75,9 @@ class ConditionalRouterComponent(Component):
return None # type: ignore[return-value]
def false_response(self) -> Message:
result = self.evaluate_condition(self.input_text, self.match_text, self.operator, self.case_sensitive)
result = self.evaluate_condition(
self.input_text, self.match_text, self.operator, case_sensitive=self.case_sensitive
)
if not result:
self.status = self.message
return self.message


@ -19,7 +19,7 @@ class NotifyComponent(CustomComponent):
},
}
def build(self, name: str, data: Data | None = None, append: bool = False) -> Data:
def build(self, name: str, *, data: Data | None = None, append: bool = False) -> Data:
if data and not isinstance(data, Data):
if isinstance(data, str):
data = Data(text=data)


@ -40,6 +40,7 @@ class SQLExecutorComponent(CustomComponent):
self,
query: str,
database_url: str,
*,
include_columns: bool = False,
passthrough: bool = False,
add_error: bool = False,


@ -78,17 +78,19 @@ Note: Check 'Advanced' for all options.
search_depth: str = Field("basic", description="The depth of the search.")
topic: str = Field("general", description="The category of the search.")
max_results: int = Field(5, description="The maximum number of search results to return.")
include_images: bool = Field(False, description="Include a list of query-related images in the response.")
include_answer: bool = Field(False, description="Include a short answer to original query.")
include_images: bool = Field(
default=False, description="Include a list of query-related images in the response."
)
include_answer: bool = Field(default=False, description="Include a short answer to original query.")
def run_model(self) -> list[Data]:
return self._tavily_search(
self.query,
self.search_depth,
self.topic,
self.max_results,
self.include_images,
self.include_answer,
search_depth=self.search_depth,
topic=self.topic,
max_results=self.max_results,
include_images=self.include_images,
include_answer=self.include_answer,
)
def build_tool(self) -> Tool:
@ -102,6 +104,7 @@ Note: Check 'Advanced' for all options.
def _tavily_search(
self,
query: str,
*,
search_depth: str = "basic",
topic: str = "general",
max_results: int = 5,


@ -583,7 +583,7 @@ class Component(CustomComponent):
field_config = self.get_template_config(self)
frontend_node = ComponentFrontendNode.from_inputs(**field_config)
for key in self._inputs:
frontend_node.set_field_load_from_db_in_template(key, False)
frontend_node.set_field_load_from_db_in_template(key, value=False)
self._map_parameters_on_frontend_node(frontend_node)
frontend_node_dict = frontend_node.to_dict(keep_name=False)


@ -235,7 +235,7 @@ class CustomComponent(BaseComponent):
"""
return self.get_code_tree(self._code or "")
def to_data(self, data: Any, keys: list[str] | None = None, silent_errors: bool = False) -> list[Data]:
def to_data(self, data: Any, *, keys: list[str] | None = None, silent_errors: bool = False) -> list[Data]:
"""Converts input data into a list of Data objects.
Args:
@ -298,7 +298,7 @@ class CustomComponent(BaseComponent):
return self._extract_return_type(return_type)
def create_references_from_data(self, data: list[Data], include_data: bool = False) -> str:
def create_references_from_data(self, data: list[Data], *, include_data: bool = False) -> str:
"""Create references from a list of data.
Args:


@ -39,7 +39,7 @@ class DirectoryReader:
# the custom components from this directory.
base_path = ""
def __init__(self, directory_path, compress_code_field=False):
def __init__(self, directory_path, *, compress_code_field=False):
"""Initialize DirectoryReader with a directory path and a flag indicating whether to compress the code."""
self.directory_path = directory_path
self.compress_code_field = compress_code_field
@ -57,7 +57,7 @@ class DirectoryReader:
"""Check if the file content is empty."""
return len(file_content.strip()) == 0
def filter_loaded_components(self, data: dict, with_errors: bool) -> dict:
def filter_loaded_components(self, data: dict, *, with_errors: bool) -> dict:
from langflow.custom.utils import build_component
items = []


@ -63,7 +63,7 @@ async def abuild_and_validate_all_files(reader: DirectoryReader, file_list):
def load_files_from_path(path: str):
"""Load all files from a given path."""
reader = DirectoryReader(path, False)
reader = DirectoryReader(path, compress_code_field=False)
return reader.get_files()
@ -71,7 +71,7 @@ def load_files_from_path(path: str):
def build_custom_component_list_from_path(path: str):
"""Build a list of custom components for the langchain from a given path."""
file_list = load_files_from_path(path)
reader = DirectoryReader(path, False)
reader = DirectoryReader(path, compress_code_field=False)
valid_components, invalid_components = build_and_validate_all_files(reader, file_list)
@ -84,7 +84,7 @@ def build_custom_component_list_from_path(path: str):
async def abuild_custom_component_list_from_path(path: str):
"""Build a list of custom components for the langchain from a given path."""
file_list = load_files_from_path(path)
reader = DirectoryReader(path, False)
reader = DirectoryReader(path, compress_code_field=False)
valid_components, invalid_components = await abuild_and_validate_all_files(reader, file_list)


@ -136,6 +136,7 @@ def process_type(field_type: str):
def add_new_custom_field(
*,
frontend_node: CustomComponentFrontendNode,
field_name: str,
field_type: str,
@ -215,12 +216,12 @@ def add_extra_fields(frontend_node, field_config, function_args):
field_name, field_type, field_value, field_required = get_field_properties(extra_field)
config = _field_config.pop(field_name, {})
frontend_node = add_new_custom_field(
frontend_node,
field_name,
field_type,
field_value,
field_required,
config,
frontend_node=frontend_node,
field_name=field_name,
field_type=field_type,
field_value=field_value,
field_required=field_required,
field_config=config,
)
if "kwargs" in function_args_names and not all(key in function_args_names for key in field_config):
for field_name, config in _field_config.items():
@ -509,6 +510,7 @@ def update_field_dict(
custom_component_instance: "CustomComponent",
field_dict: dict,
build_config: dict,
*,
update_field: str | None = None,
update_field_value: Any | None = None,
call: bool = False,


@ -134,7 +134,7 @@ def build_sugiyama_layout(vertexes, edges):
return sug
def draw_graph(vertexes, edges, return_ascii=True):
def draw_graph(vertexes, edges, *, return_ascii=True):
"""Build a DAG and draw it in ASCII."""
sug = build_sugiyama_layout(vertexes, edges)


@ -627,6 +627,7 @@ class Graph:
async def _run(
self,
*,
inputs: dict[str, str],
input_components: list[str],
input_type: InputType | None,
@ -705,6 +706,7 @@ class Graph:
def run(
self,
inputs: list[dict[str, str]],
*,
input_components: list[list[str]] | None = None,
types: list[InputType | None] | None = None,
outputs: list[str] | None = None,
@ -756,6 +758,7 @@ class Graph:
async def arun(
self,
inputs: list[dict[str, str]],
*,
inputs_components: list[list[str]] | None = None,
types: list[InputType | None] | None = None,
outputs: list[str] | None = None,
@ -1307,6 +1310,7 @@ class Graph:
async def build_vertex(
self,
vertex_id: str,
*,
get_cache: GetCache | None = None,
set_cache: SetCache | None = None,
inputs_dict: dict[str, str] | None = None,
@ -1434,7 +1438,7 @@ class Graph:
vertices.append(vertex)
return vertices
async def process(self, fallback_to_env_vars: bool, start_component_id: str | None = None) -> Graph:
async def process(self, *, fallback_to_env_vars: bool, start_component_id: str | None = None) -> Graph:
"""Processes the graph with vertices in each layer run in parallel."""
first_layer = self.sort_vertices(start_component_id=start_component_id)
vertex_task_run_count: dict[str, int] = {}
@ -1490,7 +1494,7 @@ class Graph:
return list(next_runnable_vertices)
async def get_next_runnable_vertices(self, lock: asyncio.Lock, vertex: Vertex, cache: bool = True) -> list[str]:
async def get_next_runnable_vertices(self, lock: asyncio.Lock, vertex: Vertex, *, cache: bool = True) -> list[str]:
v_id = vertex.id
v_successors_ids = vertex.successors_ids
async with lock:
@ -1583,7 +1587,7 @@ class Graph:
"""Returns the predecessors of a vertex."""
return [self.get_vertex(source_id) for source_id in self.predecessor_map.get(vertex.id, [])]
def get_all_successors(self, vertex: Vertex, recursive=True, flat=True, visited=None):
def get_all_successors(self, vertex: Vertex, *, recursive=True, flat=True, visited=None):
if visited is None:
visited = set()
@ -1782,6 +1786,7 @@ class Graph:
def layered_topological_sort(
self,
vertices: list[Vertex],
*,
filter_graphs: bool = False,
) -> list[list[str]]:
"""Performs a layered topological sort of the vertices in the graph."""
@ -1926,7 +1931,7 @@ class Graph:
result |= {vertex_id: {"successors": sucessors, "predecessors": predecessors}}
return result
def __filter_vertices(self, vertex_id: str, is_start: bool = False):
def __filter_vertices(self, vertex_id: str, *, is_start: bool = False):
dictionaryized_graph = self.__to_dict()
parent_node_map = {vertex.id: vertex.parent_node_id for vertex in self.vertices}
vertex_ids = sort_up_to_vertex(
@ -2002,7 +2007,7 @@ class Graph:
def is_vertex_runnable(self, vertex_id: str) -> bool:
"""Returns whether a vertex is runnable."""
is_active = self.get_vertex(vertex_id).is_active()
return self.run_manager.is_vertex_runnable(vertex_id, is_active)
return self.run_manager.is_vertex_runnable(vertex_id, is_active=is_active)
def build_run_map(self):
"""Builds the run map for the graph.
@ -2037,7 +2042,7 @@ class Graph:
return
visited.add(predecessor_id)
is_active = self.get_vertex(predecessor_id).is_active()
if self.run_manager.is_vertex_runnable(predecessor_id, is_active):
if self.run_manager.is_vertex_runnable(predecessor_id, is_active=is_active):
runnable_vertices.append(predecessor_id)
else:
for pred_pred_id in self.run_manager.run_predecessors.get(predecessor_id, []):


@ -47,7 +47,7 @@ class RunnableVerticesManager:
self.vertices_to_run.update(vertices_to_run)
self.build_run_map(self.run_predecessors, self.vertices_to_run)
def is_vertex_runnable(self, vertex_id: str, is_active: bool) -> bool:
def is_vertex_runnable(self, vertex_id: str, *, is_active: bool) -> bool:
"""Determines if a vertex is runnable."""
if not is_active:
return False
@ -76,7 +76,7 @@ class RunnableVerticesManager:
self.run_predecessors = predecessor_map.copy()
self.vertices_to_run = vertices_to_run
def update_vertex_run_state(self, vertex_id: str, is_runnable: bool):
def update_vertex_run_state(self, vertex_id: str, *, is_runnable: bool):
"""Updates the runnable state of a vertex."""
if is_runnable:
self.vertices_to_run.add(vertex_id)


@ -268,6 +268,7 @@ def get_root_of_group_node(
def sort_up_to_vertex(
graph: dict[str, dict[str, list[str]]],
vertex_id: str,
*,
parent_node_map: dict[str, str | None] | None = None,
is_start: bool = False,
) -> list[str]:


@ -36,7 +36,7 @@ def __validate_method(method: Callable) -> None:
raise ValueError(msg)
def build_output_getter(method: Callable, validate: bool = True) -> Callable:
def build_output_getter(method: Callable, *, validate: bool = True) -> Callable:
"""Builds an output getter function for a given method in a graph component.
This function creates a new callable that, when invoked, retrieves the output
@ -88,7 +88,7 @@ def build_output_getter(method: Callable, validate: bool = True) -> Callable:
return output_getter
def build_output_setter(method: Callable, validate: bool = True) -> Callable:
def build_output_setter(method: Callable, *, validate: bool = True) -> Callable:
"""Build an output setter function for a given method in a graph component.
This function creates a new callable that, when invoked, sets the output
@ -136,7 +136,7 @@ def build_output_setter(method: Callable, validate: bool = True) -> Callable:
return output_setter
def create_state_model(model_name: str = "State", validate: bool = True, **kwargs) -> type:
def create_state_model(model_name: str = "State", *, validate: bool = True, **kwargs) -> type:
"""Create a dynamic Pydantic state model based on the provided keyword arguments.
This function generates a Pydantic model class with fields corresponding to the
@ -206,8 +206,8 @@ def create_state_model(model_name: str = "State", validate: bool = True, **kwarg
# Define the field with the return type
try:
__validate_method(value)
getter = build_output_getter(value, validate)
setter = build_output_setter(value, validate)
getter = build_output_getter(value, validate=validate)
setter = build_output_setter(value, validate=validate)
property_method = property(getter, setter)
except ValueError as e:
# If the method is not valid, assume it is already a getter


@ -165,6 +165,7 @@ async def log_transaction(
def log_vertex_build(
*,
flow_id: str,
vertex_id: str,
valid: bool,


@ -51,6 +51,7 @@ class Vertex:
self,
data: NodeData,
graph: Graph,
*,
base_type: str | None = None,
is_task: bool = False,
params: dict | None = None,
@ -121,7 +122,7 @@ class Vertex:
def add_result(self, name: str, result: Any):
self.results[name] = result
def update_graph_state(self, key, new_state, append: bool):
def update_graph_state(self, key, new_state, *, append: bool):
if append:
self.graph.append_state(key, new_state, caller=self.id)
else:
@ -437,7 +438,7 @@ class Vertex:
self.load_from_db_fields = load_from_db_fields
self._raw_params = params.copy()
def update_raw_params(self, new_params: Mapping[str, str | list[str]], overwrite: bool = False):
def update_raw_params(self, new_params: Mapping[str, str | list[str]], *, overwrite: bool = False):
"""Update the raw parameters of the vertex with the given new parameters.
Args:
@ -702,7 +703,7 @@ class Vertex:
if isinstance(self.params[key], list):
self.params[key].extend(result)
async def _build_results(self, custom_component, custom_params, fallback_to_env_vars=False):
async def _build_results(self, custom_component, custom_params, *, fallback_to_env_vars=False):
try:
result = await initialize.loading.get_instance_results(
custom_component=custom_component,


@ -3,7 +3,7 @@ from typing import Annotated
from pydantic import PlainValidator
def validate_boolean(value: bool) -> bool:
def validate_boolean(value: bool) -> bool: # noqa: FBT001
valid_trues = ["True", "true", "1", "yes"]
valid_falses = ["False", "false", "0", "no"]
if value in valid_trues:


@ -52,11 +52,12 @@ async def get_instance_results(
custom_component,
custom_params: dict,
vertex: Vertex,
*,
fallback_to_env_vars: bool = False,
base_type: str = "component",
):
custom_params = update_params_with_load_from_db_fields(
custom_component, custom_params, vertex.load_from_db_fields, fallback_to_env_vars
custom_component, custom_params, vertex.load_from_db_fields, fallback_to_env_vars=fallback_to_env_vars
)
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20)
@ -105,6 +106,7 @@ def update_params_with_load_from_db_fields(
custom_component: CustomComponent,
params,
load_from_db_fields,
*,
fallback_to_env_vars=False,
):
# For each field in load_from_db_fields, we will check if it's in the params


@ -27,7 +27,7 @@ def key_func(*args, **kwargs):
return json.dumps(args) + json.dumps(kwargs)
async def aget_all_components(components_paths, as_dict=False):
async def aget_all_components(components_paths, *, as_dict=False):
"""Get all components names combining native and custom components."""
all_types_dict = await aget_all_types_dict(components_paths)
components = {} if as_dict else []
@ -41,7 +41,7 @@ async def aget_all_components(components_paths, as_dict=False):
return components
def get_all_components(components_paths, as_dict=False):
def get_all_components(components_paths, *, as_dict=False):
"""Get all components names combining native and custom components."""
all_types_dict = get_all_types_dict(components_paths)
components = [] if not as_dict else {}


@ -13,6 +13,7 @@ from langflow.utils.util import update_settings
def load_flow_from_json(
flow: Path | str | dict,
*,
tweaks: dict | None = None,
log_level: str | None = None,
log_file: str | None = None,
@ -71,6 +72,7 @@ def load_flow_from_json(
def run_flow_from_json(
flow: Path | str | dict,
input_value: str,
*,
session_id: str | None = None,
tweaks: dict | None = None,
input_type: str = "chat",


@ -137,6 +137,7 @@ class LogConfig(TypedDict):
def configure(
*,
log_level: str | None = None,
log_file: Path | None = None,
disable: bool | None = False,


@ -90,7 +90,7 @@ class JavaScriptMIMETypeMiddleware(BaseHTTPMiddleware):
telemetry_service_tasks = set()
def get_lifespan(fix_migration=False, version=None):
def get_lifespan(*, fix_migration=False, version=None):
@asynccontextmanager
async def lifespan(_app: FastAPI):
nest_asyncio.apply()
@ -267,7 +267,7 @@ def get_static_files_dir():
return frontend_path / "frontend"
def setup_app(static_files_dir: Path | None = None, backend_only: bool = False) -> FastAPI:
def setup_app(static_files_dir: Path | None = None, *, backend_only: bool = False) -> FastAPI:
"""Setup the FastAPI app."""
# get the directory of the current file
logger.info(f"Setting up app with static files directory {static_files_dir}")


@ -24,6 +24,7 @@ class Result(BaseModel):
async def run_graph_internal(
graph: Graph,
flow_id: str,
*,
stream: bool = False,
session_id: str | None = None,
inputs: list[InputValueRequest] | None = None,
@ -62,6 +63,7 @@ def run_graph(
input_value: str,
input_type: str,
output_type: str,
*,
session_id: str | None = None,
fallback_to_env_vars: bool = False,
output_component: str | None = None,
@ -104,9 +106,9 @@ def run_graph(
types.append(input_value_request.type)
return graph.run(
inputs_list,
components,
types,
outputs or [],
input_components=components,
types=types,
outputs=outputs or [],
stream=False,
session_id=session_id,
fallback_to_env_vars=fallback_to_env_vars,
@ -156,7 +158,7 @@ def apply_tweaks_on_vertex(vertex: Vertex, node_tweaks: dict[str, Any]) -> None:
def process_tweaks(
graph_data: dict[str, Any], tweaks: Tweaks | dict[str, dict[str, Any]], stream: bool = False
graph_data: dict[str, Any], tweaks: Tweaks | dict[str, dict[str, Any]], *, stream: bool = False
) -> dict[str, Any]:
"""This function is used to tweak the graph data using the node id and the tweaks dict.


@ -28,6 +28,7 @@ async def get_file_paths(files: list[str]):
async def get_files(
file_paths: list[str],
*,
convert_to_base64: bool = False,
):
storage_service = get_storage_service()


@ -267,7 +267,7 @@ def get_user_id_from_token(token: str) -> UUID:
return UUID(int=0)
def create_user_tokens(user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False) -> dict:
def create_user_tokens(user_id: UUID, db: Session = Depends(get_session), *, update_last_login: bool = False) -> dict:
settings_service = get_settings_service()
access_token_expires = timedelta(seconds=settings_service.auth_settings.ACCESS_TOKEN_EXPIRE_SECONDS)


@ -168,7 +168,7 @@ class DatabaseService(Service):
command.upgrade(alembic_cfg, "head")
logger.info("Alembic initialized")
def run_migrations(self, fix=False):
def run_migrations(self, *, fix=False):
# First we need to check if alembic has been initialized
# If not, we need to initialize it
# if not self.script_location.exists(): # this is not the correct way to check if alembic has been initialized


@ -12,7 +12,7 @@ if TYPE_CHECKING:
from langflow.services.database.service import DatabaseService
def initialize_database(fix_migration: bool = False):
def initialize_database(*, fix_migration: bool = False):
logger.debug("Initializing database")
from langflow.services.deps import get_db_service


@ -41,7 +41,8 @@ def is_list_of_any(field: FieldInfo) -> bool:
class MyCustomSource(EnvSettingsSource):
def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
@override
def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any: # type: ignore[misc]
# allow comma-separated list parsing
# fieldInfo contains the annotation of the field
@ -317,7 +318,7 @@ class Settings(BaseSettings):
model_config = SettingsConfigDict(validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_")
def update_from_yaml(self, file_path: str, dev: bool = False):
def update_from_yaml(self, file_path: str, *, dev: bool = False):
new_settings = load_settings_from_yaml(file_path)
self.components_path = new_settings.components_path or []
self.dev = dev


@ -173,6 +173,7 @@ class StoreService(Service):
async def count_components(
self,
filter_conditions: list[dict[str, Any]],
*,
api_key: str | None = None,
use_api_key: bool | None = False,
) -> int:
@ -198,6 +199,7 @@ class StoreService(Service):
def build_filter_conditions(
self,
*,
component_id: str | None = None,
search: str | None = None,
private: bool | None = None,
@ -257,6 +259,7 @@ class StoreService(Service):
async def query_components(
self,
*,
api_key: str | None = None,
sort: list[str] | None = None,
page: int = 1,
@ -501,6 +504,7 @@ class StoreService(Service):
async def get_list_component_response_model(
self,
*,
component_id: str | None = None,
search: str | None = None,
private: bool | None = None,


@ -19,6 +19,7 @@ async def update_components_with_user_data(
components: list["ListComponentResponse"],
store_service: "StoreService",
store_api_key: str,
*,
liked: bool,
):
"""Updates the components with the user data (liked_by_user and in_users_collection)."""


@ -138,7 +138,7 @@ class OpenTelemetry(metaclass=ThreadSafeSingletonMetaUsingWeakref):
labels={"flow_id": mandatory_label},
)
def __init__(self, prometheus_enabled: bool = True):
def __init__(self, *, prometheus_enabled: bool = True):
if not self._metrics_registry:
self._register_metric()


@ -149,7 +149,7 @@ def initialize_session_service():
)
def initialize_services(fix_migration: bool = False):
def initialize_services(*, fix_migration: bool = False):
"""Initialize all the services needed."""
# Test cache connection
get_service(ServiceType.CACHE_SERVICE, default=CacheServiceFactory())


@ -94,7 +94,7 @@ class FrontendNode(BaseModel):
return cls(**data)
# For backwards compatibility
def to_dict(self, keep_name=True) -> dict:
def to_dict(self, *, keep_name=True) -> dict:
"""Returns a dict representation of the frontend node."""
dump = self.model_dump(by_alias=True, exclude_none=True)
if not keep_name:


@ -25,7 +25,7 @@ def remove_ansi_escape_codes(text):
return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text)
def build_template_from_function(name: str, type_to_loader_dict: dict, add_function: bool = False):
def build_template_from_function(name: str, type_to_loader_dict: dict, *, add_function: bool = False):
classes = [item.__annotations__["return"].__name__ for item in type_to_loader_dict.values()]
# Raise error if name is not in chains
@ -76,6 +76,7 @@ def build_template_from_method(
class_name: str,
method_name: str,
type_to_cls_dict: dict,
*,
add_function: bool = False,
):
classes = [item.__name__ for item in type_to_cls_dict.values()]
@ -168,7 +169,7 @@ def get_default_factory(module: str, function: str):
return None
def update_verbose(d: dict, new_value: bool) -> dict:
def update_verbose(d: dict, *, new_value: bool) -> dict:
"""Recursively updates the value of the 'verbose' key in a dictionary.
Args:
@ -180,7 +181,7 @@ def update_verbose(d: dict, new_value: bool) -> dict:
"""
for k, v in d.items():
if isinstance(v, dict):
update_verbose(v, new_value)
update_verbose(v, new_value=new_value)
elif k == "verbose":
d[k] = new_value
return d
@ -402,6 +403,7 @@ def build_loader_repr_from_data(data: list[Data]) -> str:
def update_settings(
*,
config: str | None = None,
cache: str | None = None,
dev: bool = False,


@ -72,7 +72,7 @@ def is_nightly(v: str) -> bool:
return "dev" in v
def fetch_latest_version(package_name: str, include_prerelease: bool) -> str | None:
def fetch_latest_version(package_name: str, *, include_prerelease: bool) -> str | None:
from packaging import version as pkg_version
package_name = package_name.replace(" ", "-").lower()


@ -31,7 +31,7 @@ def build_vertex(self, vertex: Vertex) -> Vertex:
def process_graph_cached_task(
data_graph: dict[str, Any], # noqa: ARG001
inputs: dict | list[dict] | None = None, # noqa: ARG001
clear_cache=False, # noqa: ARG001
clear_cache=False, # noqa: ARG001, FBT002
session_id=None, # noqa: ARG001
) -> dict[str, Any]:
msg = "This task is not implemented yet"


@ -58,7 +58,6 @@ ignore = [
# Rules that are TODOs
"ANN",
"D1", # Missing docstrings
"FBT",
"N",
"S",
"SLF",


@ -1,3 +1,4 @@
import inspect
from langflow.load import run_flow_from_json
@ -20,7 +21,8 @@ def test_run_flow_from_json_params():
}
# Check if the function accepts all expected parameters
params = run_flow_from_json.__code__.co_varnames[: run_flow_from_json.__code__.co_argcount]
func_spec = inspect.getfullargspec(run_flow_from_json)
params = func_spec.args + func_spec.kwonlyargs
assert expected_params.issubset(params), "Not all expected parameters are present in run_flow_from_json"
# TODO: Add tests by loading a flow and running it; need to test with fake llm and check if it returns the correct output
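
The test switches to inspect.getfullargspec because keyword-only parameters never appear in __code__.co_varnames[:co_argcount]; co_argcount counts positional parameters only, while getfullargspec reports keyword-only names separately in kwonlyargs. A small illustration with a hypothetical function:

import inspect

def load(path: str, *, tweaks: dict | None = None):
    return path, tweaks

print(load.__code__.co_varnames[: load.__code__.co_argcount])  # ('path',) -- 'tweaks' is missing
spec = inspect.getfullargspec(load)
print(spec.args + spec.kwonlyargs)  # ['path', 'tweaks']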


@ -94,7 +94,7 @@ def test_is_vertex_runnable(data):
vertex_id = "A"
is_active = True
result = manager.is_vertex_runnable(vertex_id, is_active)
result = manager.is_vertex_runnable(vertex_id, is_active=is_active)
assert result is False
@ -104,7 +104,7 @@ def test_is_vertex_runnable__wrong_is_active(data):
vertex_id = "A"
is_active = False
result = manager.is_vertex_runnable(vertex_id, is_active)
result = manager.is_vertex_runnable(vertex_id, is_active=is_active)
assert result is False
@ -114,7 +114,7 @@ def test_is_vertex_runnable__wrong_vertices_to_run(data):
vertex_id = "D"
is_active = True
result = manager.is_vertex_runnable(vertex_id, is_active)
result = manager.is_vertex_runnable(vertex_id, is_active=is_active)
assert result is False
@ -124,7 +124,7 @@ def test_is_vertex_runnable__wrong_run_predecessors(data):
vertex_id = "C"
is_active = True
result = manager.is_vertex_runnable(vertex_id, is_active)
result = manager.is_vertex_runnable(vertex_id, is_active=is_active)
assert result is False
@ -172,7 +172,7 @@ def test_update_vertex_run_state(data):
vertex_id = "C"
is_runnable = True
manager.update_vertex_run_state(vertex_id, is_runnable)
manager.update_vertex_run_state(vertex_id, is_runnable=is_runnable)
assert vertex_id in manager.vertices_to_run
@ -182,7 +182,7 @@ def test_update_vertex_run_state__bad_case(data):
vertex_id = "C"
is_runnable = False
manager.update_vertex_run_state(vertex_id, is_runnable)
manager.update_vertex_run_state(vertex_id, is_runnable=is_runnable)
assert vertex_id not in manager.vertices_being_run


@ -158,9 +158,11 @@ def test_directory_component_build_with_multithreading(
# Assert
mock_resolve_path.assert_called_once_with(path)
mock_retrieve_file_paths.assert_called_once_with(path, load_hidden, recursive, depth, ANY)
mock_retrieve_file_paths.assert_called_once_with(
path, load_hidden=load_hidden, recursive=recursive, depth=depth, types=ANY
)
mock_parallel_load_data.assert_called_once_with(
mock_retrieve_file_paths.return_value, silent_errors, max_concurrency
mock_retrieve_file_paths.return_value, silent_errors=silent_errors, max_concurrency=max_concurrency
)