ref: Add ruff rules for pylint (PL) (#4032)
* Add ruff rules for pylint (PL)
* Changes following review

This commit is contained in:
parent 06c8e3d6c8
commit f01ea48d61

45 changed files with 174 additions and 166 deletions

@@ -228,7 +228,7 @@ def wait_for_server_ready(host, port):
     Wait for the server to become ready by polling the health endpoint.
     """
     status_code = 0
-    while status_code != 200:
+    while status_code != httpx.codes.OK:
        try:
            status_code = httpx.get(f"http://{host}:{port}/health").status_code
        except Exception:

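Note: this first hunk shows the PLR2004 ("magic value used in comparison") fix that recurs throughout the PR: bare status literals are replaced with the named constants httpx (and requests) already provide. A minimal sketch, assuming a locally reachable server on langflow's default port:

import httpx

response = httpx.get("http://localhost:7860/health")
# Before: `if response.status_code == 200:` trips PLR2004.
# After: the enum member carries the same value but documents intent.
if response.status_code == httpx.codes.OK:
    print("server is ready")
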
@@ -250,7 +250,6 @@ def run_on_windows(host, port, log_level, options, app):
     """
     print_banner(host, port)
     run_langflow(host, port, log_level, options, app)
-    return
 
 
 def is_port_in_use(port, host="localhost"):

@@ -11,8 +11,11 @@ from langflow.logging.logger import log_buffer
 log_router = APIRouter(tags=["Log"])
 
 
+NUMBER_OF_NOT_SENT_BEFORE_KEEPALIVE = 5
+
+
 async def event_generator(request: Request):
-    global log_buffer
+    global log_buffer  # noqa: PLW0602
     last_read_item = None
     current_not_sent = 0
     while not await request.is_disconnected():

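Note: PLW0602 fires when a function declares `global` for a name it only reads, making the declaration redundant; the PR keeps the declarations as documentation of shared state and silences the rule inline. A standalone sketch of both sides of the rule:

log_buffer: list[str] = []

def read_logs() -> list[str]:
    global log_buffer  # noqa: PLW0602 -- read-only; kept to signal shared state
    return list(log_buffer)

def clear_logs() -> None:
    global log_buffer  # required here: the name is reassigned (PLW0603 territory)
    log_buffer = []
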
@@ -41,7 +44,7 @@ async def event_generator(request: Request):
                 yield f"{json.dumps({ts:msg})}\n\n"
             else:
                 current_not_sent += 1
-                if current_not_sent == 5:
+                if current_not_sent == NUMBER_OF_NOT_SENT_BEFORE_KEEPALIVE:
                     current_not_sent = 0
                     yield "keepalive\n\n"
 

@@ -57,7 +60,7 @@ async def stream_logs(
     it establishes a long-lived connection to the server and receives log messages in real-time
     the client should use the head "Accept: text/event-stream"
     """
-    global log_buffer
+    global log_buffer  # noqa: PLW0602
     if log_buffer.enabled() is False:
         raise HTTPException(
             status_code=HTTPStatus.NOT_IMPLEMENTED,

@@ -73,7 +76,7 @@ async def logs(
     lines_after: int = Query(0, description="The number of logs after the timestamp"),
     timestamp: int = Query(0, description="The timestamp to start getting logs from"),
 ):
-    global log_buffer
+    global log_buffer  # noqa: PLW0602
     if log_buffer.enabled() is False:
         raise HTTPException(
             status_code=HTTPStatus.NOT_IMPLEMENTED,

@@ -91,11 +94,10 @@ async def logs(
                 detail="Timestamp is required when requesting logs after the timestamp",
             )
         content = log_buffer.get_last_n(10) if lines_before <= 0 else log_buffer.get_last_n(lines_before)
-    else:
-        if lines_before > 0:
-            content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=lines_before)
-        elif lines_after > 0:
-            content = log_buffer.get_after_timestamp(timestamp=timestamp, lines=lines_after)
-        else:
-            content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=10)
+    elif lines_before > 0:
+        content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=lines_before)
+    elif lines_after > 0:
+        content = log_buffer.get_after_timestamp(timestamp=timestamp, lines=lines_after)
+    else:
+        content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=10)
     return JSONResponse(content=content)

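Note: the hunk above is PLR5501 (collapsible `else: if`) at work: the nested block becomes a flat `elif` chain with identical behavior and one less indent level. A reduced sketch:

def describe(n: int) -> str:
    # Before:
    #     else:
    #         if n > 0:
    #             ...
    if n == 0:
        label = "zero"
    elif n > 0:
        label = "positive"
    else:
        label = "negative"
    return label
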
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import uuid
+from datetime import timedelta
 from typing import TYPE_CHECKING, Any
 
 from fastapi import HTTPException

@@ -114,15 +115,18 @@ def format_elapsed_time(elapsed_time: float) -> str:
     - Less than 1 minute: returns seconds rounded to 2 decimals
     - 1 minute or more: returns minutes and seconds
     """
-    if elapsed_time < 1:
-        milliseconds = int(round(elapsed_time * 1000))
+    delta = timedelta(seconds=elapsed_time)
+    if delta < timedelta(seconds=1):
+        milliseconds = round(delta / timedelta(milliseconds=1))
         return f"{milliseconds} ms"
-    if elapsed_time < 60:
+
+    if delta < timedelta(minutes=1):
         seconds = round(elapsed_time, 2)
         unit = "second" if seconds == 1 else "seconds"
         return f"{seconds} {unit}"
-    minutes = int(elapsed_time // 60)
-    seconds = round(elapsed_time % 60, 2)
+
+    minutes = delta // timedelta(minutes=1)
+    seconds = round((delta - timedelta(minutes=minutes)).total_seconds(), 2)
     minutes_unit = "minute" if minutes == 1 else "minutes"
     seconds_unit = "second" if seconds == 1 else "seconds"
     return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}"

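Note: the rewrite expresses the thresholds 1 and 60 through datetime.timedelta instead of magic numbers. A quick check of the arithmetic used above:

from datetime import timedelta

delta = timedelta(seconds=125.5)
minutes = delta // timedelta(minutes=1)                               # 2 (floor division of timedeltas yields an int)
seconds = (delta - timedelta(minutes=minutes)).total_seconds()        # 5.5
millis = round(timedelta(seconds=0.042) / timedelta(milliseconds=1))  # 42 (true division yields a float)
print(minutes, seconds, millis)
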
@@ -137,7 +137,7 @@ def update_folder(
 
     folder_data = existing_folder.model_dump(exclude_unset=True)
     for key, value in folder_data.items():
-        if key != "components" and key != "flows":
+        if key not in ("components", "flows"):
             setattr(existing_folder, key, value)
     session.add(existing_folder)
     session.commit()

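Note: PLR1714 (repeated equality comparison) rewrites chained `==`/`!=` checks on one name as a single membership test; the two forms are equivalent:

key = "components"
assert (key != "components" and key != "flows") == (key not in ("components", "flows"))
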
@@ -12,6 +12,8 @@ from langflow.field_typing import Tool
 from langflow.inputs import MultilineInput, SecretStrInput, StrInput
 from langflow.schema import Data
 
+MIN_ROWS_IN_TABLE = 3
+
 
 class AddContentToPage(LCToolComponent):
     display_name: str = "Add Content to Page "

@@ -91,12 +93,8 @@ class AddContentToPage(LCToolComponent):
             if text.startswith("#"):
                 heading_level = text.count("#", 0, 6)
                 heading_text = text[heading_level:].strip()
-                if heading_level == 1:
-                    blocks.append(self.create_block("heading_1", heading_text))
-                elif heading_level == 2:
-                    blocks.append(self.create_block("heading_2", heading_text))
-                elif heading_level == 3:
-                    blocks.append(self.create_block("heading_3", heading_text))
+                if heading_level in range(3):
+                    blocks.append(self.create_block(f"heading_{heading_level+1}", heading_text))
             else:
                 blocks.append(self.create_block("paragraph", text))
         elif node.name == "h1":

@@ -154,7 +152,7 @@
 
     def is_table(self, text):
         rows = text.split("\n")
-        if len(rows) < 2:
+        if len(rows) < MIN_ROWS_IN_TABLE:
             return False
 
         has_separator = False

@@ -167,7 +165,7 @@
             elif not cells:
                 return False
 
-        return has_separator and len(rows) >= 3
+        return has_separator
 
     def process_list(self, node, list_type):
         blocks = []

@@ -191,7 +189,7 @@
         if header_row or body_rows:
             table_width = max(
                 len(header_row.find_all(["th", "td"])) if header_row else 0,
-                max(len(row.find_all(["th", "td"])) for row in body_rows),
+                *(len(row.find_all(["th", "td"])) for row in body_rows),
             )
 
             table_block = self.create_block("table", "", table_width=table_width, has_column_header=bool(header_row))

@@ -32,9 +32,7 @@ class ConversationChainComponent(LCChainComponent):
         if isinstance(result, dict):
             result = result.get(chain.output_key, "")  # type: ignore
 
-        elif isinstance(result, str):
-            result = result
-        else:
+        elif not isinstance(result, str):
             result = result.get("response")
         result = str(result)
         self.status = result

@@ -129,13 +129,13 @@ class GmailLoaderComponent(Component):
             messages = thread["messages"]
 
             response_email = None
-            for message in messages:
-                email_data = message["payload"]["headers"]
+            for _message in messages:
+                email_data = _message["payload"]["headers"]
                 for values in email_data:
                     if values["name"] == "Message-ID":
                         message_id = values["value"]
                         if message_id == in_reply_to:
-                            response_email = message
+                            response_email = _message
             if response_email is None:
                 msg = "Response email not found in the thread."
                 raise ValueError(msg)

@@ -34,7 +34,7 @@ class EmbeddingSimilarityComponent(Component):
         embedding_vectors: list[Data] = self.embedding_vectors
 
         # Assert that the list contains exactly two Data objects
-        assert len(embedding_vectors) == 2, "Exactly two embedding vectors are required."
+        assert len(embedding_vectors) == 2, "Exactly two embedding vectors are required."  # noqa: PLR2004
 
         embedding_1 = np.array(embedding_vectors[0].data["embeddings"])
         embedding_2 = np.array(embedding_vectors[1].data["embeddings"])

@@ -62,7 +62,7 @@ class HuggingFaceInferenceAPIEmbeddingsComponent(LCEmbeddingsModel):
             )
             raise ValueError(msg) from e
 
-        if response.status_code != 200:
+        if response.status_code != requests.codes.ok:
             msg = f"HuggingFace health check failed: {response.status_code}"
             raise ValueError(msg)
         # returning True to solve linting error

@@ -76,7 +76,7 @@ class GroqModel(LCModelComponent):
             return []
 
     def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
-        if field_name == "groq_api_key" or field_name == "groq_api_base" or field_name == "model_name":
+        if field_name in ("groq_api_key", "groq_api_base", "model_name"):
             models = self.get_models()
             build_config["model_name"]["options"] = models
         return build_config

@@ -7,6 +7,8 @@ from langflow.io import Output
 from langflow.schema import Data
 from langflow.schema.dotdict import dotdict
 
+MAX_NUMBER_OF_FIELDS = 15
+
 
 class CreateDataComponent(Component):
     display_name: str = "Create Data"

@@ -48,9 +50,11 @@ class CreateDataComponent(Component):
         except ValueError:
             return build_config
         existing_fields = {}
-        if field_value_int > 15:
-            build_config["number_of_fields"]["value"] = 15
-            msg = "Number of fields cannot exceed 15. Try using a Component to combine two Data."
+        if field_value_int > MAX_NUMBER_OF_FIELDS:
+            build_config["number_of_fields"]["value"] = MAX_NUMBER_OF_FIELDS
+            msg = (
+                f"Number of fields cannot exceed {MAX_NUMBER_OF_FIELDS}. Try using a Component to combine two Data."
+            )
             raise ValueError(msg)
         if len(build_config) > len(default_keys):
             # back up the existing template fields

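Note: the PLR2004 fix here hoists the limit into MAX_NUMBER_OF_FIELDS so the clamp and the error message cannot drift apart. A reduced sketch using the same names:

MAX_NUMBER_OF_FIELDS = 15

def clamp_fields(requested: int) -> int:
    # Single source of truth: the constant feeds both the check and the message.
    if requested > MAX_NUMBER_OF_FIELDS:
        msg = f"Number of fields cannot exceed {MAX_NUMBER_OF_FIELDS}."
        raise ValueError(msg)
    return requested
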
@@ -89,10 +93,10 @@ class CreateDataComponent(Component):
         for value_dict in self._attributes.values():
             if isinstance(value_dict, dict):
                 # Check if the value of the value_dict is a Data
-                value_dict = {
+                _value_dict = {
                     key: value.get_text() if isinstance(value, Data) else value for key, value in value_dict.items()
                 }
-                data.update(value_dict)
+                data.update(_value_dict)
         return data
 
     def validate_text_key(self):

@@ -66,7 +66,7 @@ class RunnableExecComponent(Component):
 
         if output_key in result:
             result_value = result.get(output_key)
-        elif len(result) == 2 and input_key in result:
+        elif len(result) == 2 and input_key in result:  # noqa: PLR2004
             # get the other key from the result dict
             other_key = next(k for k in result if k != input_key)
             if other_key == output_key:

@@ -7,6 +7,8 @@ from langflow.io import Output
 from langflow.schema import Data
 from langflow.schema.dotdict import dotdict
 
+MAX_NUMBER_OF_FIELDS = 15
+
 
 class UpdateDataComponent(Component):
     display_name: str = "Update data"

@@ -54,9 +56,11 @@ class UpdateDataComponent(Component):
         except ValueError:
             return build_config
         existing_fields = {}
-        if field_value_int > 15:
-            build_config["number_of_fields"]["value"] = 15
-            msg = "Number of fields cannot exceed 15. Try using a Component to combine two Data."
+        if field_value_int > MAX_NUMBER_OF_FIELDS:
+            build_config["number_of_fields"]["value"] = MAX_NUMBER_OF_FIELDS
+            msg = (
+                f"Number of fields cannot exceed {MAX_NUMBER_OF_FIELDS}. Try using a Component to combine two Data."
+            )
             raise ValueError(msg)
         if len(build_config) > len(default_keys):
             # back up the existing template fields

@@ -96,10 +100,10 @@ class UpdateDataComponent(Component):
         for value_dict in self._attributes.values():
             if isinstance(value_dict, dict):
                 # Check if the value of the value_dict is a Data
-                value_dict = {
+                _value_dict = {
                     key: value.get_text() if isinstance(value, Data) else value for key, value in value_dict.items()
                 }
-                data.update(value_dict)
+                data.update(_value_dict)
         return data
 
     def validate_text_key(self, data: Data):

@@ -86,7 +86,7 @@ class PythonCodeStructuredTool(LCToolComponent):
         if field_name is None:
             return build_config
 
-        if field_name != "tool_code" and field_name != "tool_function":
+        if field_name not in ("tool_code", "tool_function"):
             return build_config
 
         try:

@@ -56,24 +56,15 @@ class MongoVectorStoreComponent(LCVectorStoreComponent):
             else:
                 documents.append(_input)
 
-        if documents:
-            vector_store = MongoDBAtlasVectorSearch.from_documents(
-                documents=documents, embedding=self.embedding, collection=collection, index_name=self.index_name
-            )
-        else:
-            vector_store = MongoDBAtlasVectorSearch(
-                embedding=self.embedding,
-                collection=collection,
-                index_name=self.index_name,
-            )
-
-        return vector_store
+        if documents:
+            return MongoDBAtlasVectorSearch.from_documents(
+                documents=documents, embedding=self.embedding, collection=collection, index_name=self.index_name
+            )
+
+        return MongoDBAtlasVectorSearch(
+            embedding=self.embedding,
+            collection=collection,
+            index_name=self.index_name,
+        )
 
     def search_documents(self) -> list[Data]:
         from bson.objectid import ObjectId

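Note: instead of binding `vector_store` in two branches and returning it at the end, each branch now returns directly, which drops the temporary and the `else`. A generic sketch of the shape (names here are illustrative, not from the component):

def build_store(documents: list[str]) -> str:
    # Early return: the happy path exits immediately, no shared temporary needed.
    if documents:
        return f"store_from_documents({len(documents)})"
    return "empty_store()"
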
@@ -352,8 +352,8 @@ class CodeParser:
             methods=[],
             init=None,
         )
-        for node in nodes:
-            self.process_class_node(node, class_details)
+        for _node in nodes:
+            self.process_class_node(_node, class_details)
         self.data["classes"].append(class_details.model_dump())
 
     def process_class_node(self, node, class_details):

@@ -42,7 +42,7 @@ _ComponentToolkit = None
 
 
 def _get_component_toolkit():
-    global _ComponentToolkit
+    global _ComponentToolkit  # noqa: PLW0603
     if _ComponentToolkit is None:
         from langflow.base.tools.component_tool import ComponentToolkit
 

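Note: unlike the read-only cases above, this function assigns the module-level name, so it needs `global` and trips PLW0603 ("global statement to update a variable"); the noqa marks the lazy-import cache as intentional. A self-contained sketch of the same memoization pattern:

_cached = None

def get_cached():
    global _cached  # noqa: PLW0603 -- deliberate module-level memoization
    if _cached is None:
        _cached = object()  # stands in for the deferred import / expensive constructor
    return _cached
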
@@ -116,8 +116,8 @@ class DirectoryReader:
         except UnicodeDecodeError:
             # This is happening in Windows, so we need to open the file in binary mode
             # The file is always just a python file, so we can safely read it as utf-8
-            with _file_path.open("rb") as file:
-                return file.read().decode("utf-8")
+            with _file_path.open("rb") as f:
+                return f.read().decode("utf-8")
 
     def get_files(self):
         """

@@ -44,15 +44,15 @@ def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: l
             },
         )
         if return_type is str:
-            return_type = "Text"
+            _return_type = "Text"
         elif hasattr(return_type, "__name__"):
-            return_type = return_type.__name__
+            _return_type = return_type.__name__
         elif hasattr(return_type, "__class__"):
-            return_type = return_type.__class__.__name__
+            _return_type = return_type.__class__.__name__
         else:
-            return_type = str(return_type)
+            _return_type = str(return_type)
 
-        frontend_node.add_output_type(return_type)
+        frontend_node.add_output_type(_return_type)
 
 
 def reorder_fields(frontend_node: CustomComponentFrontendNode, field_order: list[str]):

@@ -224,19 +224,18 @@ def add_extra_fields(frontend_node, field_config, function_args):
             config,
         )
     if "kwargs" in function_args_names and not all(key in function_args_names for key in field_config):
-        for field_name, field_config in _field_config.copy().items():
-            if "name" not in field_config or field_name == "code":
+        for field_name, config in _field_config.items():
+            if "name" not in config or field_name == "code":
                 continue
-            config = _field_config.get(field_name, {})
-            config = config.model_dump() if isinstance(config, BaseModel) else config
-            field_name, field_type, field_value, field_required = get_field_properties(extra_field=config)
+            _config = config.model_dump() if isinstance(config, BaseModel) else config
+            _field_name, field_type, field_value, field_required = get_field_properties(extra_field=_config)
             frontend_node = add_new_custom_field(
                 frontend_node,
-                field_name,
+                _field_name,
                 field_type,
                 field_value,
                 field_required,
-                config,
+                _config,
             )
 
 

@@ -30,10 +30,11 @@ class EventManager:
             raise ValueError(msg)
         # Check if it has `self, event_type and data`
         sig = inspect.signature(callback)
-        if len(sig.parameters) != 3:
+        parameters = ["manager", "event_type", "data"]
+        if len(sig.parameters) != len(parameters):
             msg = "Callback must have exactly 3 parameters"
             raise ValueError(msg)
-        if not all(param.name in ["manager", "event_type", "data"] for param in sig.parameters.values()):
+        if not all(param.name in parameters for param in sig.parameters.values()):
             msg = "Callback must have exactly 3 parameters: manager, event_type, and data"
             raise ValueError(msg)
 

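Note: deriving the arity from the `parameters` list removes the magic `3` (PLR2004) and keeps the count check and the name check in sync. A standalone sketch:

import inspect

def validate_callback(callback) -> None:
    parameters = ["manager", "event_type", "data"]
    sig = inspect.signature(callback)
    if len(sig.parameters) != len(parameters):
        msg = f"Callback must have exactly {len(parameters)} parameters"
        raise ValueError(msg)
    if not all(param.name in parameters for param in sig.parameters.values()):
        msg = "Callback parameters must be: " + ", ".join(parameters)
        raise ValueError(msg)

validate_callback(lambda manager, event_type, data: None)  # passes both checks
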
@@ -64,7 +64,7 @@ class SourceHandle(BaseModel):
         if _info.data["data_type"] == "GroupNode":
             # 'OpenAIModel-u4iGV_text_output'
             splits = v.split("_", 1)
-            if len(splits) != 2:
+            if len(splits) != 2:  # noqa: PLR2004
                 msg = f"Invalid source handle name {v}"
                 raise ValueError(msg)
             v = splits[1]

@@ -1051,7 +1051,7 @@ class Graph:
         """Updates the edges of a vertex in the Graph."""
         new_edges = []
         for edge in self.edges:
-            if edge.source_id == other_vertex.id or edge.target_id == other_vertex.id:
+            if other_vertex.id in (edge.source_id, edge.target_id):
                 continue
             new_edges.append(edge)
         new_edges += other_vertex.edges

@@ -1203,7 +1203,7 @@ class Graph:
             return
         self.vertices.remove(vertex)
         self.vertex_map.pop(vertex_id)
-        self.edges = [edge for edge in self.edges if edge.source_id != vertex_id and edge.target_id != vertex_id]
+        self.edges = [edge for edge in self.edges if vertex_id not in (edge.source_id, edge.target_id)]
 
     def _build_vertex_params(self) -> None:
         """Identifies and handles the LLM vertex within the graph."""

@@ -1525,8 +1525,8 @@ class Graph:
                 for t in tasks[i + 1 :]:
                     t.cancel()
                 raise result
-            if isinstance(result, tuple) and len(result) == 5:
-                vertices.append(result[4])
+            if isinstance(result, VertexBuildResult):
+                vertices.append(result.vertex)
             else:
                 msg = f"Invalid result from task {task_name}: {result}"
                 raise ValueError(msg)

@@ -235,7 +235,7 @@ def get_updated_edges(base_flow, g_nodes, g_edges, group_node_id):
         if new_edge["source"] == group_node_id:
             new_edge = update_source_handle(new_edge, g_nodes, g_edges)
 
-        if edge["target"] == group_node_id or edge["source"] == group_node_id:
+        if group_node_id in (edge["target"], edge["source"]):
             updated_edges.append(new_edge)
     return updated_edges
 

@@ -222,7 +222,7 @@ def create_state_model(model_name: str = "State", validate: bool = True, **kwarg
         elif isinstance(value, FieldInfo):
             field_tuple = (value.annotation or Any, value)
             fields[name] = field_tuple
-        elif isinstance(value, tuple) and len(value) == 2:
+        elif isinstance(value, tuple) and len(value) == 2:  # noqa: PLR2004
             # Fields are defined by one of the following tuple forms:
 
             # (<type>, <default value>)

@@ -195,7 +195,7 @@ def rewrite_file_path(file_path: str):
 
     file_path_split = [part for part in file_path.split("/") if part]
 
-    if len(file_path_split) >= 2:
+    if len(file_path_split) > 1:
         consistent_file_path = f"{file_path_split[-2]}/{file_path_split[-1]}"
     else:
         consistent_file_path = "/".join(file_path_split)

@@ -126,7 +126,7 @@ class Vertex:
 
     def set_state(self, state: str):
         self.state = VertexStates[state]
-        if self.state == VertexStates.INACTIVE and self.graph.in_degree_map[self.id] < 2:
+        if self.state == VertexStates.INACTIVE and self.graph.in_degree_map[self.id] <= 1:
             # If the vertex is inactive and has only one in degree
             # it means that it is not a merge point in the graph
             self.graph.inactivated_vertices.add(self.id)

@@ -361,11 +361,10 @@ class Vertex:
                         "Setting to None."
                     )
                     params[field_name] = None
-                else:
-                    if field["list"]:
-                        params[field_name] = []
-                    else:
-                        params[field_name] = None
+                elif field["list"]:
+                    params[field_name] = []
+                else:
+                    params[field_name] = None
 
             elif field.get("type") in DIRECT_TYPES and params.get(field_name) is None:
                 val = field.get("value")

@@ -732,9 +731,9 @@ class Vertex:
         Updates the built object and its artifacts.
         """
         if isinstance(result, tuple):
-            if len(result) == 2:
+            if len(result) == 2:  # noqa: PLR2004
                 self._built_object, self.artifacts = result
-            elif len(result) == 3:
+            elif len(result) == 3:  # noqa: PLR2004
                 self._custom_component, self._built_object, self.artifacts = result
                 self.logs = self._custom_component._output_logs
                 self.artifacts_raw = self.artifacts.get("raw", None)

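Note: where a literal really is the clearest spelling, as with tuple-unpacking arity here, the PR opts for an inline noqa rather than inventing a constant:

result = ("built_object", {"raw": 1})
if isinstance(result, tuple) and len(result) == 2:  # noqa: PLR2004
    built_object, artifacts = result  # the 2 is the unpacking arity, nothing more
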
@@ -63,9 +63,9 @@ class ComponentVertex(Vertex):
         Updates the built object and its artifacts.
         """
         if isinstance(result, tuple):
-            if len(result) == 2:
+            if len(result) == 2:  # noqa: PLR2004
                 self._built_object, self.artifacts = result
-            elif len(result) == 3:
+            elif len(result) == 3:  # noqa: PLR2004
                 self._custom_component, self._built_object, self.artifacts = result
                 self.logs = self._custom_component._output_logs
             for key in self.artifacts:

@@ -233,8 +233,8 @@ class InterfaceVertex(ComponentVertex):
             artifacts = []
             for artifact in _artifacts:
                 # artifacts = {k.title().replace("_", " "): v for k, v in self.artifacts.items() if v is not None}
-                artifact = {k.title().replace("_", " "): v for k, v in artifact.items() if v is not None}
-                artifacts.append(artifact)
+                _artifact = {k.title().replace("_", " "): v for k, v in artifact.items() if v is not None}
+                artifacts.append(_artifact)
             return yaml.dump(artifacts, default_flow_style=False, allow_unicode=True)
         return super()._built_object_repr()
 

@@ -387,16 +387,16 @@ class InterfaceVertex(ComponentVertex):
         complete_message = ""
         if is_async:
             async for message in iterator:
-                message = message.content if hasattr(message, "content") else message
-                message = message.text if hasattr(message, "text") else message
-                yield message
-                complete_message += message
+                _message = message.content if hasattr(message, "content") else message
+                _message = _message.text if hasattr(_message, "text") else _message
+                yield _message
+                complete_message += _message
         else:
             for message in iterator:
-                message = message.content if hasattr(message, "content") else message
-                message = message.text if hasattr(message, "text") else message
-                yield message
-                complete_message += message
+                _message = message.content if hasattr(message, "content") else message
+                _message = _message.text if hasattr(_message, "text") else _message
+                yield _message
+                complete_message += _message
 
         files = self.params.get("files", [])
 

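Note: reassigning `message` inside `for message in iterator` trips PLW2901 (redefined loop name), so the normalized value gets a fresh `_message` binding throughout. A reduced sketch:

complete_message = ""
for message in ["a", "b"]:
    _message = message.upper()  # new name instead of rebinding `message`
    complete_message += _message
print(complete_message)  # "AB"
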
@@ -30,12 +30,11 @@ def data_to_text(template: str, data: Data | list[Data], sep: str = "\n") -> str
     if isinstance(data, (Data)):
         data = [data]
     # Check if there are any format strings in the template
-    _data = []
-    for value in data:
+    _data = [
         # If it is not a record, create one with the key "text"
-        if not isinstance(value, Data):
-            value = Data(text=value)
-        _data.append(value)
+        Data(text=value) if not isinstance(value, Data) else value
+        for value in data
+    ]
 
     formated_data = [template.format(data=value.data, **value.data) for value in _data]
     return sep.join(formated_data)

@@ -11,7 +11,7 @@ _InputTypesMap: dict[str, type["InputTypes"]] | None = None
 
 
 def get_InputTypesMap():
-    global _InputTypesMap
+    global _InputTypesMap  # noqa: PLW0603
     if _InputTypesMap is None:
         from langflow.inputs.inputs import InputTypesMap
 

@@ -67,7 +67,7 @@ async def get_and_cache_all_types_dict(
     force_refresh: bool = False,
     lock: asyncio.Lock | None = None,
 ):
-    global all_types_dict_cache
+    global all_types_dict_cache  # noqa: PLW0603
     if all_types_dict_cache is None:
         logger.debug("Building langchain types dict")
         all_types_dict_cache = await aget_all_types_dict(settings_service.settings.components_path)

@@ -24,7 +24,7 @@ def upload(file_path: str, host: str, flow_id: str):
     url = f"{host}/api/v1/upload/{flow_id}"
     with Path(file_path).open("rb") as file:
         response = httpx.post(url, files={"file": file})
-        if response.status_code == 200 or response.status_code == 201:
+        if response.status_code in (httpx.codes.OK, httpx.codes.CREATED):
             return response.json()
         msg = f"Error uploading file: {response.status_code}"
         raise Exception(msg)

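Note: this edit fixes PLR1714 and PLR2004 together: the repeated comparison against 200 and 201 becomes one membership test over named codes:

import httpx

ok_statuses = (httpx.codes.OK, httpx.codes.CREATED)
assert 200 in ok_statuses  # the enum members compare equal to the raw integers
assert 201 in ok_statuses
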
@@ -87,7 +87,7 @@ def get_flow(url: str, flow_id: str):
     try:
         flow_url = f"{url}/api/v1/flows/{flow_id}"
         response = httpx.get(flow_url)
-        if response.status_code == 200:
+        if response.status_code == httpx.codes.OK:
             json_response = response.json()
             return FlowBase(**json_response).model_dump()
         msg = f"Error retrieving flow: {response.status_code}"

@@ -4,13 +4,13 @@ LOGGING_CONFIGURED = False
 
 
 def disable_logging():
-    global LOGGING_CONFIGURED
+    global LOGGING_CONFIGURED  # noqa: PLW0603
     if not LOGGING_CONFIGURED:
         logger.disable("langflow")
         LOGGING_CONFIGURED = True
 
 
 def enable_logging():
-    global LOGGING_CONFIGURED
+    global LOGGING_CONFIGURED  # noqa: PLW0603
     logger.enable("langflow")
     LOGGING_CONFIGURED = True

@@ -36,6 +36,9 @@ from langflow.services.utils import initialize_services, teardown_services
 warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20)
 
 
+MAX_PORT = 65535
+
+
 class RequestCancelledMiddleware(BaseHTTPMiddleware):
     def __init__(self, app):
         super().__init__(app)

@@ -75,7 +78,11 @@ class JavaScriptMIMETypeMiddleware(BaseHTTPMiddleware):
                 error_messages = json.dumps([message, str(exc)])
                 raise HTTPException(status_code=HTTPStatus.INTERNAL_SERVER_ERROR, detail=error_messages) from exc
             raise exc
-        if "files/" not in request.url.path and request.url.path.endswith(".js") and response.status_code == 200:
+        if (
+            "files/" not in request.url.path
+            and request.url.path.endswith(".js")
+            and response.status_code == HTTPStatus.OK
+        ):
             response.headers["Content-Type"] = "text/javascript"
         return response
 

@@ -176,7 +183,7 @@ def create_app():
     if prome_port_str := os.environ.get("LANGFLOW_PROMETHEUS_PORT"):
         # set here for create_app() entry point
         prome_port = int(prome_port_str)
-        if prome_port > 0 or prome_port < 65535:
+        if prome_port > 0 or prome_port < MAX_PORT:
             rprint(f"[bold green]Starting Prometheus server on port {prome_port}...[/bold green]")
             settings.prometheus_enabled = True
             settings.prometheus_port = prome_port

@@ -141,8 +141,8 @@ def apply_tweaks(node: dict[str, Any], node_tweaks: dict[str, Any]) -> None:
     if tweak_name in template_data:
         if isinstance(tweak_value, dict):
             for k, v in tweak_value.items():
-                k = "file_path" if template_data[tweak_name]["type"] == "file" else k
-                template_data[tweak_name][k] = v
+                _k = "file_path" if template_data[tweak_name]["type"] == "file" else k
+                template_data[tweak_name][_k] = v
         else:
             key = "file_path" if template_data[tweak_name]["type"] == "file" else "value"
             template_data[tweak_name][key] = tweak_value

@@ -27,6 +27,8 @@ API_KEY_NAME = "x-api-key"
 api_key_query = APIKeyQuery(name=API_KEY_NAME, scheme_name="API key query", auto_error=False)
 api_key_header = APIKeyHeader(name=API_KEY_NAME, scheme_name="API key header", auto_error=False)
 
+MINIMUM_KEY_LENGTH = 32
+
 
 # Source: https://github.com/mrtolkien/fastapi_simple_security/blob/master/fastapi_simple_security/security_api_key.py
 async def api_key_security(

@@ -346,7 +348,7 @@ def add_padding(s):
 
 def ensure_valid_key(s: str) -> bytes:
     # If the key is too short, we'll use it as a seed to generate a valid key
-    if len(s) < 32:
+    if len(s) < MINIMUM_KEY_LENGTH:
         # Use the input as a seed for the random number generator
         random.seed(s)
         # Generate 32 random bytes

@@ -22,6 +22,8 @@ if TYPE_CHECKING:
     from langflow.services.database.models.user import User
     from langflow.services.database.models.vertex_builds.model import VertexBuildTable
 
+HEX_COLOR_LENGTH = 7
+
 
 class FlowBase(SQLModel):
     name: str = Field(index=True)

@@ -64,7 +66,7 @@ class FlowBase(SQLModel):
             raise ValueError(msg)
 
         # validate that it is a valid hex color
-        if v and len(v) != 7:
+        if v and len(v) != HEX_COLOR_LENGTH:
             msg = "Icon background color must be 7 characters long"
             raise ValueError(msg)
         return v

@@ -269,20 +269,19 @@ class Settings(BaseSettings):
         else:
             logger.debug(f"Creating new database at {new_pre_path}")
             final_path = new_pre_path
-    else:
-        if Path(new_path).exists():
-            logger.debug(f"Database already exists at {new_path}, using it")
-            final_path = new_path
-        elif Path("./{db_file_name}").exists():
-            try:
-                logger.debug("Copying existing database to new location")
-                copy2("./{db_file_name}", new_path)
-                logger.debug(f"Copied existing database to {new_path}")
-            except Exception:
-                logger.exception("Failed to copy database, using default path")
-                new_path = "./{db_file_name}"
-        else:
-            final_path = new_path
+    elif Path(new_path).exists():
+        logger.debug(f"Database already exists at {new_path}, using it")
+        final_path = new_path
+    elif Path("./{db_file_name}").exists():
+        try:
+            logger.debug("Copying existing database to new location")
+            copy2("./{db_file_name}", new_path)
+            logger.debug(f"Copied existing database to {new_path}")
+        except Exception:
+            logger.exception("Failed to copy database, using default path")
+            new_path = "./{db_file_name}"
+    else:
+        final_path = new_path
 
     if final_path is None:
         final_path = new_pre_path if is_pre_release else new_path

@@ -333,20 +332,19 @@
             logger.debug(f"Updating {key}")
             if isinstance(getattr(self, key), list):
                 # value might be a '[something]' string
+                _value = value
                 with contextlib.suppress(json.decoder.JSONDecodeError):
-                    value = orjson.loads(str(value))
-                if isinstance(value, list):
-                    for item in value:
-                        if isinstance(item, Path):
-                            item = str(item)
-                        if item not in getattr(self, key):
-                            getattr(self, key).append(item)
+                    _value = orjson.loads(str(value))
+                if isinstance(_value, list):
+                    for item in _value:
+                        _item = str(item) if isinstance(item, Path) else item
+                        if _item not in getattr(self, key):
+                            getattr(self, key).append(_item)
                     logger.debug(f"Extended {key}")
                 else:
-                    if isinstance(value, Path):
-                        value = str(value)
-                    if value not in getattr(self, key):
-                        getattr(self, key).append(value)
+                    _value = str(_value) if isinstance(_value, Path) else _value
+                    if _value not in getattr(self, key):
+                        getattr(self, key).append(_value)
                     logger.debug(f"Appended {key}")
 
             else:

@@ -42,7 +42,7 @@ async def user_data_context(store_service: StoreService, api_key: str | None = N
             )
             user_data_var.set(user_data[0])
         except HTTPStatusError as exc:
-            if exc.response.status_code == 403:
+            if exc.response.status_code == httpx.codes.FORBIDDEN:
                 msg = "Invalid API key"
                 raise ValueError(msg) from exc
     try:

@@ -487,7 +487,7 @@ class StoreService(Service):
             timeout=self.timeout,
         )
         response.raise_for_status()
-        if response.status_code == 200:
+        if response.status_code == httpx.codes.OK:
             result = response.json()
 
             if isinstance(result, list):

@@ -543,10 +543,10 @@
             if metadata:
                 comp_count = metadata.get("filter_count", 0)
         except HTTPStatusError as exc:
-            if exc.response.status_code == 403:
+            if exc.response.status_code == httpx.codes.FORBIDDEN:
                 msg = "You are not authorized to access this public resource"
                 raise ForbiddenError(msg) from exc
-            if exc.response.status_code == 401:
+            if exc.response.status_code == httpx.codes.UNAUTHORIZED:
                 msg = "You are not authorized to access this resource. Please check your API key."
                 raise APIKeyError(msg) from exc
         except Exception as exc:

@@ -565,10 +565,10 @@ class StoreService(Service):
             elif not metadata:
                 comp_count = 0
         except HTTPStatusError as exc:
-            if exc.response.status_code == 403:
+            if exc.response.status_code == httpx.codes.FORBIDDEN:
                 msg = "You are not authorized to access this public resource"
                 raise ForbiddenError(msg) from exc
-            if exc.response.status_code == 401:
+            if exc.response.status_code == httpx.codes.UNAUTHORIZED:
                 msg = "You are not authorized to access this resource. Please check your API key."
                 raise APIKeyError(msg) from exc
 

@@ -44,7 +44,7 @@ async def update_components_with_user_data(
 def get_lf_version_from_pypi():
     try:
         response = httpx.get("https://pypi.org/pypi/langflow/json")
-        if response.status_code != 200:
+        if response.status_code != httpx.codes.OK:
             return None
         return response.json()["info"]["version"]
     except Exception:

@@ -67,7 +67,7 @@ class TelemetryService(Service):
         try:
             payload_dict = payload.model_dump(exclude_none=True, exclude_unset=True)
             response = await self.client.get(url, params=payload_dict)
-            if response.status_code != 200:
+            if response.status_code != httpx.codes.OK:
                 logger.error(f"Failed to send telemetry data: {response.status_code} {response.text}")
             else:
                 logger.debug("Telemetry data sent successfully.")

@@ -123,7 +123,6 @@ class KubernetesSecretService(VariableService, Service):
 
         secret_key, _ = self.resolve_variable(secret_name, user_id, name)
         self.kubernetes_secrets.delete_secret_key(name=secret_name, key=secret_key)
-        return
 
     def delete_variable_by_id(self, user_id: UUID | str, variable_id: UUID | str, _session: Session) -> None:
         self.delete_variable(user_id, _session, str(variable_id))

@@ -1,4 +1,5 @@
 from base64 import b64decode, b64encode
+from http import HTTPStatus
 from uuid import UUID
 
 from kubernetes import client, config  # type: ignore

@@ -71,7 +72,7 @@ class KubernetesSecretManager:
             return self.core_api.replace_namespaced_secret(secret_name, self.namespace, existing_secret)
 
         except ApiException as e:
-            if e.status == 404:
+            if e.status == HTTPStatus.NOT_FOUND:
                 # Secret doesn't exist, create a new one
                 return self.create_secret(secret_name, data)
             logger.exception(f"Error upserting secret {secret_name}")

@@ -91,7 +92,7 @@
             secret = self.core_api.read_namespaced_secret(name, self.namespace)
             return {k: b64decode(v).decode() for k, v in secret.data.items()}
         except ApiException as e:
-            if e.status == 404:
+            if e.status == HTTPStatus.NOT_FOUND:
                 return None
             raise
 

@@ -2,7 +2,7 @@ DEV = False
 
 
 def _set_dev(value):
-    global DEV
+    global DEV  # noqa: PLW0603
     DEV = value
 
 

@@ -57,10 +57,11 @@ def build_json(root, graph) -> dict:
     template = root.data["node"]["template"]
     final_dict = template.copy()
 
-    for key, value in final_dict.items():
+    for key in final_dict:
         if key == "_type":
             continue
 
+        value = final_dict[key]
         node_type = value["type"]
 
         if "value" in value and value["value"] is not None:

@@ -48,6 +48,7 @@ ignore = [
     "FIX002", # Line contains TODO
     "ISC001", # Messes with the formatter
     "PERF203", # Rarely useful
+    "PLR09", # Too many something (arg, statements, etc)
     "RUF012", # Pydantic models are currently not well detected. See https://github.com/astral-sh/ruff/issues/13630
     "TD002", # Missing author in TODO
     "TD003", # Missing issue link in TODO

@@ -61,7 +62,6 @@ ignore = [
     "EXE",
     "FBT",
     "N",
-    "PL",
     "RUF006", # Store a reference to the return value of `asyncio.create_task`
     "S",
     "SLF",

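Note: these last two pyproject.toml hunks are what actually switch the rules on. Deleting "PL" from an ignore list activates the pylint-ported rule groups (PLC, PLE, PLR, PLW), while the newly added "PLR09" entry keeps the "too many arguments/branches/statements" family (PLR09xx) disabled. Every other hunk in this commit either satisfies a newly active rule or marks a deliberate exception with a targeted noqa.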