ref: Fix ruff rules in preview mode (#4039)

Fix some ruff rules from preview mode
This commit is contained in:
Christophe Bornet 2024-10-08 21:31:13 +02:00 committed by GitHub
commit da6b5eadbc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
86 changed files with 262 additions and 7508 deletions

View file

@ -68,7 +68,7 @@ def set_var_for_macos_issue():
# we need to set this var if we are running on MacOS
# otherwise we get an error when running gunicorn
if platform.system() in ["Darwin"]:
if platform.system() == "Darwin":
import os
os.environ["OBJC_DISABLE_INITIALIZE_FORK_SAFETY"] = "YES"
@ -202,7 +202,7 @@ def run(
return
process: Process | None = None
try:
if platform.system() in ["Windows"]:
if platform.system() == "Windows":
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
# MacOS requires an env variable to be set to use gunicorn
@ -369,7 +369,7 @@ def run_langflow(host, port, log_level, options, app):
Run Langflow server on localhost
"""
if platform.system() in ["Windows"]:
if platform.system() == "Windows":
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
# MacOS requires an env variable to be set to use gunicorn
@ -538,7 +538,7 @@ def api_key_banner(unmasked_api_key):
f"[bold blue]{unmasked_api_key.api_key}[/bold blue]\n\n"
"This is the only time the API key will be displayed. \n"
"Make sure to store it in a secure location. \n\n"
f"The API key has been copied to your clipboard. [bold]{['Ctrl','Cmd'][is_mac]} + V[/bold] to paste it.",
f"The API key has been copied to your clipboard. [bold]{['Ctrl', 'Cmd'][is_mac]} + V[/bold] to paste it.",
box=box.ROUNDED,
border_style="blue",
expand=False,

View file

@ -2,4 +2,4 @@ from langflow.api.health_check_router import health_check_router
from langflow.api.log_router import log_router
from langflow.api.router import router
__all__ = ["router", "health_check_router", "log_router"]
__all__ = ["health_check_router", "log_router", "router"]

View file

@ -1,4 +1,5 @@
import uuid
from typing import Annotated
from fastapi import APIRouter, Depends, HTTPException, status
from loguru import logger
@ -37,7 +38,7 @@ async def health():
# It's a reliable health check for a langflow instance
@health_check_router.get("/health_check", response_model=HealthResponse)
async def health_check(
session: Session = Depends(get_session),
session: Annotated[Session, Depends(get_session)],
):
response = HealthResponse()
# use a fixed valid UUID so that a UUID collision is very unlikely

View file

@ -1,7 +1,7 @@
import asyncio
import json
from http import HTTPStatus
from typing import Any
from typing import Annotated, Any
from fastapi import APIRouter, HTTPException, Query, Request
from fastapi.responses import JSONResponse, StreamingResponse
@ -41,7 +41,7 @@ async def event_generator(request: Request):
last_read_item = item
if to_write:
for ts, msg in to_write:
yield f"{json.dumps({ts:msg})}\n\n"
yield f"{json.dumps({ts: msg})}\n\n"
else:
current_not_sent += 1
if current_not_sent == NUMBER_OF_NOT_SENT_BEFORE_KEEPALIVE:
@ -72,9 +72,9 @@ async def stream_logs(
@log_router.get("/logs")
async def logs(
lines_before: int = Query(0, description="The number of logs before the timestamp or the last log"),
lines_after: int = Query(0, description="The number of logs after the timestamp"),
timestamp: int = Query(0, description="The timestamp to start getting logs from"),
lines_before: Annotated[int, Query(description="The number of logs before the timestamp or the last log")] = 0,
lines_after: Annotated[int, Query(description="The number of logs after the timestamp")] = 0,
timestamp: Annotated[int, Query(description="The timestamp to start getting logs from")] = 0,
):
global log_buffer # noqa: PLW0602
if log_buffer.enabled() is False:

View file

@ -45,7 +45,7 @@ def build_input_keys_response(langchain_object, artifacts):
"""Build the input keys response."""
input_keys_response = {
"input_keys": {key: "" for key in langchain_object.input_keys},
"input_keys": dict.fromkeys(langchain_object.input_keys, ""),
"memory_keys": [],
"handle_keys": artifacts.get("handle_keys", []),
}

View file

@ -13,17 +13,17 @@ from langflow.api.v1.validate import router as validate_router
from langflow.api.v1.variable import router as variables_router
__all__ = [
"api_key_router",
"chat_router",
"endpoints_router",
"store_router",
"validate_router",
"flows_router",
"users_router",
"api_key_router",
"login_router",
"variables_router",
"monitor_router",
"files_router",
"flows_router",
"folders_router",
"login_router",
"monitor_router",
"starter_projects_router",
"store_router",
"users_router",
"validate_router",
"variables_router",
]

View file

@ -1,4 +1,4 @@
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Annotated
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Response
@ -21,8 +21,8 @@ router = APIRouter(tags=["APIKey"], prefix="/api_key")
@router.get("/", response_model=ApiKeysResponse)
def get_api_keys_route(
db: Session = Depends(get_session),
current_user: User = Depends(auth_utils.get_current_active_user),
db: Annotated[Session, Depends(get_session)],
current_user: Annotated[User, Depends(auth_utils.get_current_active_user)],
):
try:
user_id = current_user.id
@ -36,8 +36,8 @@ def get_api_keys_route(
@router.post("/", response_model=UnmaskedApiKeyRead)
def create_api_key_route(
req: ApiKeyCreate,
current_user: User = Depends(auth_utils.get_current_active_user),
db: Session = Depends(get_session),
current_user: Annotated[User, Depends(auth_utils.get_current_active_user)],
db: Annotated[Session, Depends(get_session)],
):
try:
user_id = current_user.id
@ -63,8 +63,8 @@ def delete_api_key_route(
def save_store_api_key(
api_key_request: ApiKeyCreateRequest,
response: Response,
current_user: User = Depends(auth_utils.get_current_active_user),
db: Session = Depends(get_session),
current_user: Annotated[User, Depends(auth_utils.get_current_active_user)],
db: Annotated[Session, Depends(get_session)],
settings_service=Depends(get_settings_service),
):
auth_settings = settings_service.auth_settings
@ -95,8 +95,8 @@ def save_store_api_key(
@router.delete("/store")
def delete_store_api_key(
current_user: User = Depends(auth_utils.get_current_active_user),
db: Session = Depends(get_session),
current_user: Annotated[User, Depends(auth_utils.get_current_active_user)],
db: Annotated[Session, Depends(get_session)],
):
try:
current_user.store_api_key = None

View file

@ -172,7 +172,7 @@ async def simple_run_flow_task(
logger.exception(f"Error running flow {flow.id} task")
@router.post("/run/{flow_id_or_name}", response_model=RunResponse, response_model_exclude_none=True)
@router.post("/run/{flow_id_or_name}", response_model=RunResponse, response_model_exclude_none=True) # noqa: RUF100, FAST003
async def simplified_run_flow(
background_tasks: BackgroundTasks,
flow: Annotated[FlowRead | None, Depends(get_flow_by_id_or_endpoint_name)],
@ -296,13 +296,13 @@ async def simplified_run_flow(
raise APIException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, exception=exc, flow=flow) from exc
@router.post("/webhook/{flow_id_or_name}", response_model=dict, status_code=HTTPStatus.ACCEPTED)
@router.post("/webhook/{flow_id_or_name}", response_model=dict, status_code=HTTPStatus.ACCEPTED) # noqa: RUF100, FAST003
async def webhook_run_flow(
flow: Annotated[Flow, Depends(get_flow_by_id_or_endpoint_name)],
user: Annotated[User, Depends(get_user_by_flow_id_or_endpoint_name)],
request: Request,
background_tasks: BackgroundTasks,
telemetry_service: TelemetryService = Depends(get_telemetry_service),
telemetry_service: Annotated[TelemetryService, Depends(get_telemetry_service)],
):
"""
Run a flow using a webhook request.
@ -441,10 +441,9 @@ async def experimental_run_flow(
if inputs is None:
inputs = [InputValueRequest(components=[], input_value="")]
artifacts = {}
if session_id:
session_data = await session_service.load_session(session_id, flow_id=flow_id_str)
graph, artifacts = session_data if session_data else (None, None)
graph, _artifacts = session_data or (None, None)
if graph is None:
msg = f"Session {session_id} not found"
raise ValueError(msg)
@ -585,7 +584,7 @@ def get_version():
@router.post("/custom_component", status_code=HTTPStatus.OK, response_model=CustomComponentResponse)
async def custom_component(
raw_code: CustomComponentRequest,
user: User = Depends(get_current_active_user),
user: Annotated[User, Depends(get_current_active_user)],
):
component = Component(_code=raw_code.code)
@ -600,7 +599,7 @@ async def custom_component(
@router.post("/custom_component/update", status_code=HTTPStatus.OK)
async def custom_component_update(
code_request: UpdateCustomComponentRequest,
user: User = Depends(get_current_active_user),
user: Annotated[User, Depends(get_current_active_user)],
):
"""
Update a custom component with the provided code request.

View file

@ -3,6 +3,7 @@ from datetime import datetime, timezone
from http import HTTPStatus
from io import BytesIO
from pathlib import Path
from typing import Annotated
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, UploadFile
@ -39,7 +40,7 @@ def get_flow_id(
@router.post("/upload/{flow_id}", status_code=HTTPStatus.CREATED)
async def upload_file(
file: UploadFile,
flow_id: UUID = Depends(get_flow_id),
flow_id: Annotated[UUID, Depends(get_flow_id)],
current_user=Depends(get_current_active_user),
session=Depends(get_session),
storage_service: StorageService = Depends(get_storage_service),
@ -68,7 +69,9 @@ async def upload_file(
@router.get("/download/{flow_id}/{file_name}")
async def download_file(file_name: str, flow_id: UUID, storage_service: StorageService = Depends(get_storage_service)):
async def download_file(
file_name: str, flow_id: UUID, storage_service: Annotated[StorageService, Depends(get_storage_service)]
):
try:
flow_id_str = str(flow_id)
extension = file_name.split(".")[-1]
@ -93,7 +96,9 @@ async def download_file(file_name: str, flow_id: UUID, storage_service: StorageS
@router.get("/images/{flow_id}/{file_name}")
async def download_image(file_name: str, flow_id: UUID, storage_service: StorageService = Depends(get_storage_service)):
async def download_image(
file_name: str, flow_id: UUID, storage_service: Annotated[StorageService, Depends(get_storage_service)]
):
try:
extension = file_name.split(".")[-1]
flow_id_str = str(flow_id)
@ -118,7 +123,7 @@ async def download_image(file_name: str, flow_id: UUID, storage_service: Storage
async def download_profile_picture(
folder_name: str,
file_name: str,
storage_service: StorageService = Depends(get_storage_service),
storage_service: Annotated[StorageService, Depends(get_storage_service)],
):
try:
extension = file_name.split(".")[-1]
@ -134,7 +139,7 @@ async def download_profile_picture(
@router.get("/profile_pictures/list")
async def list_profile_pictures(storage_service: StorageService = Depends(get_storage_service)):
async def list_profile_pictures(storage_service: Annotated[StorageService, Depends(get_storage_service)]):
try:
config_dir = get_storage_service().settings_service.settings.config_dir
config_path = Path(config_dir) # type: ignore
@ -156,7 +161,8 @@ async def list_profile_pictures(storage_service: StorageService = Depends(get_st
@router.get("/list/{flow_id}")
async def list_files(
flow_id: UUID = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service)
flow_id: Annotated[UUID, Depends(get_flow_id)],
storage_service: Annotated[StorageService, Depends(get_storage_service)],
):
try:
flow_id_str = str(flow_id)
@ -168,7 +174,9 @@ async def list_files(
@router.delete("/delete/{flow_id}/{file_name}")
async def delete_file(
file_name: str, flow_id: UUID = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service)
file_name: str,
flow_id: Annotated[UUID, Depends(get_flow_id)],
storage_service: Annotated[StorageService, Depends(get_storage_service)],
):
try:
flow_id_str = str(flow_id)

View file

@ -5,6 +5,7 @@ import json
import re
import zipfile
from datetime import datetime, timezone
from typing import Annotated
from uuid import UUID
import orjson
@ -326,7 +327,9 @@ async def upload_file(
@router.delete("/")
async def delete_multiple_flows(
flow_ids: list[UUID], user: User = Depends(get_current_active_user), db: Session = Depends(get_session)
flow_ids: list[UUID],
user: Annotated[User, Depends(get_current_active_user)],
db: Annotated[Session, Depends(get_session)],
):
"""
Delete multiple flows by their IDs.
@ -362,8 +365,8 @@ async def delete_multiple_flows(
@router.post("/download/", status_code=200)
async def download_multiple_file(
flow_ids: list[UUID],
user: User = Depends(get_current_active_user),
db: Session = Depends(get_session),
user: Annotated[User, Depends(get_current_active_user)],
db: Annotated[Session, Depends(get_session)],
):
"""Download all flows as a zip file."""
flows = db.exec(select(Flow).where(and_(Flow.user_id == user.id, Flow.id.in_(flow_ids)))).all() # type: ignore

View file

@ -1,5 +1,7 @@
from __future__ import annotations
from typing import Annotated
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from fastapi.security import OAuth2PasswordRequestForm
from sqlmodel import Session
@ -23,8 +25,8 @@ router = APIRouter(tags=["Login"])
@router.post("/login", response_model=Token)
async def login_to_get_access_token(
response: Response,
form_data: OAuth2PasswordRequestForm = Depends(),
db: Session = Depends(get_session),
form_data: Annotated[OAuth2PasswordRequestForm, Depends()],
db: Annotated[Session, Depends(get_session)],
# _: Session = Depends(get_current_active_user)
settings_service=Depends(get_settings_service),
variable_service: VariableService = Depends(get_variable_service),
@ -82,7 +84,7 @@ async def login_to_get_access_token(
@router.get("/auto_login")
async def auto_login(
response: Response, db: Session = Depends(get_session), settings_service=Depends(get_settings_service)
response: Response, db: Annotated[Session, Depends(get_session)], settings_service=Depends(get_settings_service)
):
auth_settings = settings_service.auth_settings
@ -129,8 +131,8 @@ async def auto_login(
async def refresh_token(
request: Request,
response: Response,
settings_service: SettingsService = Depends(get_settings_service),
db: Session = Depends(get_session),
settings_service: Annotated[SettingsService, Depends(get_settings_service)],
db: Annotated[Session, Depends(get_session)],
):
auth_settings = settings_service.auth_settings

View file

@ -1,3 +1,4 @@
from typing import Annotated
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query
@ -22,8 +23,8 @@ router = APIRouter(prefix="/monitor", tags=["Monitor"])
@router.get("/builds", response_model=VertexBuildMapModel)
async def get_vertex_builds(
flow_id: UUID = Query(),
session: Session = Depends(get_session),
flow_id: Annotated[UUID, Query()],
session: Annotated[Session, Depends(get_session)],
):
try:
vertex_builds = get_vertex_builds_by_flow_id(session, flow_id)
@ -34,8 +35,8 @@ async def get_vertex_builds(
@router.delete("/builds", status_code=204)
async def delete_vertex_builds(
flow_id: UUID = Query(),
session: Session = Depends(get_session),
flow_id: Annotated[UUID, Query()],
session: Annotated[Session, Depends(get_session)],
):
try:
delete_vertex_builds_by_flow_id(session, flow_id)
@ -45,12 +46,12 @@ async def delete_vertex_builds(
@router.get("/messages", response_model=list[MessageResponse])
async def get_messages(
flow_id: str | None = Query(None),
session_id: str | None = Query(None),
sender: str | None = Query(None),
sender_name: str | None = Query(None),
order_by: str | None = Query("timestamp"),
session: Session = Depends(get_session),
session: Annotated[Session, Depends(get_session)],
flow_id: Annotated[str | None, Query()] = None,
session_id: Annotated[str | None, Query()] = None,
sender: Annotated[str | None, Query()] = None,
sender_name: Annotated[str | None, Query()] = None,
order_by: Annotated[str | None, Query()] = "timestamp",
):
try:
stmt = select(MessageTable)
@ -74,8 +75,8 @@ async def get_messages(
@router.delete("/messages", status_code=204)
async def delete_messages(
message_ids: list[UUID],
session: Session = Depends(get_session),
current_user: User = Depends(get_current_active_user),
session: Annotated[Session, Depends(get_session)],
current_user: Annotated[User, Depends(get_current_active_user)],
):
try:
session.exec(delete(MessageTable).where(MessageTable.id.in_(message_ids))) # type: ignore
@ -88,8 +89,8 @@ async def delete_messages(
async def update_message(
message_id: UUID,
message: MessageUpdate,
session: Session = Depends(get_session),
user: User = Depends(get_current_active_user),
session: Annotated[Session, Depends(get_session)],
user: Annotated[User, Depends(get_current_active_user)],
):
try:
db_message = session.get(MessageTable, message_id)
@ -110,9 +111,9 @@ async def update_message(
@router.patch("/messages/session/{old_session_id}", response_model=list[MessageResponse])
async def update_session_id(
old_session_id: str,
new_session_id: str = Query(..., description="The new session ID to update to"),
session: Session = Depends(get_session),
current_user: User = Depends(get_current_active_user),
new_session_id: Annotated[str, Query(..., description="The new session ID to update to")],
session: Annotated[Session, Depends(get_session)],
current_user: Annotated[User, Depends(get_current_active_user)],
):
try:
# Get all messages with the old session ID
@ -143,7 +144,7 @@ async def update_session_id(
@router.delete("/messages/session/{session_id}", status_code=204)
async def delete_messages_session(
session_id: str,
session: Session = Depends(get_session),
session: Annotated[Session, Depends(get_session)],
):
try:
session.exec( # type: ignore
@ -159,8 +160,8 @@ async def delete_messages_session(
@router.get("/transactions", response_model=list[TransactionReadResponse])
async def get_transactions(
flow_id: UUID = Query(),
session: Session = Depends(get_session),
flow_id: Annotated[UUID, Query()],
session: Annotated[Session, Depends(get_session)],
):
try:
transactions = get_transactions_by_flow_id(session, flow_id)

View file

@ -58,8 +58,8 @@ def check_if_store_is_enabled(
@router.get("/check/api_key")
async def check_if_store_has_api_key(
api_key: str | None = Depends(get_optional_user_store_api_key),
store_service: StoreService = Depends(get_store_service),
api_key: Annotated[str | None, Depends(get_optional_user_store_api_key)],
store_service: Annotated[StoreService, Depends(get_store_service)],
):
if api_key is None:
return {"has_api_key": False, "is_valid": False}
@ -75,8 +75,8 @@ async def check_if_store_has_api_key(
@router.post("/components/", response_model=CreateComponentResponse, status_code=201)
async def share_component(
component: StoreComponentCreate,
store_service: StoreService = Depends(get_store_service),
store_api_key: str = Depends(get_user_store_api_key),
store_service: Annotated[StoreService, Depends(get_store_service)],
store_api_key: Annotated[str, Depends(get_user_store_api_key)],
):
try:
await check_langflow_version(component)
@ -89,8 +89,8 @@ async def share_component(
async def update_shared_component(
component_id: UUID,
component: StoreComponentCreate,
store_service: StoreService = Depends(get_store_service),
store_api_key: str = Depends(get_user_store_api_key),
store_service: Annotated[StoreService, Depends(get_store_service)],
store_api_key: Annotated[str, Depends(get_user_store_api_key)],
):
try:
await check_langflow_version(component)
@ -139,8 +139,8 @@ async def get_components(
@router.get("/components/{component_id}", response_model=DownloadComponentResponse)
async def download_component(
component_id: UUID,
store_service: StoreService = Depends(get_store_service),
store_api_key: str = Depends(get_user_store_api_key),
store_service: Annotated[StoreService, Depends(get_store_service)],
store_api_key: Annotated[str, Depends(get_user_store_api_key)],
):
try:
component = await store_service.download(store_api_key, component_id)
@ -157,7 +157,7 @@ async def download_component(
@router.get("/tags", response_model=list[TagResponse])
async def get_tags(
store_service: StoreService = Depends(get_store_service),
store_service: Annotated[StoreService, Depends(get_store_service)],
):
try:
return await store_service.get_tags()
@ -169,8 +169,8 @@ async def get_tags(
@router.get("/users/likes", response_model=list[UsersLikesResponse])
async def get_list_of_components_liked_by_user(
store_service: StoreService = Depends(get_store_service),
store_api_key: str = Depends(get_user_store_api_key),
store_service: Annotated[StoreService, Depends(get_store_service)],
store_api_key: Annotated[str, Depends(get_user_store_api_key)],
):
try:
return await store_service.get_user_likes(store_api_key)
@ -183,8 +183,8 @@ async def get_list_of_components_liked_by_user(
@router.post("/users/likes/{component_id}", response_model=UsersLikesResponse)
async def like_component(
component_id: UUID,
store_service: StoreService = Depends(get_store_service),
store_api_key: str = Depends(get_user_store_api_key),
store_service: Annotated[StoreService, Depends(get_store_service)],
store_api_key: Annotated[str, Depends(get_user_store_api_key)],
):
try:
result = await store_service.like_component(store_api_key, str(component_id))

View file

@ -1,3 +1,4 @@
from typing import Annotated
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException
@ -24,7 +25,7 @@ router = APIRouter(tags=["Users"], prefix="/users")
@router.post("/", response_model=UserRead, status_code=201)
def add_user(
user: UserCreate,
session: Session = Depends(get_session),
session: Annotated[Session, Depends(get_session)],
settings_service=Depends(get_settings_service),
) -> User:
"""
@ -49,7 +50,7 @@ def add_user(
@router.get("/whoami", response_model=UserRead)
def read_current_user(
current_user: User = Depends(get_current_active_user),
current_user: Annotated[User, Depends(get_current_active_user)],
) -> User:
"""
Retrieve the current user's data.
@ -57,7 +58,7 @@ def read_current_user(
return current_user
@router.get("/", response_model=UsersResponse)
@router.get("/")
def read_all_users(
skip: int = 0,
limit: int = 10,
@ -83,8 +84,8 @@ def read_all_users(
def patch_user(
user_id: UUID,
user_update: UserUpdate,
user: User = Depends(get_current_active_user),
session: Session = Depends(get_session),
user: Annotated[User, Depends(get_current_active_user)],
session: Annotated[Session, Depends(get_session)],
) -> User:
"""
Update an existing user's data.
@ -113,8 +114,8 @@ def patch_user(
def reset_password(
user_id: UUID,
user_update: UserUpdate,
user: User = Depends(get_current_active_user),
session: Session = Depends(get_session),
user: Annotated[User, Depends(get_current_active_user)],
session: Annotated[Session, Depends(get_session)],
) -> User:
"""
Reset a user's password.
@ -134,11 +135,11 @@ def reset_password(
return user
@router.delete("/{user_id}", response_model=dict)
@router.delete("/{user_id}")
def delete_user(
user_id: UUID,
current_user: User = Depends(get_current_active_superuser),
session: Session = Depends(get_session),
current_user: Annotated[User, Depends(get_current_active_superuser)],
session: Annotated[Session, Depends(get_session)],
) -> dict:
"""
Delete a user from the database.

View file

@ -23,15 +23,15 @@ __all__ = [
"chains",
"documentloaders",
"embeddings",
"prompts",
"prototypes",
"models",
"helpers",
"inputs",
"link_extractors",
"memories",
"outputs",
"models",
"output_parsers",
"outputs",
"prompts",
"prototypes",
"retrievers",
"textsplitters",
"toolkits",

View file

@ -68,7 +68,7 @@ class CrewAIAgentComponent(Component):
]
def build_output(self) -> Agent:
kwargs = self.kwargs if self.kwargs else {}
kwargs = self.kwargs or {}
agent = Agent(
role=self.role,
goal=self.goal,
@ -76,7 +76,7 @@ class CrewAIAgentComponent(Component):
llm=self.llm,
verbose=self.verbose,
memory=self.memory,
tools=self.tools if self.tools else [],
tools=self.tools or [],
allow_delegation=self.allow_delegation,
allow_code_execution=self.allow_code_execution,
**kwargs,

View file

@ -114,7 +114,7 @@ class SequentialTaskAgentComponent(Component):
llm=self.llm,
verbose=self.verbose,
memory=self.memory,
tools=self.tools if self.tools else [],
tools=self.tools or [],
allow_delegation=self.allow_delegation,
allow_code_execution=self.allow_code_execution,
**agent_kwargs,

View file

@ -143,7 +143,7 @@ class AssemblyAITranscriptionJobCreator(Component):
self.status = "Error: Expected Number of Speakers must be a valid integer"
return Data(data={"error": "Error: Expected Number of Speakers must be a valid integer"})
language_code = self.language_code if self.language_code else None
language_code = self.language_code or None
config = aai.TranscriptionConfig(
speech_model=self.speech_model,

View file

@ -10,10 +10,10 @@ from .run import AssistantsRun
__all__ = [
"AstraAssistantManager",
"AssistantsCreateAssistant",
"AssistantsCreateThread",
"AssistantsGetAssistantName",
"AssistantsListAssistants",
"AssistantsCreateThread",
"AssistantsRun",
"GetEnvVar",
"Dotenv",
"GetEnvVar",
]

View file

@ -125,7 +125,7 @@ class APIRequestComponent(Component):
body = None
raise ValueError(msg) from e
data = body if body else None
data = body or None
try:
response = await client.request(method, url, headers=headers, json=data, timeout=timeout)

View file

@ -48,5 +48,5 @@ class FileComponent(Component):
raise ValueError(msg)
data = parse_text_file_to_data(resolved_path, silent_errors)
self.status = data if data else "No data"
self.status = data or "No data"
return data or Data()

View file

@ -110,7 +110,7 @@ class GoogleDriveSearchComponent(Component):
creds = Credentials.from_authorized_user_info(token_info)
# Use the query string from the input (which might have been edited by the user)
query = self.query_string if self.query_string else self.generate_query_string()
query = self.query_string or self.generate_query_string()
# Initialize the Google Drive API service
service = build("drive", "v3", credentials=creds)

View file

@ -14,8 +14,8 @@ class ShouldRunNextComponent(CustomComponent):
template = (
"Given the following question and the context below, answer with a yes or no.\n\n"
"{error_message}\n\n"
"Question: {question}\n\n"
"Context: {context}\n\n"
"Question: {question}\n\n" # noqa: RUF100, RUF027
"Context: {context}\n\n" # noqa: RUF100, RUF027
"Answer:"
)

View file

@ -8,14 +8,14 @@ __all__ = [
"ConditionalRouterComponent",
"ExtractKeyFromDataComponent",
"FlowToolComponent",
"ListenComponent",
"ListFlowsComponent",
"ListenComponent",
"MergeDataComponent",
"NotifyComponent",
"PythonFunctionComponent",
"RunFlowComponent",
"SplitTextComponent",
"SQLExecutorComponent",
"SubFlowComponent",
"SelectivePassThroughComponent",
"SplitTextComponent",
"SubFlowComponent",
]

View file

@ -15,9 +15,9 @@ __all__ = [
"AstraVectorizeComponent",
"AzureOpenAIEmbeddingsComponent",
"CohereEmbeddingsComponent",
"GoogleGenerativeAIEmbeddingsComponent",
"HuggingFaceInferenceAPIEmbeddingsComponent",
"OllamaEmbeddingsComponent",
"OpenAIEmbeddingsComponent",
"VertexAIEmbeddingsComponent",
"GoogleGenerativeAIEmbeddingsComponent",
]

View file

@ -86,7 +86,7 @@ class DataConditionalRouterComponent(Component):
else:
false_output.append(item)
self.stop("false_output" if true_output else "true_output")
return true_output if true_output else false_output
return true_output or false_output
if not self.validate_input(self.data_input):
return Data(data={"error": self.status})
result = self.process_single_data(self.data_input)

View file

@ -116,8 +116,5 @@ class MemoryComponent(Component):
return Message(text=stored_text)
def build_lc_memory(self) -> BaseChatMemory:
if self.memory:
chat_memory = self.memory
else:
chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)
chat_memory = self.memory or LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)
return ConversationBufferMemory(chat_memory=chat_memory)

View file

@ -10,15 +10,15 @@ from .SplitText import SplitTextComponent
from .StoreMessage import StoreMessageComponent
__all__ = [
"CreateListComponent",
"CombineTextComponent",
"CreateListComponent",
"CustomComponent",
"FilterDataComponent",
"IDGeneratorComponent",
"ListComponent",
"MemoryComponent",
"MergeDataComponent",
"ParseDataComponent",
"SplitTextComponent",
"StoreMessageComponent",
"ListComponent",
]

View file

@ -1,4 +1,4 @@
### JSON Document Builder
# JSON Document Builder
# Build a Document containing a JSON object using a key and another Document page content.

View file

@ -92,12 +92,12 @@ class SpiderTool(Component):
parameters = self.params["data"]
else:
parameters = {
"limit": self.limit if self.limit else None,
"depth": self.depth if self.depth else None,
"blacklist": self.blacklist if self.blacklist else None,
"whitelist": self.whitelist if self.whitelist else None,
"limit": self.limit or None,
"depth": self.depth or None,
"blacklist": self.blacklist or None,
"whitelist": self.whitelist or None,
"readability": self.readability,
"request_timeout": self.request_timeout if self.request_timeout else None,
"request_timeout": self.request_timeout or None,
"metadata": self.metadata,
"return_format": "markdown",
}

View file

@ -16,13 +16,13 @@ __all__ = [
"AmazonBedrockComponent",
"AnthropicModelComponent",
"AzureChatOpenAIComponent",
"QianfanChatEndpointComponent",
"ChatOllamaComponent",
"ChatVertexAIComponent",
"CohereComponent",
"GoogleGenerativeAIComponent",
"HuggingFaceEndpointsComponent",
"ChatOllamaComponent",
"OpenAIModelComponent",
"ChatVertexAIComponent",
"PerplexityComponent",
"QianfanChatEndpointComponent",
"base",
]

View file

@ -99,7 +99,7 @@ class LangChainHubPromptComponent(Component):
template = self._fetch_langchain_hub_template()
# Get the parameters from the attributes
original_params = {k[6:] if k.startswith("param_") else k: v for k, v in self._attributes.items()}
original_params = {k.removeprefix("param_"): v for k, v in self._attributes.items()}
prompt_value = template.invoke(original_params)
original_params["template"] = prompt_value.to_string()

View file

@ -1,4 +1,4 @@
from .LangChainHubPrompt import LangChainHubPromptComponent
from .Prompt import PromptComponent
__all__ = ["PromptComponent", "LangChainHubPromptComponent"]
__all__ = ["LangChainHubPromptComponent", "PromptComponent"]

View file

@ -111,7 +111,7 @@ class RunnableExecComponent(Component):
if input_key in runnable.input_keys:
input_dict[input_key] = input_value
else:
input_dict = {k: input_value for k in runnable.input_keys}
input_dict = dict.fromkeys(runnable.input_keys, input_value)
status = f"Warning: The input key is not '{input_key}'. The input key is '{runnable.input_keys}'."
return input_dict, status

View file

@ -13,6 +13,7 @@ from .UpdateData import UpdateDataComponent
__all__ = [
"ConditionalRouterComponent",
"CreateDataComponent",
"FlowToolComponent",
"ListenComponent",
"NotifyComponent",
@ -22,6 +23,5 @@ __all__ = [
"RunnableExecComponent",
"SQLExecutorComponent",
"SubFlowComponent",
"CreateDataComponent",
"UpdateDataComponent",
]

View file

@ -6,6 +6,6 @@ from .RecursiveCharacterTextSplitter import RecursiveCharacterTextSplitterCompon
__all__ = [
"CharacterTextSplitterComponent",
"LanguageRecursiveTextSplitterComponent",
"RecursiveCharacterTextSplitterComponent",
"NaturalLanguageTextSplitterComponent",
"RecursiveCharacterTextSplitterComponent",
]

View file

@ -3,7 +3,7 @@ from .Metaphor import MetaphorToolkit
from .VectorStoreInfo import VectorStoreInfoComponent
__all__ = [
"ComposioAPIComponent",
"MetaphorToolkit",
"VectorStoreInfoComponent",
"ComposioAPIComponent",
]

View file

@ -13,17 +13,17 @@ from .WikipediaAPI import WikipediaAPIComponent
from .WolframAlphaAPI import WolframAlphaAPIComponent
__all__ = [
"RetrieverToolComponent",
"BingSearchAPIComponent",
"CalculatorToolComponent",
"GleanSearchAPIComponent",
"GoogleSearchAPIComponent",
"GoogleSerperAPIComponent",
"PythonCodeStructuredTool",
"PythonREPLToolComponent",
"SearchAPIComponent",
"RetrieverToolComponent",
"SearXNGToolComponent",
"SearchAPIComponent",
"SerpAPIComponent",
"WikipediaAPIComponent",
"WolframAlphaAPIComponent",
"CalculatorToolComponent",
]

View file

@ -129,8 +129,8 @@ class ElasticsearchVectorStoreComponent(LCVectorStoreComponent):
es_params = {
"index_name": self.index_name,
"embedding": self.embedding,
"es_user": self.username if self.username else None,
"es_password": self.password if self.password else None,
"es_user": self.username or None,
"es_password": self.password or None,
}
if self.cloud_id:

View file

@ -65,15 +65,15 @@ class QdrantVectorStoreComponent(LCVectorStoreComponent):
}
server_kwargs = {
"host": self.host if self.host else None,
"host": self.host or None,
"port": int(self.port), # Ensure port is an integer
"grpc_port": int(self.grpc_port), # Ensure grpc_port is an integer
"api_key": self.api_key,
"prefix": self.prefix,
# Ensure timeout is an integer
"timeout": int(self.timeout) if self.timeout else None,
"path": self.path if self.path else None,
"url": self.url if self.url else None,
"path": self.path or None,
"url": self.url or None,
}
server_kwargs = {k: v for k, v in server_kwargs.items() if v is not None}

View file

@ -1,4 +1,4 @@
from langflow.custom.custom_component.component import Component
from langflow.custom.custom_component.custom_component import CustomComponent
__all__ = ["CustomComponent", "Component"]
__all__ = ["Component", "CustomComponent"]

View file

@ -2,6 +2,7 @@ import ast
import contextlib
import inspect
import traceback
from itertools import starmap
from pathlib import Path
from typing import Any
@ -161,7 +162,7 @@ class CodeParser:
if " as " in module:
module, alias = module.split(" as ")
if module in return_type_str or (alias and alias in return_type_str):
exec(f"import {module} as {alias if alias else module}", eval_env)
exec(f"import {module} as {alias or module}", eval_env)
return eval_env
def parse_callable_details(self, node: ast.FunctionDef) -> dict[str, Any]:
@ -218,7 +219,7 @@ class CodeParser:
defaults = missing_defaults + default_values
return [self.parse_arg(arg, default) for arg, default in zip(node.args.args, defaults, strict=True)]
return list(starmap(self.parse_arg, zip(node.args.args, defaults, strict=True)))
def parse_varargs(self, node: ast.FunctionDef) -> list[dict[str, Any]]:
"""
@ -239,7 +240,7 @@ class CodeParser:
ast.unparse(default) if default else None for default in node.args.kw_defaults
]
return [self.parse_arg(arg, default) for arg, default in zip(node.args.kwonlyargs, kw_defaults, strict=True)]
return list(starmap(self.parse_arg, zip(node.args.kwonlyargs, kw_defaults, strict=True)))
def parse_kwargs(self, node: ast.FunctionDef) -> list[dict[str, Any]]:
"""

View file

@ -590,9 +590,9 @@ class Component(CustomComponent):
return frontend_node
def to_frontend_node(self):
#! This part here is clunky but we need it like this for
#! backwards compatibility. We can change how prompt component
#! works and then update this later
# ! This part here is clunky but we need it like this for
# ! backwards compatibility. We can change how prompt component
# ! works and then update this later
field_config = self.get_template_config(self)
frontend_node = ComponentFrontendNode.from_inputs(**field_config)
for key in self._inputs:

View file

@ -78,7 +78,7 @@ class DirectoryReader:
component_tuple = (*build_component(component), component)
components.append(component_tuple)
except Exception as e:
logger.debug(f"Error while loading component { component['name']}")
logger.debug(f"Error while loading component {component['name']}")
logger.debug(e)
continue
items.append({"name": menu["name"], "path": menu["path"], "components": components})

View file

@ -60,8 +60,9 @@ def __getattr__(name: str) -> Any:
__all__ = [
"AgentExecutor",
"BaseChatMemory",
"BaseLanguageModel",
"BaseChatModel",
"BaseLLM",
"BaseLanguageModel",
"BaseLoader",
"BaseMemory",
"BaseOutputParser",
@ -75,15 +76,14 @@ __all__ = [
"Document",
"Embeddings",
"Input",
"LanguageModel",
"NestedDict",
"Object",
"PromptTemplate",
"RangeSpec",
"Retriever",
"Text",
"TextSplitter",
"Tool",
"VectorStore",
"BaseChatModel",
"Retriever",
"Text",
"LanguageModel",
]

View file

@ -3,4 +3,4 @@ from langflow.graph.graph.base import Graph
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import CustomComponentVertex, InterfaceVertex, StateVertex
__all__ = ["Edge", "Graph", "Vertex", "CustomComponentVertex", "InterfaceVertex", "StateVertex"]
__all__ = ["CustomComponentVertex", "Edge", "Graph", "InterfaceVertex", "StateVertex", "Vertex"]

View file

@ -118,7 +118,7 @@ def build_sugiyama_layout(vertexes, edges):
for vertex in vertexes.values():
vertex.view = VertexViewer(vertex.data)
minw = min([v.view.w for v in vertexes.values()])
minw = min(v.view.w for v in vertexes.values())
for edge in edges:
edge.view = EdgeViewer()

View file

@ -353,7 +353,7 @@ class Graph:
raise ValueError(msg)
if config is not None:
self.__apply_config(config)
#! Change this ASAP
# ! Change this ASAP
nest_asyncio.apply()
loop = asyncio.get_event_loop()
async_gen = self.async_start(inputs, max_iterations, event_manager)
@ -1555,7 +1555,7 @@ class Graph:
ValueError: If the graph contains a cycle.
"""
# States: 0 = unvisited, 1 = visiting, 2 = visited
state = {vertex: 0 for vertex in self.vertices}
state = dict.fromkeys(self.vertices, 0)
sorted_vertices = []
def dfs(vertex):

View file

@ -35,9 +35,9 @@ class VertexTypesDict(LazyLoadDictBase):
def get_type_dict(self):
types = self._types()
return {
**{t: types.CustomComponentVertex for t in ["CustomComponent"]},
**{t: types.ComponentVertex for t in ["Component"]},
**{t: types.InterfaceVertex for t in CHAT_COMPONENTS},
"CustomComponent": types.CustomComponentVertex,
"Component": types.ComponentVertex,
**dict.fromkeys(CHAT_COMPONENTS, types.InterfaceVertex),
}
def get_custom_component_vertex_type(self):

View file

@ -215,7 +215,7 @@ def create_state_model(model_name: str = "State", validate: bool = True, **kwarg
property_method = property(getter, setter)
except ValueError as e:
# If the method is not valid,assume it is already a getter
if "get_output_by_method" not in str(e) and "__self__" not in str(e) or validate:
if ("get_output_by_method" not in str(e) and "__self__" not in str(e)) or validate:
raise e
property_method = value
fields[name] = computed_field(property_method)

View file

@ -106,7 +106,7 @@ def get_artifact_type(value, build_result) -> str:
result = ArtifactType.MESSAGE
if result == ArtifactType.UNKNOWN and (
isinstance(build_result, Generator) or isinstance(value, Message) and isinstance(value.text, Generator)
isinstance(build_result, Generator) or (isinstance(value, Message) and isinstance(value.text, Generator))
):
result = ArtifactType.STREAM

View file

@ -758,7 +758,7 @@ class Vertex:
logger.warning(message)
elif isinstance(self._built_object, Iterator | AsyncIterator):
if self.display_name in ["Text Output"]:
if self.display_name == "Text Output":
msg = f"You are trying to stream to a {self.display_name}. Try using a Chat Output instead."
raise ValueError(msg)

View file

@ -1,3 +1,3 @@
from .data import data_to_text, docs_to_data, messages_to_text
__all__ = ["docs_to_data", "data_to_text", "messages_to_text"]
__all__ = ["data_to_text", "docs_to_data", "messages_to_text"]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -8,12 +8,12 @@ from .sequential_tasks_agent import sequential_tasks_agent_graph
from .vector_store_rag import vector_store_rag_graph
__all__ = [
"blog_writer_graph",
"document_qa_graph",
"memory_chatbot_graph",
"vector_store_rag_graph",
"basic_prompting_graph",
"sequential_tasks_agent_graph",
"hierarchical_tasks_agent_graph",
"blog_writer_graph",
"complex_agent_graph",
"document_qa_graph",
"hierarchical_tasks_agent_graph",
"memory_chatbot_graph",
"sequential_tasks_agent_graph",
"vector_store_rag_graph",
]

View file

@ -25,25 +25,25 @@ from .inputs import (
__all__ = [
"BoolInput",
"CodeInput",
"DataInput",
"DefaultPromptField",
"DictInput",
"DropdownInput",
"MultiselectInput",
"FileInput",
"FloatInput",
"HandleInput",
"Input",
"IntInput",
"LinkInput",
"MessageInput",
"MessageTextInput",
"MultilineInput",
"MultilineSecretInput",
"MultiselectInput",
"NestedDictInput",
"PromptInput",
"CodeInput",
"SecretStrInput",
"StrInput",
"MessageTextInput",
"TableInput",
"Input",
"DefaultPromptField",
"LinkInput",
]

View file

@ -25,25 +25,25 @@ from langflow.template import Output
__all__ = [
"BoolInput",
"CodeInput",
"DataInput",
"DefaultPromptField",
"DictInput",
"DropdownInput",
"MultiselectInput",
"FileInput",
"FloatInput",
"HandleInput",
"IntInput",
"LinkInput",
"MessageInput",
"MessageTextInput",
"MultilineInput",
"MultilineSecretInput",
"MultiselectInput",
"NestedDictInput",
"Output",
"PromptInput",
"CodeInput",
"SecretStrInput",
"StrInput",
"MessageTextInput",
"Output",
"TableInput",
"DefaultPromptField",
"LinkInput",
]

View file

@ -1,4 +1,4 @@
from .load import load_flow_from_json, run_flow_from_json
from .utils import get_flow, upload_file
__all__ = ["load_flow_from_json", "run_flow_from_json", "upload_file", "get_flow"]
__all__ = ["get_flow", "load_flow_from_json", "run_flow_from_json", "upload_file"]

View file

@ -1,4 +1,4 @@
from .logger import configure, logger
from .setup import disable_logging, enable_logging
__all__ = ["configure", "logger", "disable_logging", "enable_logging"]
__all__ = ["configure", "disable_logging", "enable_logging", "logger"]

View file

@ -42,10 +42,8 @@ def get_artifact_type(value, build_result=None) -> str:
result = ArtifactType.ARRAY
if result == ArtifactType.UNKNOWN and (
build_result
and isinstance(build_result, Generator)
or isinstance(value, Message)
and isinstance(value.text, Generator)
(build_result and isinstance(build_result, Generator))
or (isinstance(value, Message) and isinstance(value.text, Generator))
):
result = ArtifactType.STREAM

View file

@ -57,10 +57,8 @@ def get_type(payload):
result = LogType.TEXT
if result == LogType.UNKNOWN and (
payload
and isinstance(payload, Generator)
or isinstance(payload, Message)
and isinstance(payload.text, Generator)
(payload and isinstance(payload, Generator))
or (isinstance(payload, Message) and isinstance(payload.text, Generator))
):
result = LogType.STREAM

View file

@ -1,4 +1,4 @@
from .manager import service_manager
from .schema import ServiceType
__all__ = ["service_manager", "ServiceType"]
__all__ = ["ServiceType", "service_manager"]

View file

@ -3,10 +3,10 @@ from langflow.services.cache.service import AsyncInMemoryCache, CacheService, Re
from . import factory, service
__all__ = [
"factory",
"service",
"ThreadingInMemoryCache",
"AsyncInMemoryCache",
"CacheService",
"RedisCache",
"ThreadingInMemoryCache",
"factory",
"service",
]

View file

@ -6,4 +6,4 @@ from .transactions import TransactionTable
from .user import User
from .variable import Variable
__all__ = ["Flow", "User", "ApiKey", "Variable", "Folder", "MessageTable", "TransactionTable"]
__all__ = ["ApiKey", "Flow", "Folder", "MessageTable", "TransactionTable", "User", "Variable"]

View file

@ -1,3 +1,3 @@
from .model import ApiKey, ApiKeyCreate, ApiKeyRead, UnmaskedApiKeyRead
__all__ = ["ApiKey", "ApiKeyCreate", "UnmaskedApiKeyRead", "ApiKeyRead"]
__all__ = ["ApiKey", "ApiKeyCreate", "ApiKeyRead", "UnmaskedApiKeyRead"]

View file

@ -1,3 +1,3 @@
from .model import MessageCreate, MessageRead, MessageTable, MessageUpdate
__all__ = ["MessageTable", "MessageCreate", "MessageRead", "MessageUpdate"]
__all__ = ["MessageCreate", "MessageRead", "MessageTable", "MessageUpdate"]

View file

@ -42,8 +42,8 @@ def get_service(service_type: ServiceType, default=None):
from langflow.services.manager import service_manager
if not service_manager.factories:
#! This is a workaround to ensure that the service manager is initialized
#! Not optimal, but it works for now
# ! This is a workaround to ensure that the service manager is initialized
# ! Not optimal, but it works for now
service_manager.register_factories()
return service_manager.get(service_type, default) # type: ignore

View file

@ -272,14 +272,14 @@ class Settings(BaseSettings):
elif Path(new_path).exists():
logger.debug(f"Database already exists at {new_path}, using it")
final_path = new_path
elif Path("./{db_file_name}").exists():
elif Path(f"./{db_file_name}").exists():
try:
logger.debug("Copying existing database to new location")
copy2("./{db_file_name}", new_path)
copy2(f"./{db_file_name}", new_path)
logger.debug(f"Copied existing database to {new_path}")
except Exception:
logger.exception("Failed to copy database, using default path")
new_path = "./{db_file_name}"
new_path = f"./{db_file_name}"
else:
final_path = new_path

View file

@ -12,7 +12,7 @@ def set_secure_permissions(file_path: Path):
import win32con
import win32security
user, domain, _ = win32security.LookupAccountName("", win32api.GetUserName())
user, _, _ = win32security.LookupAccountName("", win32api.GetUserName())
sd = win32security.GetFileSecurity(str(file_path), win32security.DACL_SECURITY_INFORMATION)
dacl = win32security.ACL()

View file

@ -17,7 +17,7 @@ class ForbiddenError(CustomException):
class APIKeyError(CustomException):
def __init__(self, detail="API key error"):
super().__init__(detail, 400) #! Should be 401
super().__init__(detail, 400) # ! Should be 401
class FilterError(CustomException):

View file

@ -78,7 +78,7 @@ class LangFuseTracer(BaseTracer):
_metadata: dict = {}
_metadata |= {"trace_type": trace_type} if trace_type else {}
_metadata |= metadata if metadata else {}
_metadata |= metadata or {}
_name = trace_name.removesuffix(f" ({trace_id})")
content_span = {
@ -108,7 +108,7 @@ class LangFuseTracer(BaseTracer):
span = self.spans.get(trace_id, None)
if span:
_output: dict = {}
_output |= outputs if outputs else {}
_output |= outputs or {}
_output |= {"error": str(error)} if error else {}
_output |= {"logs": list(logs)} if logs else {}
content = {"output": _output, "end_time": end_time}

View file

@ -3,8 +3,8 @@ from langflow.template.frontend_node.base import FrontendNode
from langflow.template.template.base import Template
__all__ = [
"FrontendNode",
"Input",
"Output",
"FrontendNode",
"Template",
]

View file

@ -42,7 +42,7 @@ def build_template_from_function(name: str, type_to_loader_dict: dict, add_funct
variables = {"_type": _type}
for class_field_items, value in _class.model_fields.items():
if class_field_items in ["callback_manager"]:
if class_field_items == "callback_manager":
continue
variables[class_field_items] = {}
for name_, value_ in value.__repr_args__():
@ -53,7 +53,7 @@ def build_template_from_function(name: str, type_to_loader_dict: dict, add_funct
)
except Exception:
variables[class_field_items]["default"] = None
elif name_ not in ["name"]:
elif name_ != "name":
variables[class_field_items][name_] = value_
variables[class_field_items]["placeholder"] = docs.params.get(class_field_items, "")
@ -209,7 +209,7 @@ def format_dict(dictionary: dict[str, Any], class_name: str | None = None) -> di
"""
for key, value in dictionary.items():
if key in ["_type"]:
if key == "_type":
continue
_type: str | type = get_type(value)

File diff suppressed because it is too large Load diff

View file

@ -194,7 +194,7 @@ dev = [
"types-redis>=4.6.0.5",
"ipykernel>=6.29.0",
"mypy>=1.11.0",
"ruff>=0.4.5",
"ruff>=0.6.2",
"httpx>=0.27",
"pytest>=8.2.0",
"types-requests>=2.32.0",

View file

@ -64,7 +64,7 @@ class NameTest(FastHttpUser):
"inputs": {"text": f"Hello, My name is {name}"},
"sync": False,
}
result1, session_id = self.process(name, self.flow_id, payload1)
_result1, session_id = self.process(name, self.flow_id, payload1)
payload2 = {
"inputs": {"text": "What is my name? Please, answer like this: Your name is <name>"},

2
uv.lock generated
View file

@ -3896,7 +3896,7 @@ requires-dist = [
{ name = "requests", marker = "extra == 'dev'", specifier = ">=2.32.0" },
{ name = "respx", marker = "extra == 'dev'", specifier = ">=0.21.1" },
{ name = "rich", specifier = ">=13.7.0" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.4.5" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.6.2" },
{ name = "sentence-transformers", marker = "extra == 'all'", specifier = ">=2.0.0" },
{ name = "sentence-transformers", marker = "extra == 'local'", specifier = ">=2.0.0" },
{ name = "sentry-sdk", extras = ["fastapi", "loguru"], specifier = ">=2.5.1" },