diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py index c3ebd26c4..b2100d250 100644 --- a/src/backend/base/langflow/__main__.py +++ b/src/backend/base/langflow/__main__.py @@ -10,8 +10,8 @@ import click import httpx import typer from dotenv import load_dotenv -from multiprocess import cpu_count # type: ignore -from multiprocess.context import Process # type: ignore +from multiprocess import cpu_count +from multiprocess.context import Process from packaging import version as pkg_version from rich import box from rich import print as rprint @@ -530,7 +530,7 @@ def api_key( def api_key_banner(unmasked_api_key): is_mac = platform.system() == "Darwin" - import pyperclip # type: ignore + import pyperclip pyperclip.copy(unmasked_api_key.api_key) panel = Panel( diff --git a/src/backend/base/langflow/alembic/env.py b/src/backend/base/langflow/alembic/env.py index e37e12bef..6e4a87561 100644 --- a/src/backend/base/langflow/alembic/env.py +++ b/src/backend/base/langflow/alembic/env.py @@ -6,7 +6,7 @@ from alembic import context from loguru import logger from sqlalchemy import engine_from_config, pool -from langflow.services.database.models import * # noqa +from langflow.services.database.models import * from langflow.services.database.service import SQLModel # this is the Alembic Config object, which provides diff --git a/src/backend/base/langflow/api/utils.py b/src/backend/base/langflow/api/utils.py index 2920927ed..8393cb521 100644 --- a/src/backend/base/langflow/api/utils.py +++ b/src/backend/base/langflow/api/utils.py @@ -259,9 +259,9 @@ def parse_value(value: Any, input_type: str) -> Any: async def cascade_delete_flow(session: Session, flow: Flow): try: - session.exec(delete(TransactionTable).where(TransactionTable.flow_id == flow.id)) # type: ignore - session.exec(delete(VertexBuildTable).where(VertexBuildTable.flow_id == flow.id)) # type: ignore - session.exec(delete(Flow).where(Flow.id == flow.id)) # type: ignore + 
session.exec(delete(TransactionTable).where(TransactionTable.flow_id == flow.id)) + session.exec(delete(VertexBuildTable).where(VertexBuildTable.flow_id == flow.id)) + session.exec(delete(Flow).where(Flow.id == flow.id)) except Exception as e: msg = f"Unable to cascade delete flow: ${flow.id}" raise RuntimeError(msg, e) from e diff --git a/src/backend/base/langflow/api/v1/base.py b/src/backend/base/langflow/api/v1/base.py index e42cbc93e..879637b88 100644 --- a/src/backend/base/langflow/api/v1/base.py +++ b/src/backend/base/langflow/api/v1/base.py @@ -12,7 +12,7 @@ class Code(BaseModel): class FrontendNodeRequest(FrontendNode): - template: dict # type: ignore + template: dict # type: ignore[assignment] @model_serializer(mode="wrap") def serialize_model(self, handler): diff --git a/src/backend/base/langflow/api/v1/endpoints.py b/src/backend/base/langflow/api/v1/endpoints.py index ce5fd1bba..4d5fefef8 100644 --- a/src/backend/base/langflow/api/v1/endpoints.py +++ b/src/backend/base/langflow/api/v1/endpoints.py @@ -133,7 +133,7 @@ async def simple_run_flow( if input_request.output_type == "debug" or ( vertex.is_output - and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower()) # type: ignore + and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower()) # type: ignore[operator] ) ] task_result, session_id = await run_graph_internal( @@ -346,7 +346,7 @@ async def webhook_run_flow( ) logger.debug("Starting background task") - background_tasks.add_task( # type: ignore + background_tasks.add_task( simple_run_flow_task, flow=flow, input_request=input_request, @@ -659,7 +659,7 @@ def get_config(): try: from langflow.services.deps import get_settings_service - settings_service: SettingsService = get_settings_service() # type: ignore + settings_service: SettingsService = get_settings_service() return settings_service.settings.model_dump() except Exception as exc: logger.exception(exc) diff --git 
a/src/backend/base/langflow/api/v1/files.py b/src/backend/base/langflow/api/v1/files.py index 5839204c2..3d297cfec 100644 --- a/src/backend/base/langflow/api/v1/files.py +++ b/src/backend/base/langflow/api/v1/files.py @@ -128,10 +128,10 @@ async def download_profile_picture( try: extension = file_name.split(".")[-1] config_dir = get_storage_service().settings_service.settings.config_dir - config_path = Path(config_dir) # type: ignore + config_path = Path(config_dir) # type: ignore[arg-type] folder_path = config_path / "profile_pictures" / folder_name content_type = build_content_type_from_extension(extension) - file_content = await storage_service.get_file(flow_id=folder_path, file_name=file_name) # type: ignore + file_content = await storage_service.get_file(flow_id=folder_path, file_name=file_name) # type: ignore[arg-type] return StreamingResponse(BytesIO(file_content), media_type=content_type) except Exception as e: @@ -142,13 +142,13 @@ async def download_profile_picture( async def list_profile_pictures(storage_service: Annotated[StorageService, Depends(get_storage_service)]): try: config_dir = get_storage_service().settings_service.settings.config_dir - config_path = Path(config_dir) # type: ignore + config_path = Path(config_dir) # type: ignore[arg-type] people_path = config_path / "profile_pictures/People" space_path = config_path / "profile_pictures/Space" - people = await storage_service.list_files(flow_id=people_path) # type: ignore - space = await storage_service.list_files(flow_id=space_path) # type: ignore + people = await storage_service.list_files(flow_id=people_path) # type: ignore[arg-type] + space = await storage_service.list_files(flow_id=space_path) # type: ignore[arg-type] files = [f"People/{i}" for i in people] files += [f"Space/{i}" for i in space] diff --git a/src/backend/base/langflow/api/v1/flows.py b/src/backend/base/langflow/api/v1/flows.py index a92a121be..109aa53fd 100644 --- a/src/backend/base/langflow/api/v1/flows.py +++ 
b/src/backend/base/langflow/api/v1/flows.py @@ -52,7 +52,7 @@ def create_flow( # based on the highest number found if session.exec(select(Flow).where(Flow.name == flow.name).where(Flow.user_id == current_user.id)).first(): flows = session.exec( - select(Flow).where(Flow.name.like(f"{flow.name} (%")).where(Flow.user_id == current_user.id) # type: ignore + select(Flow).where(Flow.name.like(f"{flow.name} (%")).where(Flow.user_id == current_user.id) # type: ignore[attr-defined] ).all() if flows: extract_number = re.compile(r"\((\d+)\)$") @@ -74,14 +74,14 @@ def create_flow( ): flows = session.exec( select(Flow) - .where(Flow.endpoint_name.like(f"{flow.endpoint_name}-%")) # type: ignore + .where(Flow.endpoint_name.like(f"{flow.endpoint_name}-%")) # type: ignore[union-attr] .where(Flow.user_id == current_user.id) ).all() if flows: # The endpoitn name is like "my-endpoint","my-endpoint-1", "my-endpoint-2" # so we need to get the highest number and add 1 # we need to get the last part of the endpoint name - numbers = [int(flow.endpoint_name.split("-")[-1]) for flow in flows] # type: ignore + numbers = [int(flow.endpoint_name.split("-")[-1]) for flow in flows] flow.endpoint_name = f"{flow.endpoint_name}-{max(numbers) + 1}" else: flow.endpoint_name = f"{flow.endpoint_name}-1" @@ -148,16 +148,16 @@ def read_flows( auth_settings = settings_service.auth_settings if auth_settings.AUTO_LOGIN: stmt = select(Flow).where( - (Flow.user_id == None) | (Flow.user_id == current_user.id) # noqa + (Flow.user_id == None) | (Flow.user_id == current_user.id) # noqa: E711 ) if components_only: - stmt = stmt.where(Flow.is_component == True) # noqa + stmt = stmt.where(Flow.is_component == True) # noqa: E712 flows = session.exec(stmt).all() else: flows = current_user.flows - flows = validate_is_component(flows) # type: ignore + flows = validate_is_component(flows) if components_only: flows = [flow for flow in flows if flow.is_component] flow_ids = [flow.id for flow in flows] @@ -169,7 +169,7 @@ 
def read_flows( example_flows = folder.flows if folder else [] for example_flow in example_flows: if example_flow.id not in flow_ids: - flows.append(example_flow) # type: ignore + flows.append(example_flow) except Exception as e: logger.exception(e) @@ -196,7 +196,7 @@ def read_flow( # If auto login is enable user_id can be current_user.id or None # so write an OR stmt = stmt.where( - (Flow.user_id == current_user.id) | (Flow.user_id == None) # noqa + (Flow.user_id == current_user.id) | (Flow.user_id == None) # noqa: E711 ) if user_flow := session.exec(stmt).first(): return user_flow @@ -369,7 +369,7 @@ async def download_multiple_file( db: Annotated[Session, Depends(get_session)], ): """Download all flows as a zip file.""" - flows = db.exec(select(Flow).where(and_(Flow.user_id == user.id, Flow.id.in_(flow_ids)))).all() # type: ignore + flows = db.exec(select(Flow).where(and_(Flow.user_id == user.id, Flow.id.in_(flow_ids)))).all() # type: ignore[attr-defined] if not flows: raise HTTPException(status_code=404, detail="No flows found.") diff --git a/src/backend/base/langflow/api/v1/folders.py b/src/backend/base/langflow/api/v1/folders.py index e67605beb..4fb9ae2d4 100644 --- a/src/backend/base/langflow/api/v1/folders.py +++ b/src/backend/base/langflow/api/v1/folders.py @@ -44,7 +44,7 @@ def create_folder( ).first(): folder_results = session.exec( select(Folder).where( - Folder.name.like(f"{new_folder.name}%"), # type: ignore + Folder.name.like(f"{new_folder.name}%"), # type: ignore[attr-defined] Folder.user_id == current_user.id, ) ) @@ -62,14 +62,14 @@ def create_folder( if folder.components_list: update_statement_components = ( - update(Flow).where(Flow.id.in_(folder.components_list)).values(folder_id=new_folder.id) # type: ignore + update(Flow).where(Flow.id.in_(folder.components_list)).values(folder_id=new_folder.id) # type: ignore[attr-defined] ) - session.exec(update_statement_components) # type: ignore + session.exec(update_statement_components) 
session.commit() if folder.flows_list: - update_statement_flows = update(Flow).where(Flow.id.in_(folder.flows_list)).values(folder_id=new_folder.id) # type: ignore - session.exec(update_statement_flows) # type: ignore + update_statement_flows = update(Flow).where(Flow.id.in_(folder.flows_list)).values(folder_id=new_folder.id) # type: ignore[attr-defined] + session.exec(update_statement_flows) session.commit() return new_folder @@ -86,7 +86,7 @@ def read_folders( try: folders = session.exec( select(Folder).where( - or_(Folder.user_id == current_user.id, Folder.user_id == None) # type: ignore # noqa: E711 + or_(Folder.user_id == current_user.id, Folder.user_id == None) # noqa: E711 ) ).all() return sorted(folders, key=lambda x: x.name != DEFAULT_FOLDER_NAME) @@ -152,16 +152,16 @@ def update_folder( my_collection_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first() if my_collection_folder: update_statement_my_collection = ( - update(Flow).where(Flow.id.in_(excluded_flows)).values(folder_id=my_collection_folder.id) # type: ignore + update(Flow).where(Flow.id.in_(excluded_flows)).values(folder_id=my_collection_folder.id) # type: ignore[attr-defined] ) - session.exec(update_statement_my_collection) # type: ignore + session.exec(update_statement_my_collection) session.commit() if concat_folder_components: update_statement_components = ( - update(Flow).where(Flow.id.in_(concat_folder_components)).values(folder_id=existing_folder.id) # type: ignore + update(Flow).where(Flow.id.in_(concat_folder_components)).values(folder_id=existing_folder.id) # type: ignore[attr-defined] ) - session.exec(update_statement_components) # type: ignore + session.exec(update_statement_components) session.commit() return existing_folder diff --git a/src/backend/base/langflow/api/v1/monitor.py b/src/backend/base/langflow/api/v1/monitor.py index 4d3e3c801..6474444c5 100644 --- a/src/backend/base/langflow/api/v1/monitor.py +++ 
b/src/backend/base/langflow/api/v1/monitor.py @@ -79,7 +79,7 @@ async def delete_messages( current_user: Annotated[User, Depends(get_current_active_user)], ): try: - session.exec(delete(MessageTable).where(MessageTable.id.in_(message_ids))) # type: ignore + session.exec(delete(MessageTable).where(MessageTable.id.in_(message_ids))) # type: ignore[attr-defined] session.commit() except Exception as e: raise HTTPException(status_code=500, detail=str(e)) from e @@ -147,7 +147,7 @@ async def delete_messages_session( session: Annotated[Session, Depends(get_session)], ): try: - session.exec( # type: ignore + session.exec( delete(MessageTable) .where(col(MessageTable.session_id) == session_id) .execution_options(synchronize_session="fetch") diff --git a/src/backend/base/langflow/api/v1/users.py b/src/backend/base/langflow/api/v1/users.py index 679211547..2151dfcaf 100644 --- a/src/backend/base/langflow/api/v1/users.py +++ b/src/backend/base/langflow/api/v1/users.py @@ -71,11 +71,11 @@ def read_all_users( query: SelectOfScalar = select(User).offset(skip).limit(limit) users = session.exec(query).fetchall() - count_query = select(func.count()).select_from(User) # type: ignore + count_query = select(func.count()).select_from(User) total_count = session.exec(count_query).first() return UsersResponse( - total_count=total_count, # type: ignore + total_count=total_count, users=[UserRead(**user.model_dump()) for user in users], ) diff --git a/src/backend/base/langflow/base/agents/agent.py b/src/backend/base/langflow/base/agents/agent.py index 2e8e3be22..a64572794 100644 --- a/src/backend/base/langflow/base/agents/agent.py +++ b/src/backend/base/langflow/base/agents/agent.py @@ -134,7 +134,7 @@ class LCToolsAgentComponent(LCAgentComponent): runnable = agent else: runnable = AgentExecutor.from_agent_and_tools( - agent=agent, # type: ignore + agent=agent, tools=self.tools, handle_parsing_errors=self.handle_parsing_errors, verbose=self.verbose, diff --git 
a/src/backend/base/langflow/base/agents/crewai/crew.py b/src/backend/base/langflow/base/agents/crewai/crew.py index f3ed99c2a..5ceb538fe 100644 --- a/src/backend/base/langflow/base/agents/crewai/crew.py +++ b/src/backend/base/langflow/base/agents/crewai/crew.py @@ -1,8 +1,8 @@ from collections.abc import Callable from typing import cast -from crewai import Agent, Crew, Process, Task # type: ignore -from crewai.task import TaskOutput # type: ignore +from crewai import Agent, Crew, Process, Task +from crewai.task import TaskOutput from langchain_core.agents import AgentAction, AgentFinish from langflow.custom import Component diff --git a/src/backend/base/langflow/base/agents/crewai/tasks.py b/src/backend/base/langflow/base/agents/crewai/tasks.py index 8e6f6cb39..b2f5a5a27 100644 --- a/src/backend/base/langflow/base/agents/crewai/tasks.py +++ b/src/backend/base/langflow/base/agents/crewai/tasks.py @@ -1,4 +1,4 @@ -from crewai import Task # type: ignore +from crewai import Task class SequentialTask(Task): diff --git a/src/backend/base/langflow/base/data/utils.py b/src/backend/base/langflow/base/data/utils.py index 4c9ede095..31212a92d 100644 --- a/src/backend/base/langflow/base/data/utils.py +++ b/src/backend/base/langflow/base/data/utils.py @@ -76,7 +76,7 @@ def retrieve_file_paths( def partition_file_to_data(file_path: str, silent_errors: bool) -> Data | None: # Use the partition function to load the file - from unstructured.partition.auto import partition # type: ignore + from unstructured.partition.auto import partition try: elements = partition(file_path) @@ -108,14 +108,14 @@ def read_text_file(file_path: str) -> str: def read_docx_file(file_path: str) -> str: - from docx import Document # type: ignore + from docx import Document doc = Document(file_path) return "\n\n".join([p.text for p in doc.paragraphs]) def parse_pdf_to_text(file_path: str) -> str: - from pypdf import PdfReader # type: ignore + from pypdf import PdfReader with Path(file_path).open("rb") as 
f: reader = PdfReader(f) diff --git a/src/backend/base/langflow/base/io/chat.py b/src/backend/base/langflow/base/io/chat.py index 4b4527cca..d1e346d00 100644 --- a/src/backend/base/langflow/base/io/chat.py +++ b/src/backend/base/langflow/base/io/chat.py @@ -97,4 +97,4 @@ class ChatComponent(Component): flow_id=self.graph.flow_id, ) self.status = messages - return message_text # type: ignore + return message_text # type: ignore[return-value] diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py index 5434a1d08..bc72775ea 100644 --- a/src/backend/base/langflow/base/models/model.py +++ b/src/backend/base/langflow/base/models/model.py @@ -133,9 +133,9 @@ class LCModelComponent(Component): } } else: - status_message = f"Response: {content}" # type: ignore + status_message = f"Response: {content}" # type: ignore[assignment] else: - status_message = f"Response: {message.content}" # type: ignore + status_message = f"Response: {message.content}" # type: ignore[assignment] return status_message def get_chat_result( @@ -175,7 +175,7 @@ class LCModelComponent(Component): if self.output_parser is not None: runnable = runnable | self.output_parser - runnable = runnable.with_config( # type: ignore + runnable = runnable.with_config( { "run_name": self.display_name, "project_name": self.get_project_name(), @@ -183,8 +183,8 @@ class LCModelComponent(Component): } ) if stream: - return runnable.stream(inputs) # type: ignore - message = runnable.invoke(inputs) # type: ignore + return runnable.stream(inputs) + message = runnable.invoke(inputs) result = message.content if hasattr(message, "content") else message if isinstance(message, AIMessage): status_message = self.build_status_message(message) diff --git a/src/backend/base/langflow/base/tools/component_tool.py b/src/backend/base/langflow/base/tools/component_tool.py index 2972b8825..a21452da7 100644 --- a/src/backend/base/langflow/base/tools/component_tool.py +++ 
b/src/backend/base/langflow/base/tools/component_tool.py @@ -60,7 +60,7 @@ def _format_tool_name(name: str): return re.sub(r"[^a-zA-Z0-9_-]", "-", name) -class ComponentToolkit: # type: ignore +class ComponentToolkit: def __init__(self, component: Component): self.component = component diff --git a/src/backend/base/langflow/components/agents/CrewAIAgent.py b/src/backend/base/langflow/components/agents/CrewAIAgent.py index 2a9a11de9..879977ec7 100644 --- a/src/backend/base/langflow/components/agents/CrewAIAgent.py +++ b/src/backend/base/langflow/components/agents/CrewAIAgent.py @@ -1,4 +1,4 @@ -from crewai import Agent # type: ignore +from crewai import Agent from langflow.custom import Component from langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output diff --git a/src/backend/base/langflow/components/agents/HierarchicalCrew.py b/src/backend/base/langflow/components/agents/HierarchicalCrew.py index e2267bd43..c56144d53 100644 --- a/src/backend/base/langflow/components/agents/HierarchicalCrew.py +++ b/src/backend/base/langflow/components/agents/HierarchicalCrew.py @@ -1,4 +1,4 @@ -from crewai import Crew, Process # type: ignore +from crewai import Crew, Process from langflow.base.agents.crewai.crew import BaseCrewComponent from langflow.io import HandleInput diff --git a/src/backend/base/langflow/components/agents/SequentialCrew.py b/src/backend/base/langflow/components/agents/SequentialCrew.py index cbe172f5b..df17e7525 100644 --- a/src/backend/base/langflow/components/agents/SequentialCrew.py +++ b/src/backend/base/langflow/components/agents/SequentialCrew.py @@ -1,4 +1,4 @@ -from crewai import Agent, Crew, Process, Task # type: ignore +from crewai import Agent, Crew, Process, Task from langflow.base.agents.crewai.crew import BaseCrewComponent from langflow.io import HandleInput diff --git a/src/backend/base/langflow/components/astra_assistants/run.py b/src/backend/base/langflow/components/astra_assistants/run.py index ac1b697f4..7c83dfe4a 
100644 --- a/src/backend/base/langflow/components/astra_assistants/run.py +++ b/src/backend/base/langflow/components/astra_assistants/run.py @@ -1,6 +1,6 @@ from typing import Any -from astra_assistants import patch # type: ignore +from astra_assistants import patch from openai import OpenAI from openai.lib.streaming import AssistantEventHandler diff --git a/src/backend/base/langflow/components/chains/ConversationChain.py b/src/backend/base/langflow/components/chains/ConversationChain.py index 67b488dfc..f1bb74ef9 100644 --- a/src/backend/base/langflow/components/chains/ConversationChain.py +++ b/src/backend/base/langflow/components/chains/ConversationChain.py @@ -30,7 +30,7 @@ class ConversationChainComponent(LCChainComponent): result = chain.invoke({"input": self.input_value}, config={"callbacks": self.get_langchain_callbacks()}) if isinstance(result, dict): - result = result.get(chain.output_key, "") # type: ignore + result = result.get(chain.output_key, "") elif not isinstance(result, str): result = result.get("response") diff --git a/src/backend/base/langflow/components/data/Directory.py b/src/backend/base/langflow/components/data/Directory.py index ac9ece726..81e0cafbe 100644 --- a/src/backend/base/langflow/components/data/Directory.py +++ b/src/backend/base/langflow/components/data/Directory.py @@ -92,4 +92,4 @@ class DirectoryComponent(Component): loaded_data = [parse_text_file_to_data(file_path, silent_errors) for file_path in file_paths] loaded_data = list(filter(None, loaded_data)) self.status = loaded_data - return loaded_data # type: ignore + return loaded_data # type: ignore[return-value] diff --git a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py index 7e521abb8..914542c88 100644 --- a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py +++ b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py @@ -120,7 +120,7 @@ class 
ChatLiteLLMModelComponent(LCModelComponent): def build_model(self) -> LanguageModel: # type: ignore[type-var] try: - import litellm # type: ignore + import litellm litellm.drop_params = True litellm.set_verbose = self.verbose @@ -155,4 +155,4 @@ class ChatLiteLLMModelComponent(LCModelComponent): ) output.client.api_key = self.api_key - return output # type: ignore + return output diff --git a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py index 45e52f01d..d67b93826 100644 --- a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py @@ -37,7 +37,7 @@ class AmazonBedrockEmbeddingsComponent(LCModelComponent): def build_embeddings(self) -> Embeddings: if self.aws_access_key: - import boto3 # type: ignore + import boto3 session = boto3.Session( aws_access_key_id=self.aws_access_key, @@ -65,4 +65,4 @@ class AmazonBedrockEmbeddingsComponent(LCModelComponent): model_id=self.model_id, endpoint_url=self.endpoint_url, region_name=self.region_name, - ) # type: ignore + ) diff --git a/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py index 6c7256338..db71e3c8e 100644 --- a/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py @@ -36,7 +36,7 @@ class CohereEmbeddingsComponent(LCModelComponent): ] def build_embeddings(self) -> Embeddings: - return CohereEmbeddings( # type: ignore + return CohereEmbeddings( cohere_api_key=self.cohere_api_key, model=self.model, truncate=self.truncate, diff --git a/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py b/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py index 10907805b..1aca0a33d 100644 --- 
a/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py @@ -47,7 +47,7 @@ class NVIDIAEmbeddingsComponent(LCEmbeddingsModel): if field_name == "base_url" and field_value: try: build_model = self.build_embeddings() - ids = [model.id for model in build_model.available_models] # type: ignore + ids = [model.id for model in build_model.available_models] build_config["model"]["options"] = ids build_config["model"]["value"] = ids[0] except Exception as e: @@ -67,7 +67,7 @@ class NVIDIAEmbeddingsComponent(LCEmbeddingsModel): base_url=self.base_url, temperature=self.temperature, nvidia_api_key=self.nvidia_api_key, - ) # type: ignore + ) except Exception as e: msg = f"Could not connect to NVIDIA API. Error: {e}" raise ValueError(msg) from e diff --git a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py index 104a732df..03f59919f 100644 --- a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py @@ -41,7 +41,7 @@ class OllamaEmbeddingsComponent(LCModelComponent): model=self.model, base_url=self.base_url, temperature=self.temperature, - ) # type: ignore + ) except Exception as e: msg = "Could not connect to Ollama API." 
raise ValueError(msg) from e diff --git a/src/backend/base/langflow/components/embeddings/util/AIMLEmbeddingsImpl.py b/src/backend/base/langflow/components/embeddings/util/AIMLEmbeddingsImpl.py index 8abc198de..3273fcce5 100644 --- a/src/backend/base/langflow/components/embeddings/util/AIMLEmbeddingsImpl.py +++ b/src/backend/base/langflow/components/embeddings/util/AIMLEmbeddingsImpl.py @@ -40,7 +40,7 @@ class AIMLEmbeddingsImpl(BaseModel, Embeddings): logger.exception("Error occurred") raise - return embeddings # type: ignore + return embeddings # type: ignore[return-value] def _embed_text(self, client: httpx.Client, headers: dict, text: str) -> dict: payload = { diff --git a/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py b/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py index 25e910186..05c5287fb 100644 --- a/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py +++ b/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py @@ -55,7 +55,7 @@ class FirecrawlCrawlApi(CustomComponent): idempotency_key: str | None = None, ) -> Data: try: - from firecrawl.firecrawl import FirecrawlApp # type: ignore + from firecrawl.firecrawl import FirecrawlApp except ImportError as e: msg = "Could not import firecrawl integration package. Please install it with `pip install firecrawl-py`." 
raise ImportError(msg) from e diff --git a/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py b/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py index 440ca46b2..f03c273fd 100644 --- a/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py +++ b/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py @@ -48,7 +48,7 @@ class FirecrawlScrapeApi(CustomComponent): extractorOptions: Data | None = None, ) -> Data: try: - from firecrawl.firecrawl import FirecrawlApp # type: ignore + from firecrawl.firecrawl import FirecrawlApp except ImportError as e: msg = "Could not import firecrawl integration package. Please install it with `pip install firecrawl-py`." raise ImportError(msg) from e diff --git a/src/backend/base/langflow/components/models/AIMLModel.py b/src/backend/base/langflow/components/models/AIMLModel.py index 5e4c8edbe..372fd817a 100644 --- a/src/backend/base/langflow/components/models/AIMLModel.py +++ b/src/backend/base/langflow/components/models/AIMLModel.py @@ -107,7 +107,7 @@ class AIMLModelComponent(LCModelComponent): except ImportError: return None if isinstance(e, BadRequestError): - message = e.json_body.get("error", {}).get("message", "") # type: ignore + message = e.json_body.get("error", {}).get("message", "") if message: return message return None diff --git a/src/backend/base/langflow/components/models/AmazonBedrockModel.py b/src/backend/base/langflow/components/models/AmazonBedrockModel.py index 1816be570..bedd426bf 100644 --- a/src/backend/base/langflow/components/models/AmazonBedrockModel.py +++ b/src/backend/base/langflow/components/models/AmazonBedrockModel.py @@ -73,7 +73,7 @@ class AmazonBedrockComponent(LCModelComponent): msg = "langchain_aws is not installed. Please install it with `pip install langchain_aws`." 
raise ImportError(msg) from e if self.aws_access_key: - import boto3 # type: ignore + import boto3 session = boto3.Session( aws_access_key_id=self.aws_access_key, @@ -96,7 +96,7 @@ class AmazonBedrockComponent(LCModelComponent): boto3_client = session.client("bedrock-runtime", **client_params) try: - output = ChatBedrock( # type: ignore + output = ChatBedrock( client=boto3_client, model_id=self.model_id, region_name=self.region_name, @@ -107,4 +107,4 @@ class AmazonBedrockComponent(LCModelComponent): except Exception as e: msg = "Could not connect to AmazonBedrock API." raise ValueError(msg) from e - return output # type: ignore + return output diff --git a/src/backend/base/langflow/components/models/AnthropicModel.py b/src/backend/base/langflow/components/models/AnthropicModel.py index e71cc2fd6..45b7148a3 100644 --- a/src/backend/base/langflow/components/models/AnthropicModel.py +++ b/src/backend/base/langflow/components/models/AnthropicModel.py @@ -69,7 +69,7 @@ class AnthropicModelComponent(LCModelComponent): output = ChatAnthropic( model=model, anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None), - max_tokens_to_sample=max_tokens, # type: ignore + max_tokens_to_sample=max_tokens, temperature=temperature, anthropic_api_url=anthropic_api_url, streaming=self.stream, @@ -78,7 +78,7 @@ class AnthropicModelComponent(LCModelComponent): msg = "Could not connect to Anthropic API." 
raise ValueError(msg) from e - return output # type: ignore + return output def _get_exception_message(self, exception: Exception) -> str | None: """ @@ -95,7 +95,7 @@ class AnthropicModelComponent(LCModelComponent): except ImportError: return None if isinstance(exception, BadRequestError): - message = exception.body.get("error", {}).get("message") # type: ignore + message = exception.body.get("error", {}).get("message") if message: return message return None diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py index ac42b22b3..9131e6ee3 100644 --- a/src/backend/base/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/base/langflow/components/models/AzureOpenAIModel.py @@ -82,4 +82,4 @@ class AzureChatOpenAIComponent(LCModelComponent): msg = f"Could not connect to AzureOpenAI API: {e}" raise ValueError(msg) from e - return output # type: ignore + return output diff --git a/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py index c7aa591df..83b434ea4 100644 --- a/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py +++ b/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py @@ -86,7 +86,7 @@ class QianfanChatEndpointComponent(LCModelComponent): endpoint = self.endpoint try: - output = QianfanChatEndpoint( # type: ignore + output = QianfanChatEndpoint( model=model, qianfan_ak=SecretStr(qianfan_ak) if qianfan_ak else None, qianfan_sk=SecretStr(qianfan_sk) if qianfan_sk else None, @@ -99,4 +99,4 @@ class QianfanChatEndpointComponent(LCModelComponent): msg = "Could not connect to Baidu Qianfan API." 
raise ValueError(msg) from e - return output # type: ignore + return output diff --git a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py index 7fb872f12..46b70ff37 100644 --- a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py +++ b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py @@ -73,7 +73,7 @@ class GoogleGenerativeAIComponent(LCModelComponent): top_p = self.top_p n = self.n - return ChatGoogleGenerativeAI( # type: ignore + return ChatGoogleGenerativeAI( model=model, max_output_tokens=max_output_tokens or None, temperature=temperature, diff --git a/src/backend/base/langflow/components/models/GroqModel.py b/src/backend/base/langflow/components/models/GroqModel.py index b6bcf16f6..13cdc9a42 100644 --- a/src/backend/base/langflow/components/models/GroqModel.py +++ b/src/backend/base/langflow/components/models/GroqModel.py @@ -90,7 +90,7 @@ class GroqModel(LCModelComponent): n = self.n stream = self.stream - return ChatGroq( # type: ignore + return ChatGroq( model=model_name, max_tokens=max_tokens or None, temperature=temperature, diff --git a/src/backend/base/langflow/components/models/NvidiaModel.py b/src/backend/base/langflow/components/models/NvidiaModel.py index 6e6fff6b1..998a59dd6 100644 --- a/src/backend/base/langflow/components/models/NvidiaModel.py +++ b/src/backend/base/langflow/components/models/NvidiaModel.py @@ -62,7 +62,7 @@ class NVIDIAModelComponent(LCModelComponent): if field_name == "base_url" and field_value: try: build_model = self.build_model() - ids = [model.id for model in build_model.available_models] # type: ignore + ids = [model.id for model in build_model.available_models] build_config["model_name"]["options"] = ids build_config["model_name"]["value"] = ids[0] except Exception as e: @@ -85,7 +85,7 @@ class NVIDIAModelComponent(LCModelComponent): max_tokens=max_tokens or None, model=model_name, 
base_url=self.base_url, - api_key=nvidia_api_key, # type: ignore + api_key=nvidia_api_key, temperature=temperature or 0.1, seed=seed, ) diff --git a/src/backend/base/langflow/components/models/OllamaModel.py b/src/backend/base/langflow/components/models/OllamaModel.py index 54c47ff6b..a910d5575 100644 --- a/src/backend/base/langflow/components/models/OllamaModel.py +++ b/src/backend/base/langflow/components/models/OllamaModel.py @@ -180,7 +180,7 @@ class ChatOllamaComponent(LCModelComponent): mirostat_options = {"Mirostat": 1, "Mirostat 2.0": 2} # Default to 0 for 'Disabled' - mirostat_value = mirostat_options.get(self.mirostat, 0) # type: ignore + mirostat_value = mirostat_options.get(self.mirostat, 0) # Set mirostat_eta and mirostat_tau to None if mirostat is disabled if mirostat_value == 0: @@ -220,9 +220,9 @@ class ChatOllamaComponent(LCModelComponent): llm_params = {k: v for k, v in llm_params.items() if v is not None} try: - output = ChatOllama(**llm_params) # type: ignore + output = ChatOllama(**llm_params) except Exception as e: msg = "Could not initialize Ollama LLM." 
raise ValueError(msg) from e - return output # type: ignore + return output diff --git a/src/backend/base/langflow/components/models/OpenAIModel.py b/src/backend/base/langflow/components/models/OpenAIModel.py index e849c349d..9156674ed 100644 --- a/src/backend/base/langflow/components/models/OpenAIModel.py +++ b/src/backend/base/langflow/components/models/OpenAIModel.py @@ -115,11 +115,11 @@ class OpenAIModelComponent(LCModelComponent): ) if json_mode: if output_schema_dict: - output = output.with_structured_output(schema=output_schema_dict, method="json_mode") # type: ignore + output = output.with_structured_output(schema=output_schema_dict, method="json_mode") else: - output = output.bind(response_format={"type": "json_object"}) # type: ignore + output = output.bind(response_format={"type": "json_object"}) - return output # type: ignore + return output def _get_exception_message(self, e: Exception): """ @@ -137,7 +137,7 @@ class OpenAIModelComponent(LCModelComponent): except ImportError: return None if isinstance(e, BadRequestError): - message = e.body.get("message") # type: ignore + message = e.body.get("message") if message: return message return None diff --git a/src/backend/base/langflow/components/prototypes/ConditionalRouter.py b/src/backend/base/langflow/components/prototypes/ConditionalRouter.py index 2cf2d5aa2..a13c1eaeb 100644 --- a/src/backend/base/langflow/components/prototypes/ConditionalRouter.py +++ b/src/backend/base/langflow/components/prototypes/ConditionalRouter.py @@ -70,7 +70,7 @@ class ConditionalRouterComponent(Component): self.status = self.message return self.message self.stop("true_result") - return None # type: ignore + return None # type: ignore[return-value] def false_response(self) -> Message: result = self.evaluate_condition(self.input_text, self.match_text, self.operator, self.case_sensitive) @@ -78,4 +78,4 @@ class ConditionalRouterComponent(Component): self.status = self.message return self.message self.stop("false_result") - 
return None # type: ignore + return None # type: ignore[return-value] diff --git a/src/backend/base/langflow/components/prototypes/FlowTool.py b/src/backend/base/langflow/components/prototypes/FlowTool.py index db99a6051..8e795a535 100644 --- a/src/backend/base/langflow/components/prototypes/FlowTool.py +++ b/src/backend/base/langflow/components/prototypes/FlowTool.py @@ -100,4 +100,4 @@ class FlowToolComponent(LCToolComponent): description_repr = repr(tool.description).strip("'") args_str = "\n".join([f"- {arg_name}: {arg_data['description']}" for arg_name, arg_data in tool.args.items()]) self.status = f"{description_repr}\nArguments:\n{args_str}" - return tool # type: ignore + return tool diff --git a/src/backend/base/langflow/components/prototypes/JSONCleaner.py b/src/backend/base/langflow/components/prototypes/JSONCleaner.py index f3426d07c..968179ac8 100644 --- a/src/backend/base/langflow/components/prototypes/JSONCleaner.py +++ b/src/backend/base/langflow/components/prototypes/JSONCleaner.py @@ -46,7 +46,7 @@ class JSONCleaner(Component): def clean_json(self) -> Message: try: - from json_repair import repair_json # type: ignore + from json_repair import repair_json except ImportError as e: msg = "Could not import the json_repair package. Please install it with `pip install json_repair`." raise ImportError(msg) from e diff --git a/src/backend/base/langflow/components/retrievers/AmazonKendra.py b/src/backend/base/langflow/components/retrievers/AmazonKendra.py index ce98c792f..43fefeec2 100644 --- a/src/backend/base/langflow/components/retrievers/AmazonKendra.py +++ b/src/backend/base/langflow/components/retrievers/AmazonKendra.py @@ -46,7 +46,7 @@ class AmazonKendraRetrieverComponent(CustomComponent): credentials_profile_name=credentials_profile_name, attribute_filter=attribute_filter, user_context=user_context, - ) # type: ignore + ) except Exception as e: msg = "Could not connect to AmazonKendra API." 
raise ValueError(msg) from e diff --git a/src/backend/base/langflow/components/retrievers/CohereRerank.py b/src/backend/base/langflow/components/retrievers/CohereRerank.py index a15eda2c8..3a4882a8a 100644 --- a/src/backend/base/langflow/components/retrievers/CohereRerank.py +++ b/src/backend/base/langflow/components/retrievers/CohereRerank.py @@ -73,7 +73,7 @@ class CohereRerankComponent(LCVectorStoreComponent): retriever = ContextualCompressionRetriever(base_compressor=cohere_reranker, base_retriever=self.retriever) return cast(Retriever, retriever) - async def search_documents(self) -> list[Data]: # type: ignore + async def search_documents(self) -> list[Data]: # type: ignore[override] retriever = self.build_base_retriever() documents = await retriever.ainvoke(self.search_query, config={"callbacks": self.get_langchain_callbacks()}) data = self.to_data(documents) diff --git a/src/backend/base/langflow/components/retrievers/MetalRetriever.py b/src/backend/base/langflow/components/retrievers/MetalRetriever.py index d30aef4aa..083031560 100644 --- a/src/backend/base/langflow/components/retrievers/MetalRetriever.py +++ b/src/backend/base/langflow/components/retrievers/MetalRetriever.py @@ -1,7 +1,7 @@ from typing import cast from langchain_community.retrievers import MetalRetriever -from metal_sdk.metal import Metal # type: ignore +from metal_sdk.metal import Metal from langflow.custom import CustomComponent from langflow.field_typing import Retriever diff --git a/src/backend/base/langflow/components/retrievers/NvidiaRerank.py b/src/backend/base/langflow/components/retrievers/NvidiaRerank.py index 70a47d6b4..d0a68f6a2 100644 --- a/src/backend/base/langflow/components/retrievers/NvidiaRerank.py +++ b/src/backend/base/langflow/components/retrievers/NvidiaRerank.py @@ -72,7 +72,7 @@ class NvidiaRerankComponent(LCVectorStoreComponent): retriever = ContextualCompressionRetriever(base_compressor=nvidia_reranker, base_retriever=self.retriever) return cast(Retriever, 
retriever) - async def search_documents(self) -> list[Data]: # type: ignore + async def search_documents(self) -> list[Data]: # type: ignore[override] retriever = self.build_base_retriever() documents = await retriever.ainvoke(self.search_query, config={"callbacks": self.get_langchain_callbacks()}) data = self.to_data(documents) diff --git a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py index 86733e2d5..17cf494b5 100644 --- a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py +++ b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py @@ -45,7 +45,7 @@ class VectaraSelfQueryRetriverComponent(CustomComponent): document_content_description: str, llm: LanguageModel, metadata_field_info: list[str], - ) -> Retriever: # type: ignore + ) -> Retriever: metadata_field_obj = [] for meta in metadata_field_info: diff --git a/src/backend/base/langflow/components/toolkits/ComposioAPI.py b/src/backend/base/langflow/components/toolkits/ComposioAPI.py index 13a83e32d..be63876d9 100644 --- a/src/backend/base/langflow/components/toolkits/ComposioAPI.py +++ b/src/backend/base/langflow/components/toolkits/ComposioAPI.py @@ -1,7 +1,7 @@ from collections.abc import Sequence from typing import Any -from composio_langchain import Action, App, ComposioToolSet # type: ignore +from composio_langchain import Action, App, ComposioToolSet from langchain_core.tools import Tool from loguru import logger diff --git a/src/backend/base/langflow/components/toolkits/Metaphor.py b/src/backend/base/langflow/components/toolkits/Metaphor.py index 29c8ce9a3..66caa7b2b 100644 --- a/src/backend/base/langflow/components/toolkits/Metaphor.py +++ b/src/backend/base/langflow/components/toolkits/Metaphor.py @@ -1,6 +1,6 @@ from langchain_community.agent_toolkits.base import BaseToolkit from langchain_core.tools import Tool, tool -from metaphor_python import 
Metaphor # type: ignore +from metaphor_python import Metaphor from langflow.custom import CustomComponent @@ -48,4 +48,4 @@ class MetaphorToolkit(CustomComponent): """ return client.find_similar(url, num_results=similar_num_results) - return [search, get_contents, find_similar] # type: ignore + return [search, get_contents, find_similar] diff --git a/src/backend/base/langflow/components/tools/BingSearchAPI.py b/src/backend/base/langflow/components/tools/BingSearchAPI.py index d753f0dfd..d408f20c5 100644 --- a/src/backend/base/langflow/components/tools/BingSearchAPI.py +++ b/src/backend/base/langflow/components/tools/BingSearchAPI.py @@ -30,7 +30,7 @@ class BingSearchAPIComponent(LCToolComponent): bing_search_url=self.bing_search_url, bing_subscription_key=self.bing_subscription_key ) else: - wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) # type: ignore + wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) results = wrapper.results(query=self.input_value, num_results=self.k) data = [Data(data=result, text=result["snippet"]) for result in results] self.status = data @@ -42,5 +42,5 @@ class BingSearchAPIComponent(LCToolComponent): bing_search_url=self.bing_search_url, bing_subscription_key=self.bing_subscription_key ) else: - wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) # type: ignore + wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) return cast(Tool, BingSearchResults(api_wrapper=wrapper, num_results=self.k)) diff --git a/src/backend/base/langflow/components/tools/GoogleSearchAPI.py b/src/backend/base/langflow/components/tools/GoogleSearchAPI.py index c1f1ef57d..7df0fef5f 100644 --- a/src/backend/base/langflow/components/tools/GoogleSearchAPI.py +++ b/src/backend/base/langflow/components/tools/GoogleSearchAPI.py @@ -37,7 +37,7 @@ class GoogleSearchAPIComponent(LCToolComponent): def _build_wrapper(self): try: - from 
langchain_google_community import GoogleSearchAPIWrapper # type: ignore + from langchain_google_community import GoogleSearchAPIWrapper except ImportError as e: msg = "Please install langchain-google-community to use GoogleSearchAPIWrapper." raise ImportError(msg) from e diff --git a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py b/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py index c20c0f2a7..2257f61a0 100644 --- a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py +++ b/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py @@ -125,7 +125,7 @@ class PythonCodeStructuredTool(LCToolComponent): return build_config async def build_tool(self) -> Tool: - _local_namespace = {} # type: ignore + _local_namespace = {} # type: ignore[var-annotated] modules = self._find_imports(self.tool_code) import_code = "" for module in modules["imports"]: @@ -149,7 +149,7 @@ class PythonCodeStructuredTool(LCToolComponent): return _local_namespace[self.tool_function](**PythonCodeToolFunc.params) _globals = globals() - _local = {} # type: ignore + _local = {} _local[self.tool_function] = PythonCodeToolFunc _globals.update(_local) @@ -196,7 +196,7 @@ class PythonCodeStructuredTool(LCToolComponent): PythonCodeToolSchema = None if schema_fields: - PythonCodeToolSchema = create_model("PythonCodeToolSchema", **schema_fields) # type: ignore + PythonCodeToolSchema = create_model("PythonCodeToolSchema", **schema_fields) return StructuredTool.from_function( func=_local[self.tool_function].run, diff --git a/src/backend/base/langflow/components/tools/SearXNGTool.py b/src/backend/base/langflow/components/tools/SearXNGTool.py index 94a34024b..8adb54ffa 100644 --- a/src/backend/base/langflow/components/tools/SearXNGTool.py +++ b/src/backend/base/langflow/components/tools/SearXNGTool.py @@ -126,7 +126,7 @@ class SearXNGToolComponent(LCToolComponent): "categories": (list[str], Field(default=[], description="The 
categories to search in.")), } - SearxSearchSchema = create_model("SearxSearchSchema", **schema_fields) # type: ignore + SearxSearchSchema = create_model("SearxSearchSchema", **schema_fields) return StructuredTool.from_function( func=_local["SearxSearch"].search, diff --git a/src/backend/base/langflow/components/tools/WikipediaAPI.py b/src/backend/base/langflow/components/tools/WikipediaAPI.py index ad8bca404..51aafbec3 100644 --- a/src/backend/base/langflow/components/tools/WikipediaAPI.py +++ b/src/backend/base/langflow/components/tools/WikipediaAPI.py @@ -39,7 +39,7 @@ class WikipediaAPIComponent(LCToolComponent): return cast(Tool, WikipediaQueryRun(api_wrapper=wrapper)) def _build_wrapper(self) -> WikipediaAPIWrapper: - return WikipediaAPIWrapper( # type: ignore + return WikipediaAPIWrapper( top_k_results=self.k, lang=self.lang, load_all_available_meta=self.load_all_available_meta, diff --git a/src/backend/base/langflow/components/tools/WolframAlphaAPI.py b/src/backend/base/langflow/components/tools/WolframAlphaAPI.py index 4f378b1a0..6e30781e3 100644 --- a/src/backend/base/langflow/components/tools/WolframAlphaAPI.py +++ b/src/backend/base/langflow/components/tools/WolframAlphaAPI.py @@ -31,4 +31,4 @@ class WolframAlphaAPIComponent(LCToolComponent): return Tool(name="wolfram_alpha_api", description="Answers mathematical questions.", func=wrapper.run) def _build_wrapper(self) -> WolframAlphaAPIWrapper: - return WolframAlphaAPIWrapper(wolfram_alpha_appid=self.app_id) # type: ignore + return WolframAlphaAPIWrapper(wolfram_alpha_appid=self.app_id) diff --git a/src/backend/base/langflow/components/vectorstores/Clickhouse.py b/src/backend/base/langflow/components/vectorstores/Clickhouse.py index faf501e3b..cfd81ff77 100644 --- a/src/backend/base/langflow/components/vectorstores/Clickhouse.py +++ b/src/backend/base/langflow/components/vectorstores/Clickhouse.py @@ -70,7 +70,7 @@ class ClickhouseVectorStoreComponent(LCVectorStoreComponent): @check_cached_vector_store 
def build_vector_store(self) -> Clickhouse: try: - import clickhouse_connect # type: ignore + import clickhouse_connect except ImportError as e: msg = ( "Failed to import Clickhouse dependencies. " diff --git a/src/backend/base/langflow/components/vectorstores/Couchbase.py b/src/backend/base/langflow/components/vectorstores/Couchbase.py index ec6806a9e..622d582a7 100644 --- a/src/backend/base/langflow/components/vectorstores/Couchbase.py +++ b/src/backend/base/langflow/components/vectorstores/Couchbase.py @@ -44,9 +44,9 @@ class CouchbaseVectorStoreComponent(LCVectorStoreComponent): @check_cached_vector_store def build_vector_store(self) -> CouchbaseVectorStore: try: - from couchbase.auth import PasswordAuthenticator # type: ignore - from couchbase.cluster import Cluster # type: ignore - from couchbase.options import ClusterOptions # type: ignore + from couchbase.auth import PasswordAuthenticator + from couchbase.cluster import Cluster + from couchbase.options import ClusterOptions except ImportError as e: msg = "Failed to import Couchbase dependencies. Install it using `pip install langflow[couchbase] --pre`" raise ImportError(msg) from e diff --git a/src/backend/base/langflow/core/celery_app.py b/src/backend/base/langflow/core/celery_app.py index ef3fc6545..74e29c5e0 100644 --- a/src/backend/base/langflow/core/celery_app.py +++ b/src/backend/base/langflow/core/celery_app.py @@ -1,4 +1,4 @@ -from celery import Celery # type: ignore +from celery import Celery def make_celery(app_name: str, config: str) -> Celery: diff --git a/src/backend/base/langflow/custom/code_parser/code_parser.py b/src/backend/base/langflow/custom/code_parser/code_parser.py index ae9152ea4..9fcf12abe 100644 --- a/src/backend/base/langflow/custom/code_parser/code_parser.py +++ b/src/backend/base/langflow/custom/code_parser/code_parser.py @@ -108,8 +108,8 @@ class CodeParser: Parses an AST node and updates the data dictionary with the relevant information. 
""" - if handler := self.handlers.get(type(node)): # type: ignore - handler(node) # type: ignore + if handler := self.handlers.get(type(node)): + handler(node) # type: ignore[operator] def parse_imports(self, node: ast.Import | ast.ImportFrom) -> None: """ diff --git a/src/backend/base/langflow/custom/custom_component/component.py b/src/backend/base/langflow/custom/custom_component/component.py index 8cc722b01..a5e8f0511 100644 --- a/src/backend/base/langflow/custom/custom_component/component.py +++ b/src/backend/base/langflow/custom/custom_component/component.py @@ -6,7 +6,7 @@ from copy import deepcopy from textwrap import dedent from typing import TYPE_CHECKING, Any, ClassVar, get_type_hints -import nanoid # type: ignore +import nanoid import yaml from pydantic import BaseModel diff --git a/src/backend/base/langflow/graph/edge/base.py b/src/backend/base/langflow/graph/edge/base.py index 7454bb964..8d9b6721c 100644 --- a/src/backend/base/langflow/graph/edge/base.py +++ b/src/backend/base/langflow/graph/edge/base.py @@ -53,8 +53,8 @@ class Edge: else: # Logging here because this is a breaking change logger.error("Edge data is empty") - self._source_handle = edge.get("sourceHandle", "") # type: ignore - self._target_handle = edge.get("targetHandle", "") # type: ignore + self._source_handle = edge.get("sourceHandle", "") # type: ignore[assignment] + self._target_handle = edge.get("targetHandle", "") # type: ignore[assignment] # 'BaseLoader;BaseOutputParser|documents|PromptTemplate-zmTlD' # target_param is documents if isinstance(self._target_handle, str): diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py index a1e0aba7d..d9200a919 100644 --- a/src/backend/base/langflow/graph/graph/base.py +++ b/src/backend/base/langflow/graph/graph/base.py @@ -1725,8 +1725,8 @@ class Graph: def _create_vertex(self, frontend_data: NodeData): vertex_data = frontend_data["data"] - vertex_type: str = vertex_data["type"] # type: 
ignore - vertex_base_type: str = vertex_data["node"]["template"]["_type"] # type: ignore + vertex_type: str = vertex_data["type"] + vertex_base_type: str = vertex_data["node"]["template"]["_type"] if "id" not in vertex_data: msg = f"Vertex data for {vertex_data['display_name']} does not contain an id" raise ValueError(msg) diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py index 935e6434e..ef7b8afab 100644 --- a/src/backend/base/langflow/graph/vertex/base.py +++ b/src/backend/base/langflow/graph/vertex/base.py @@ -267,7 +267,7 @@ class Vertex: def get_task(self): # using the task_id, get the task from celery # and return it - from celery.result import AsyncResult # type: ignore + from celery.result import AsyncResult return AsyncResult(self.task_id) @@ -446,9 +446,9 @@ class Vertex: if any(isinstance(self._raw_params.get(key), Vertex) for key in new_params): return if not overwrite: - for key in new_params.copy(): # type: ignore + for key in new_params.copy(): # type: ignore[attr-defined] if key not in self._raw_params: - new_params.pop(key) # type: ignore + new_params.pop(key) # type: ignore[attr-defined] self._raw_params.update(new_params) self.params = self._raw_params.copy() self.updated_raw_params = True @@ -692,9 +692,9 @@ class Vertex: if key == "func": if not isinstance(result, types.FunctionType): if hasattr(result, "run"): - result = result.run # type: ignore + result = result.run elif hasattr(result, "get_function"): - result = result.get_function() # type: ignore + result = result.get_function() elif inspect.iscoroutinefunction(result): self.params["coroutine"] = result else: diff --git a/src/backend/base/langflow/helpers/flow.py b/src/backend/base/langflow/helpers/flow.py index 964a553c2..8c4daca12 100644 --- a/src/backend/base/langflow/helpers/flow.py +++ b/src/backend/base/langflow/helpers/flow.py @@ -34,7 +34,7 @@ def list_flows(*, user_id: str | None = None) -> list[Data]: try: with 
session_scope() as session: flows = session.exec( - select(Flow).where(Flow.user_id == user_id).where(Flow.is_component == False) # noqa + select(Flow).where(Flow.user_id == user_id).where(Flow.is_component == False) # noqa: E712 ).all() return [flow.to_data() for flow in flows] @@ -107,7 +107,7 @@ async def run_flow( for vertex in graph.vertices if output_type == "debug" or ( - vertex.is_output and (output_type == "any" or output_type in vertex.id.lower()) # type: ignore + vertex.is_output and (output_type == "any" or output_type in vertex.id.lower()) # type: ignore[operator] ) ] @@ -249,7 +249,7 @@ def build_schema_from_inputs(name: str, inputs: list[Vertex]) -> type[BaseModel] field_name = input_.display_name.lower().replace(" ", "_") description = input_.description fields[field_name] = (str, Field(default="", description=description)) - return create_model(name, **fields) # type: ignore + return create_model(name, **fields) def get_arg_names(inputs: list[Vertex]) -> list[dict[str, str]]: diff --git a/src/backend/base/langflow/initial_setup/setup.py b/src/backend/base/langflow/initial_setup/setup.py index d548021a7..3d89e9999 100644 --- a/src/backend/base/langflow/initial_setup/setup.py +++ b/src/backend/base/langflow/initial_setup/setup.py @@ -10,7 +10,7 @@ from pathlib import Path from uuid import UUID import orjson -from emoji import demojize, purely_emoji # type: ignore +from emoji import demojize, purely_emoji from loguru import logger from sqlmodel import select diff --git a/src/backend/base/langflow/inputs/input_mixin.py b/src/backend/base/langflow/inputs/input_mixin.py index a87da6cfd..896f2d5d2 100644 --- a/src/backend/base/langflow/inputs/input_mixin.py +++ b/src/backend/base/langflow/inputs/input_mixin.py @@ -35,7 +35,7 @@ SerializableFieldTypes = Annotated[FieldTypes, PlainSerializer(lambda v: v.value # Base mixin for common input field attributes and methods -class BaseInputMixin(BaseModel, validate_assignment=True): # type: ignore +class 
BaseInputMixin(BaseModel, validate_assignment=True): # type: ignore[call-arg] model_config = ConfigDict( arbitrary_types_allowed=True, extra="forbid", diff --git a/src/backend/base/langflow/io/schema.py b/src/backend/base/langflow/io/schema.py index a0d060a23..328e63e79 100644 --- a/src/backend/base/langflow/io/schema.py +++ b/src/backend/base/langflow/io/schema.py @@ -38,9 +38,9 @@ def create_input_schema(inputs: list["InputTypes"]) -> type[BaseModel]: literal_string = f"Literal{input_model.options}" # validate that the literal_string is a valid literal - field_type = eval(literal_string, {"Literal": Literal}) # type: ignore + field_type = eval(literal_string, {"Literal": Literal}) if hasattr(input_model, "is_list") and input_model.is_list: - field_type = list[field_type] # type: ignore + field_type = list[field_type] # type: ignore[valid-type] if input_model.name: name = input_model.name.replace("_", " ").title() elif input_model.display_name: @@ -53,7 +53,7 @@ def create_input_schema(inputs: list["InputTypes"]) -> type[BaseModel]: "description": input_model.info or "", } if input_model.required is False: - field_dict["default"] = input_model.value # type: ignore + field_dict["default"] = input_model.value # type: ignore[assignment] pydantic_field = Field(**field_dict) fields[input_model.name] = (field_type, pydantic_field) diff --git a/src/backend/base/langflow/load/load.py b/src/backend/base/langflow/load/load.py index 336873e71..1e775112b 100644 --- a/src/backend/base/langflow/load/load.py +++ b/src/backend/base/langflow/load/load.py @@ -108,7 +108,7 @@ def run_flow_from_json( """ # Set all streaming to false try: - import nest_asyncio # type: ignore + import nest_asyncio nest_asyncio.apply() except Exception: diff --git a/src/backend/base/langflow/main.py b/src/backend/base/langflow/main.py index a9005da52..a3e137787 100644 --- a/src/backend/base/langflow/main.py +++ b/src/backend/base/langflow/main.py @@ -8,7 +8,7 @@ from http import HTTPStatus from pathlib 
import Path from urllib.parse import urlencode -import nest_asyncio # type: ignore +import nest_asyncio from fastapi import FastAPI, HTTPException, Request, Response, status from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import FileResponse, JSONResponse @@ -192,7 +192,7 @@ def create_app(): raise ValueError(msg) if settings.prometheus_enabled: - from prometheus_client import start_http_server # type: ignore + from prometheus_client import start_http_server start_http_server(settings.prometheus_port) diff --git a/src/backend/base/langflow/processing/base.py b/src/backend/base/langflow/processing/base.py index f9f356558..b12be39cb 100644 --- a/src/backend/base/langflow/processing/base.py +++ b/src/backend/base/langflow/processing/base.py @@ -8,7 +8,7 @@ from langflow.services.deps import get_plugins_service if TYPE_CHECKING: from langchain_core.callbacks import BaseCallbackHandler - from langfuse.callback import CallbackHandler # type: ignore + from langfuse.callback import CallbackHandler def setup_callbacks(sync, trace_id, **kwargs): diff --git a/src/backend/base/langflow/schema/data.py b/src/backend/base/langflow/schema/data.py index 86128d7c3..594fc3d12 100644 --- a/src/backend/base/langflow/schema/data.py +++ b/src/backend/base/langflow/schema/data.py @@ -144,9 +144,9 @@ class Data(BaseModel): image_template = ImagePromptTemplate() image_prompt_value: ImagePromptValue = image_template.invoke( input={"path": file_path}, config={"callbacks": self.get_langchain_callbacks()} - ) # type: ignore + ) contents.append({"type": "image_url", "image_url": image_prompt_value.image_url}) - human_message = HumanMessage(content=contents) # type: ignore + human_message = HumanMessage(content=contents) else: human_message = HumanMessage( content=[{"type": "text", "text": text}], @@ -154,7 +154,7 @@ class Data(BaseModel): return human_message - return AIMessage(content=text) # type: ignore + return AIMessage(content=text) def __getattr__(self, key): """ 
diff --git a/src/backend/base/langflow/schema/message.py b/src/backend/base/langflow/schema/message.py index 01a5b7973..accf6c98c 100644 --- a/src/backend/base/langflow/schema/message.py +++ b/src/backend/base/langflow/schema/message.py @@ -113,12 +113,12 @@ class Message(Data): if self.files: contents = [{"type": "text", "text": text}] contents.extend(self.sync_get_file_content_dicts()) - human_message = HumanMessage(content=contents) # type: ignore + human_message = HumanMessage(content=contents) else: human_message = HumanMessage(content=text) return human_message - return AIMessage(content=text) # type: ignore + return AIMessage(content=text) @classmethod def from_lc_message(cls, lc_message: BaseMessage) -> Message: @@ -180,7 +180,7 @@ class Message(Data): content_dicts.append(file.to_content_dict()) else: image_template = ImagePromptTemplate() - image_prompt_value: ImagePromptValue = image_template.invoke(input={"path": file}) # type: ignore + image_prompt_value: ImagePromptValue = image_template.invoke(input={"path": file}) content_dicts.append({"type": "image_url", "image_url": image_prompt_value.image_url}) return content_dicts @@ -235,7 +235,7 @@ class Message(Data): if contents: message = HumanMessage(content=[{"type": "text", "text": text}, *contents]) - prompt_template = ChatPromptTemplate.from_messages([message]) # type: ignore + prompt_template = ChatPromptTemplate.from_messages([message]) instance.prompt = jsonable_encoder(prompt_template.to_json()) instance.messages = instance.prompt.get("kwargs", {}).get("messages", []) diff --git a/src/backend/base/langflow/schema/schema.py b/src/backend/base/langflow/schema/schema.py index 685b05c5e..8ca4ce95d 100644 --- a/src/backend/base/langflow/schema/schema.py +++ b/src/backend/base/langflow/schema/schema.py @@ -123,7 +123,7 @@ def recursive_serialize_or_str(obj): if hasattr(obj, "model_dump"): obj_dict = obj.model_dump() elif hasattr(obj, "dict"): - obj_dict = obj.dict() # type: ignore + obj_dict = 
obj.dict() return {k: recursive_serialize_or_str(v) for k, v in obj_dict.items()} if isinstance(obj, AsyncIterator | Generator | Iterator): diff --git a/src/backend/base/langflow/server.py b/src/backend/base/langflow/server.py index 0c3a21a2e..0c4e20dad 100644 --- a/src/backend/base/langflow/server.py +++ b/src/backend/base/langflow/server.py @@ -2,11 +2,11 @@ import asyncio import logging import signal -from gunicorn import glogging # type: ignore -from gunicorn.app.base import BaseApplication # type: ignore +from gunicorn import glogging +from gunicorn.app.base import BaseApplication from uvicorn.workers import UvicornWorker -from langflow.logging.logger import InterceptHandler # type: ignore +from langflow.logging.logger import InterceptHandler class LangflowUvicornWorker(UvicornWorker): diff --git a/src/backend/base/langflow/services/auth/utils.py b/src/backend/base/langflow/services/auth/utils.py index 41aec6610..ec065ba9a 100644 --- a/src/backend/base/langflow/services/auth/utils.py +++ b/src/backend/base/langflow/services/auth/utils.py @@ -114,8 +114,8 @@ async def get_current_user_by_jwt( with warnings.catch_warnings(): warnings.simplefilter("ignore") payload = jwt.decode(token, secret_key, algorithms=[settings_service.auth_settings.ALGORITHM]) - user_id: UUID = payload.get("sub") # type: ignore - token_type: str = payload.get("type") # type: ignore + user_id: UUID = payload.get("sub") # type: ignore[assignment] + token_type: str = payload.get("type") # type: ignore[assignment] if expires := payload.get("exp", None): expires_datetime = datetime.fromtimestamp(expires, timezone.utc) if datetime.now(timezone.utc) > expires_datetime: @@ -305,8 +305,8 @@ def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)) settings_service.auth_settings.SECRET_KEY.get_secret_value(), algorithms=[settings_service.auth_settings.ALGORITHM], ) - user_id: UUID = payload.get("sub") # type: ignore - token_type: str = payload.get("type") # type: ignore + 
user_id: UUID = payload.get("sub") # type: ignore[assignment] + token_type: str = payload.get("type") # type: ignore[assignment] if user_id is None or token_type == "": raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token") diff --git a/src/backend/base/langflow/services/cache/disk.py b/src/backend/base/langflow/services/cache/disk.py index f53a833ce..9c866d443 100644 --- a/src/backend/base/langflow/services/cache/disk.py +++ b/src/backend/base/langflow/services/cache/disk.py @@ -10,7 +10,7 @@ from langflow.services.cache.base import AsyncBaseCacheService, AsyncLockType from langflow.services.cache.utils import CACHE_MISS -class AsyncDiskCache(AsyncBaseCacheService, Generic[AsyncLockType]): # type: ignore +class AsyncDiskCache(AsyncBaseCacheService, Generic[AsyncLockType]): def __init__(self, cache_dir, max_size=None, expiration_time=3600): self.cache = Cache(cache_dir) # Let's clear the cache for now to maintain a similar diff --git a/src/backend/base/langflow/services/cache/service.py b/src/backend/base/langflow/services/cache/service.py index 71d6e3531..2f786b188 100644 --- a/src/backend/base/langflow/services/cache/service.py +++ b/src/backend/base/langflow/services/cache/service.py @@ -11,7 +11,7 @@ from langflow.services.cache.base import AsyncBaseCacheService, AsyncLockType, C from langflow.services.cache.utils import CACHE_MISS -class ThreadingInMemoryCache(CacheService, Generic[LockType]): # type: ignore +class ThreadingInMemoryCache(CacheService, Generic[LockType]): """ A simple in-memory cache using an OrderedDict. @@ -174,7 +174,7 @@ class ThreadingInMemoryCache(CacheService, Generic[LockType]): # type: ignore return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})" -class RedisCache(AsyncBaseCacheService, Generic[LockType]): # type: ignore +class RedisCache(AsyncBaseCacheService, Generic[LockType]): """ A Redis-based cache implementation. 
@@ -327,7 +327,7 @@ class RedisCache(AsyncBaseCacheService, Generic[LockType]): # type: ignore return f"RedisCache(expiration_time={self.expiration_time})" -class AsyncInMemoryCache(AsyncBaseCacheService, Generic[AsyncLockType]): # type: ignore +class AsyncInMemoryCache(AsyncBaseCacheService, Generic[AsyncLockType]): def __init__(self, max_size=None, expiration_time=3600): self.cache = OrderedDict() diff --git a/src/backend/base/langflow/services/database/models/api_key/model.py b/src/backend/base/langflow/services/database/models/api_key/model.py index fa1ef8647..f2864e6fa 100644 --- a/src/backend/base/langflow/services/database/models/api_key/model.py +++ b/src/backend/base/langflow/services/database/models/api_key/model.py @@ -20,7 +20,7 @@ class ApiKeyBase(SQLModel): is_active: bool = Field(default=True) -class ApiKey(ApiKeyBase, table=True): # type: ignore +class ApiKey(ApiKeyBase, table=True): # type: ignore[call-arg] id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) created_at: datetime | None = Field( default=None, sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=False) diff --git a/src/backend/base/langflow/services/database/models/flow/model.py b/src/backend/base/langflow/services/database/models/flow/model.py index 855437849..100da7681 100644 --- a/src/backend/base/langflow/services/database/models/flow/model.py +++ b/src/backend/base/langflow/services/database/models/flow/model.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Optional from uuid import UUID, uuid4 import emoji -from emoji import purely_emoji # type: ignore +from emoji import purely_emoji from fastapi import HTTPException, status from loguru import logger from pydantic import field_serializer, field_validator @@ -150,7 +150,7 @@ class FlowBase(SQLModel): return datetime.fromisoformat(v) -class Flow(FlowBase, table=True): # type: ignore +class Flow(FlowBase, table=True): # type: ignore[call-arg] id: UUID = Field(default_factory=uuid4, 
primary_key=True, unique=True) data: dict | None = Field(default=None, sa_column=Column(JSON)) user_id: UUID | None = Field(index=True, foreign_key="user.id", nullable=True) diff --git a/src/backend/base/langflow/services/database/models/folder/model.py b/src/backend/base/langflow/services/database/models/folder/model.py index 0dbfb94d6..82a2e6d56 100644 --- a/src/backend/base/langflow/services/database/models/folder/model.py +++ b/src/backend/base/langflow/services/database/models/folder/model.py @@ -13,7 +13,7 @@ class FolderBase(SQLModel): description: str | None = Field(default=None, sa_column=Column(Text)) -class Folder(FolderBase, table=True): # type: ignore +class Folder(FolderBase, table=True): # type: ignore[call-arg] id: UUID | None = Field(default_factory=uuid4, primary_key=True) parent_id: UUID | None = Field(default=None, foreign_key="folder.id") diff --git a/src/backend/base/langflow/services/database/models/folder/utils.py b/src/backend/base/langflow/services/database/models/folder/utils.py index 4ffdb4fdc..342920123 100644 --- a/src/backend/base/langflow/services/database/models/folder/utils.py +++ b/src/backend/base/langflow/services/database/models/folder/utils.py @@ -20,10 +20,10 @@ def create_default_folder_if_it_doesnt_exist(session: Session, user_id: UUID): session.commit() session.refresh(folder) session.exec( - update(Flow) # type: ignore + update(Flow) .where( and_( - Flow.folder_id == None, # type: ignore # noqa + Flow.folder_id.is_(None), Flow.user_id == user_id, ) ) diff --git a/src/backend/base/langflow/services/database/models/message/model.py b/src/backend/base/langflow/services/database/models/message/model.py index 355ac2d2c..753ab1b96 100644 --- a/src/backend/base/langflow/services/database/models/message/model.py +++ b/src/backend/base/langflow/services/database/models/message/model.py @@ -61,7 +61,7 @@ class MessageBase(SQLModel): ) -class MessageTable(MessageBase, table=True): # type: ignore +class MessageTable(MessageBase, 
table=True): # type: ignore[call-arg] __tablename__ = "message" id: UUID = Field(default_factory=uuid4, primary_key=True) flow_id: UUID | None = Field(default=None, foreign_key="flow.id") diff --git a/src/backend/base/langflow/services/database/models/transactions/model.py b/src/backend/base/langflow/services/database/models/transactions/model.py index effcf36cb..68edddde7 100644 --- a/src/backend/base/langflow/services/database/models/transactions/model.py +++ b/src/backend/base/langflow/services/database/models/transactions/model.py @@ -33,7 +33,7 @@ class TransactionBase(SQLModel): return value -class TransactionTable(TransactionBase, table=True): # type: ignore +class TransactionTable(TransactionBase, table=True): # type: ignore[call-arg] __tablename__ = "transaction" id: UUID | None = Field(default_factory=uuid4, primary_key=True) flow: "Flow" = Relationship(back_populates="transactions") diff --git a/src/backend/base/langflow/services/database/models/user/crud.py b/src/backend/base/langflow/services/database/models/user/crud.py index bb9b1cfde..090ecca2d 100644 --- a/src/backend/base/langflow/services/database/models/user/crud.py +++ b/src/backend/base/langflow/services/database/models/user/crud.py @@ -22,7 +22,7 @@ def update_user(user_db: User | None, user: UserUpdate, db: Session = Depends(ge if not user_db: raise HTTPException(status_code=404, detail="User not found") - # user_db_by_username = get_user_by_username(db, user.username) # type: ignore + # user_db_by_username = get_user_by_username(db, user.username) # if user_db_by_username and user_db_by_username.id != user_id: # raise HTTPException(status_code=409, detail="Username already exists") @@ -50,7 +50,7 @@ def update_user(user_db: User | None, user: UserUpdate, db: Session = Depends(ge def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)): try: - user_data = UserUpdate(last_login_at=datetime.now(timezone.utc)) # type: ignore + user_data = 
UserUpdate(last_login_at=datetime.now(timezone.utc)) user = get_user_by_id(db, user_id) return update_user(user, user_data, db) except Exception: diff --git a/src/backend/base/langflow/services/database/models/user/model.py b/src/backend/base/langflow/services/database/models/user/model.py index fc3e2e7f9..b3d48c58a 100644 --- a/src/backend/base/langflow/services/database/models/user/model.py +++ b/src/backend/base/langflow/services/database/models/user/model.py @@ -11,7 +11,7 @@ if TYPE_CHECKING: from langflow.services.database.models.variable import Variable -class User(SQLModel, table=True): # type: ignore +class User(SQLModel, table=True): # type: ignore[call-arg] id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) username: str = Field(index=True, unique=True) password: str = Field() diff --git a/src/backend/base/langflow/services/database/models/variable/model.py b/src/backend/base/langflow/services/database/models/variable/model.py index 19cd04866..77791cec1 100644 --- a/src/backend/base/langflow/services/database/models/variable/model.py +++ b/src/backend/base/langflow/services/database/models/variable/model.py @@ -22,7 +22,7 @@ class VariableBase(SQLModel): type: str | None = Field(None, description="Type of the variable") -class Variable(VariableBase, table=True): # type: ignore +class Variable(VariableBase, table=True): # type: ignore[call-arg] id: UUID | None = Field( default_factory=uuid4, primary_key=True, diff --git a/src/backend/base/langflow/services/database/models/vertex_builds/model.py b/src/backend/base/langflow/services/database/models/vertex_builds/model.py index 6326dfcc9..5abccaaa8 100644 --- a/src/backend/base/langflow/services/database/models/vertex_builds/model.py +++ b/src/backend/base/langflow/services/database/models/vertex_builds/model.py @@ -40,7 +40,7 @@ class VertexBuildBase(SQLModel): return value -class VertexBuildTable(VertexBuildBase, table=True): # type: ignore +class VertexBuildTable(VertexBuildBase, 
table=True): # type: ignore[call-arg] __tablename__ = "vertex_build" build_id: UUID | None = Field(default_factory=uuid4, primary_key=True) flow: "Flow" = Relationship(back_populates="vertex_builds") diff --git a/src/backend/base/langflow/services/deps.py b/src/backend/base/langflow/services/deps.py index fdd52d07c..ca954c2a0 100644 --- a/src/backend/base/langflow/services/deps.py +++ b/src/backend/base/langflow/services/deps.py @@ -45,7 +45,7 @@ def get_service(service_type: ServiceType, default=None): # ! This is a workaround to ensure that the service manager is initialized # ! Not optimal, but it works for now service_manager.register_factories() - return service_manager.get(service_type, default) # type: ignore + return service_manager.get(service_type, default) def get_telemetry_service() -> TelemetryService: @@ -57,7 +57,7 @@ def get_telemetry_service() -> TelemetryService: """ from langflow.services.telemetry.factory import TelemetryServiceFactory - return get_service(ServiceType.TELEMETRY_SERVICE, TelemetryServiceFactory()) # type: ignore + return get_service(ServiceType.TELEMETRY_SERVICE, TelemetryServiceFactory()) def get_tracing_service() -> TracingService: @@ -69,7 +69,7 @@ def get_tracing_service() -> TracingService: """ from langflow.services.tracing.factory import TracingServiceFactory - return get_service(ServiceType.TRACING_SERVICE, TracingServiceFactory()) # type: ignore + return get_service(ServiceType.TRACING_SERVICE, TracingServiceFactory()) def get_state_service() -> StateService: @@ -81,7 +81,7 @@ def get_state_service() -> StateService: """ from langflow.services.state.factory import StateServiceFactory - return get_service(ServiceType.STATE_SERVICE, StateServiceFactory()) # type: ignore + return get_service(ServiceType.STATE_SERVICE, StateServiceFactory()) def get_socket_service() -> SocketIOService: @@ -91,7 +91,7 @@ def get_socket_service() -> SocketIOService: Returns: SocketIOService: The SocketIOService instance. 
""" - return get_service(ServiceType.SOCKETIO_SERVICE) # type: ignore + return get_service(ServiceType.SOCKETIO_SERVICE) # type: ignore[attr-defined] def get_storage_service() -> StorageService: @@ -103,7 +103,7 @@ def get_storage_service() -> StorageService: """ from langflow.services.storage.factory import StorageServiceFactory - return get_service(ServiceType.STORAGE_SERVICE, default=StorageServiceFactory()) # type: ignore + return get_service(ServiceType.STORAGE_SERVICE, default=StorageServiceFactory()) def get_variable_service() -> VariableService: @@ -116,7 +116,7 @@ def get_variable_service() -> VariableService: """ from langflow.services.variable.factory import VariableServiceFactory - return get_service(ServiceType.VARIABLE_SERVICE, VariableServiceFactory()) # type: ignore + return get_service(ServiceType.VARIABLE_SERVICE, VariableServiceFactory()) def get_plugins_service() -> PluginService: @@ -126,7 +126,7 @@ def get_plugins_service() -> PluginService: Returns: PluginService: The PluginService instance. 
""" - return get_service(ServiceType.PLUGIN_SERVICE) # type: ignore + return get_service(ServiceType.PLUGIN_SERVICE) # type: ignore[attr-defined] def get_settings_service() -> SettingsService: @@ -143,7 +143,7 @@ def get_settings_service() -> SettingsService: """ from langflow.services.settings.factory import SettingsServiceFactory - return get_service(ServiceType.SETTINGS_SERVICE, SettingsServiceFactory()) # type: ignore + return get_service(ServiceType.SETTINGS_SERVICE, SettingsServiceFactory()) def get_db_service() -> DatabaseService: @@ -156,7 +156,7 @@ def get_db_service() -> DatabaseService: """ from langflow.services.database.factory import DatabaseServiceFactory - return get_service(ServiceType.DATABASE_SERVICE, DatabaseServiceFactory()) # type: ignore + return get_service(ServiceType.DATABASE_SERVICE, DatabaseServiceFactory()) def get_session() -> Generator[Session, None, None]: @@ -207,7 +207,7 @@ def get_cache_service() -> CacheService: """ from langflow.services.cache.factory import CacheServiceFactory - return get_service(ServiceType.CACHE_SERVICE, CacheServiceFactory()) # type: ignore + return get_service(ServiceType.CACHE_SERVICE, CacheServiceFactory()) def get_shared_component_cache_service() -> CacheService: @@ -219,7 +219,7 @@ def get_shared_component_cache_service() -> CacheService: """ from langflow.services.shared_component_cache.factory import SharedComponentCacheServiceFactory - return get_service(ServiceType.SHARED_COMPONENT_CACHE_SERVICE, SharedComponentCacheServiceFactory()) # type: ignore + return get_service(ServiceType.SHARED_COMPONENT_CACHE_SERVICE, SharedComponentCacheServiceFactory()) def get_session_service() -> SessionService: @@ -231,7 +231,7 @@ def get_session_service() -> SessionService: """ from langflow.services.session.factory import SessionServiceFactory - return get_service(ServiceType.SESSION_SERVICE, SessionServiceFactory()) # type: ignore + return get_service(ServiceType.SESSION_SERVICE, SessionServiceFactory()) def 
get_task_service() -> TaskService: @@ -244,7 +244,7 @@ def get_task_service() -> TaskService: """ from langflow.services.task.factory import TaskServiceFactory - return get_service(ServiceType.TASK_SERVICE, TaskServiceFactory()) # type: ignore + return get_service(ServiceType.TASK_SERVICE, TaskServiceFactory()) def get_chat_service() -> ChatService: @@ -254,7 +254,7 @@ def get_chat_service() -> ChatService: Returns: ChatService: The chat service instance. """ - return get_service(ServiceType.CHAT_SERVICE) # type: ignore + return get_service(ServiceType.CHAT_SERVICE) def get_store_service() -> StoreService: @@ -264,4 +264,4 @@ def get_store_service() -> StoreService: Returns: StoreService: The StoreService instance. """ - return get_service(ServiceType.STORE_SERVICE) # type: ignore + return get_service(ServiceType.STORE_SERVICE) diff --git a/src/backend/base/langflow/services/manager.py b/src/backend/base/langflow/services/manager.py index 82e130a40..00f09ab99 100644 --- a/src/backend/base/langflow/services/manager.py +++ b/src/backend/base/langflow/services/manager.py @@ -161,7 +161,7 @@ def initialize_session_service(): Initialize the session manager. 
""" from langflow.services.cache import factory as cache_factory - from langflow.services.session import factory as session_service_factory # type: ignore + from langflow.services.session import factory as session_service_factory initialize_settings_service() diff --git a/src/backend/base/langflow/services/plugins/langfuse_plugin.py b/src/backend/base/langflow/services/plugins/langfuse_plugin.py index 272d2910a..1ad52b92c 100644 --- a/src/backend/base/langflow/services/plugins/langfuse_plugin.py +++ b/src/backend/base/langflow/services/plugins/langfuse_plugin.py @@ -8,7 +8,7 @@ from langflow.services.deps import get_settings_service from langflow.services.plugins.base import CallbackPlugin if TYPE_CHECKING: - from langfuse import Langfuse # type: ignore + from langfuse import Langfuse class LangfuseInstance: @@ -25,7 +25,7 @@ class LangfuseInstance: def create(cls): try: logger.debug("Creating Langfuse instance") - from langfuse import Langfuse # type: ignore + from langfuse import Langfuse settings_manager = get_settings_service() diff --git a/src/backend/base/langflow/services/socket/service.py b/src/backend/base/langflow/services/socket/service.py index 45cfc5fbc..521c2fc92 100644 --- a/src/backend/base/langflow/services/socket/service.py +++ b/src/backend/base/langflow/services/socket/service.py @@ -1,6 +1,6 @@ from typing import TYPE_CHECKING, Any -import socketio # type: ignore +import socketio from loguru import logger from langflow.services.base import Service diff --git a/src/backend/base/langflow/services/socket/utils.py b/src/backend/base/langflow/services/socket/utils.py index 550145979..7bfdd9717 100644 --- a/src/backend/base/langflow/services/socket/utils.py +++ b/src/backend/base/langflow/services/socket/utils.py @@ -1,7 +1,7 @@ import time from collections.abc import Callable -import socketio # type: ignore +import socketio from sqlmodel import select from langflow.api.utils import format_elapsed_time diff --git 
a/src/backend/base/langflow/services/storage/s3.py b/src/backend/base/langflow/services/storage/s3.py index f9b68c43d..9f1412a88 100644 --- a/src/backend/base/langflow/services/storage/s3.py +++ b/src/backend/base/langflow/services/storage/s3.py @@ -1,5 +1,5 @@ -import boto3 # type: ignore -from botocore.exceptions import ClientError, NoCredentialsError # type: ignore +import boto3 +from botocore.exceptions import ClientError, NoCredentialsError from loguru import logger from .service import StorageService diff --git a/src/backend/base/langflow/services/task/backends/celery.py b/src/backend/base/langflow/services/task/backends/celery.py index f73ff6e7f..e5dbb3e50 100644 --- a/src/backend/base/langflow/services/task/backends/celery.py +++ b/src/backend/base/langflow/services/task/backends/celery.py @@ -1,7 +1,7 @@ from collections.abc import Callable from typing import TYPE_CHECKING, Any -from celery.result import AsyncResult # type: ignore +from celery.result import AsyncResult from langflow.services.task.backends.base import TaskBackend from langflow.worker import celery_app diff --git a/src/backend/base/langflow/services/task/utils.py b/src/backend/base/langflow/services/task/utils.py index 2b1bf0894..fb155e70f 100644 --- a/src/backend/base/langflow/services/task/utils.py +++ b/src/backend/base/langflow/services/task/utils.py @@ -4,7 +4,7 @@ if TYPE_CHECKING: import contextlib with contextlib.suppress(ImportError): - from celery import Celery # type: ignore + from celery import Celery def get_celery_worker_status(app: "Celery"): diff --git a/src/backend/base/langflow/services/tracing/langwatch.py b/src/backend/base/langflow/services/tracing/langwatch.py index 27d8e6327..4609bc64c 100644 --- a/src/backend/base/langflow/services/tracing/langwatch.py +++ b/src/backend/base/langflow/services/tracing/langwatch.py @@ -2,7 +2,7 @@ from __future__ import annotations from typing import TYPE_CHECKING, Any, cast -import nanoid # type: ignore +import nanoid from loguru 
import logger from langflow.schema.data import Data diff --git a/src/backend/base/langflow/services/tracing/service.py b/src/backend/base/langflow/services/tracing/service.py index 7c61c335d..dcbd9f64a 100644 --- a/src/backend/base/langflow/services/tracing/service.py +++ b/src/backend/base/langflow/services/tracing/service.py @@ -124,9 +124,7 @@ class TracingService(Service): ) def _initialize_langwatch_tracer(self): - if ( - "langwatch" not in self._tracers or self._tracers["langwatch"].trace_id != self.run_id # type: ignore - ): + if "langwatch" not in self._tracers or self._tracers["langwatch"].trace_id != self.run_id: langwatch_tracer = _get_langwatch_tracer() self._tracers["langwatch"] = langwatch_tracer( trace_name=self.run_name, @@ -164,7 +162,7 @@ class TracingService(Service): self.inputs[trace_name] = inputs self.inputs_metadata[trace_name] = metadata or {} for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore + if not tracer.ready: # type: ignore[truthy-function] continue try: tracer.add_trace(trace_id, trace_name, trace_type, inputs, metadata, vertex) @@ -173,7 +171,7 @@ class TracingService(Service): def _end_traces(self, trace_id: str, trace_name: str, error: Exception | None = None): for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore + if not tracer.ready: # type: ignore[truthy-function] continue try: tracer.end_trace( @@ -188,7 +186,7 @@ class TracingService(Service): def _end_all_traces(self, outputs: dict, error: Exception | None = None): for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore + if not tracer.ready: # type: ignore[truthy-function] continue try: tracer.end(self.inputs, outputs=self.outputs, error=error, metadata=outputs) @@ -254,7 +252,7 @@ class TracingService(Service): def get_langchain_callbacks(self) -> list[BaseCallbackHandler]: callbacks = [] for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore + if not tracer.ready: # type: 
ignore[truthy-function] continue langchain_callback = tracer.get_langchain_callback() if langchain_callback: diff --git a/src/backend/base/langflow/services/utils.py b/src/backend/base/langflow/services/utils.py index 880fa5d5d..c13bff535 100644 --- a/src/backend/base/langflow/services/utils.py +++ b/src/backend/base/langflow/services/utils.py @@ -141,7 +141,7 @@ def initialize_session_service(): Initialize the session manager. """ from langflow.services.cache import factory as cache_factory - from langflow.services.session import factory as session_service_factory # type: ignore + from langflow.services.session import factory as session_service_factory initialize_settings_service() diff --git a/src/backend/base/langflow/services/variable/kubernetes.py b/src/backend/base/langflow/services/variable/kubernetes.py index a2c982b19..4e1885688 100644 --- a/src/backend/base/langflow/services/variable/kubernetes.py +++ b/src/backend/base/langflow/services/variable/kubernetes.py @@ -82,7 +82,7 @@ class KubernetesSecretService(VariableService, Service): ) -> str: secret_name = encode_user_id(user_id) key, value = self.resolve_variable(secret_name, user_id, name) - if key.startswith(CREDENTIAL_TYPE + "_") and field == "session_id": # type: ignore + if key.startswith(CREDENTIAL_TYPE + "_") and field == "session_id": msg = ( f"variable {name} of type 'Credential' cannot be used in a Session ID field " "because its purpose is to prevent the exposure of values." 
diff --git a/src/backend/base/langflow/services/variable/kubernetes_secrets.py b/src/backend/base/langflow/services/variable/kubernetes_secrets.py index 3ac5a9cf9..3a54184a2 100644 --- a/src/backend/base/langflow/services/variable/kubernetes_secrets.py +++ b/src/backend/base/langflow/services/variable/kubernetes_secrets.py @@ -2,8 +2,8 @@ from base64 import b64decode, b64encode from http import HTTPStatus from uuid import UUID -from kubernetes import client, config # type: ignore -from kubernetes.client.rest import ApiException # type: ignore +from kubernetes import client, config +from kubernetes.client.rest import ApiException from loguru import logger diff --git a/src/backend/base/langflow/services/variable/service.py b/src/backend/base/langflow/services/variable/service.py index 70df3defa..909e91077 100644 --- a/src/backend/base/langflow/services/variable/service.py +++ b/src/backend/base/langflow/services/variable/service.py @@ -83,7 +83,7 @@ class DatabaseVariableService(VariableService, Service): msg = f"{name} variable not found." raise ValueError(msg) - if variable.type == CREDENTIAL_TYPE and field == "session_id": # type: ignore + if variable.type == CREDENTIAL_TYPE and field == "session_id": msg = ( f"variable {name} of type 'Credential' cannot be used in a Session ID field " "because its purpose is to prevent the exposure of values." 
diff --git a/src/backend/base/langflow/template/field/base.py b/src/backend/base/langflow/template/field/base.py index d8778d16e..51305329b 100644 --- a/src/backend/base/langflow/template/field/base.py +++ b/src/backend/base/langflow/template/field/base.py @@ -1,9 +1,11 @@ -from collections.abc import Callable # noqa: I001 +from collections.abc import Callable from enum import Enum -from typing import Any -from typing import GenericAlias # type: ignore -from typing import _GenericAlias # type: ignore -from typing import _UnionGenericAlias # type: ignore +from typing import ( # type: ignore[attr-defined] + Any, + GenericAlias, # type: ignore[attr-defined] + _GenericAlias, # type: ignore[attr-defined] + _UnionGenericAlias, # type: ignore[attr-defined] +) from pydantic import BaseModel, ConfigDict, Field, field_serializer, field_validator, model_serializer, model_validator diff --git a/src/backend/base/langflow/template/field/prompt.py b/src/backend/base/langflow/template/field/prompt.py index aed416cbc..b04329cf6 100644 --- a/src/backend/base/langflow/template/field/prompt.py +++ b/src/backend/base/langflow/template/field/prompt.py @@ -1,3 +1,2 @@ # This file is for backwards compatibility -from langflow.inputs.inputs import DEFAULT_PROMPT_INTUT_TYPES # noqa -from langflow.inputs.inputs import DefaultPromptField # noqa +from langflow.inputs.inputs import DEFAULT_PROMPT_INTUT_TYPES, DefaultPromptField # noqa: F401 diff --git a/src/backend/base/langflow/utils/payload.py b/src/backend/base/langflow/utils/payload.py index f7cfc565b..5ec0e099a 100644 --- a/src/backend/base/langflow/utils/payload.py +++ b/src/backend/base/langflow/utils/payload.py @@ -82,7 +82,7 @@ def build_json(root, graph) -> dict: raise ValueError(msg) values = [build_json(child, graph) for child in children] value = ( - list(values) if value["list"] else next(iter(values), None) # type: ignore + list(values) if value["list"] else next(iter(values), None) # type: ignore[arg-type] ) final_dict[key] = 
value diff --git a/src/backend/base/langflow/utils/schemas.py b/src/backend/base/langflow/utils/schemas.py index f34e0d22b..58f9f0074 100644 --- a/src/backend/base/langflow/utils/schemas.py +++ b/src/backend/base/langflow/utils/schemas.py @@ -79,7 +79,7 @@ class ChatOutputResponse(BaseModel): ): """Build chat output response from message.""" content = message.content - return cls(message=content, sender=sender, sender_name=sender_name) # type: ignore + return cls(message=content, sender=sender, sender_name=sender_name) @model_validator(mode="after") def validate_message(self): diff --git a/src/backend/base/langflow/worker.py b/src/backend/base/langflow/worker.py index 2e4b6c1fc..7a9472c37 100644 --- a/src/backend/base/langflow/worker.py +++ b/src/backend/base/langflow/worker.py @@ -3,7 +3,7 @@ from __future__ import annotations from typing import TYPE_CHECKING, Any from asgiref.sync import async_to_sync -from celery.exceptions import SoftTimeLimitExceeded # type: ignore +from celery.exceptions import SoftTimeLimitExceeded from langflow.core.celery_app import celery_app diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml index 0d4c505b7..fb4c7ce7f 100644 --- a/src/backend/base/pyproject.toml +++ b/src/backend/base/pyproject.toml @@ -62,7 +62,6 @@ ignore = [ "EXE", "FBT", "N", - "PGH", "RUF006", # Store a reference to the return value of `asyncio.create_task` "S", "SLF",