fix: remove incorrect use of logging that caused log spam (#7763)

fix: use correct logger and remove blanket logging config

* 🔧 (mcp.py, url.py, watsonx.py, langwatch.py, mem0_chat_memory.py, models/watsonx.py, schema.py): Replace the standard `logging` module with loguru, removing per-module `logging.basicConfig` calls, for consistent logging across components.
This commit is contained in:
Gabriel Luiz Freitas Almeida 2025-04-24 18:09:40 -03:00 committed by GitHub
commit c34b30718e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 10 additions and 29 deletions

View file

@@ -1,7 +1,6 @@
import asyncio
import base64
import json
import logging
from collections.abc import Awaitable, Callable
from contextvars import ContextVar
from functools import wraps
@@ -13,6 +12,7 @@ import pydantic
from anyio import BrokenResourceError
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import StreamingResponse
from loguru import logger
from mcp import types
from mcp.server import NotificationOptions, Server
from mcp.server.sse import SseServerTransport
@@ -33,8 +33,6 @@ from langflow.services.deps import (
)
from langflow.services.storage.utils import build_content_type_from_extension
logger = logging.getLogger(__name__)
T = TypeVar("T")
P = ParamSpec("P")

View file

@@ -1,8 +1,8 @@
import logging
import re
from bs4 import BeautifulSoup
from langchain_community.document_loaders import RecursiveUrlLoader
from loguru import logger
from langflow.custom.custom_component.component import Component
from langflow.helpers.data import data_to_text
@@ -11,8 +11,6 @@ from langflow.schema import Data
from langflow.schema.dataframe import DataFrame
from langflow.schema.message import Message
logger = logging.getLogger(__name__)
class URLComponent(Component):
"""A component that loads and parses child links from a root URL recursively."""

View file

@@ -1,10 +1,10 @@
import logging
from typing import Any
import requests
from ibm_watsonx_ai import APIClient, Credentials
from ibm_watsonx_ai.metanames import EmbedTextParamsMetaNames
from langchain_ibm import WatsonxEmbeddings
from loguru import logger
from pydantic.v1 import SecretStr
from langflow.base.embeddings.model import LCEmbeddingsModel
@@ -12,9 +12,6 @@ from langflow.field_typing import Embeddings
from langflow.io import BoolInput, DropdownInput, IntInput, SecretStrInput, StrInput
from langflow.schema.dotdict import dotdict
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class WatsonxEmbeddingsComponent(LCEmbeddingsModel):
display_name = "IBM watsonx.ai Embeddings"
@@ -92,7 +89,7 @@ class WatsonxEmbeddingsComponent(LCEmbeddingsModel):
data = response.json()
models = [model["model_id"] for model in data.get("resources", [])]
return sorted(models)
except Exception:
except Exception: # noqa: BLE001
logger.exception("Error fetching models")
return WatsonxEmbeddingsComponent._default_models
@@ -112,7 +109,7 @@ class WatsonxEmbeddingsComponent(LCEmbeddingsModel):
build_config.model_name.value = models[0]
info_message = f"Updated model options: {len(models)} models found in {build_config.url.value}"
logger.info(info_message)
except Exception:
except Exception: # noqa: BLE001
logger.exception("Error updating model options.")
def build_embeddings(self) -> Embeddings:

View file

@@ -1,9 +1,9 @@
import json
import logging
import os
from typing import Any
import httpx
from loguru import logger
from langflow.custom import Component
from langflow.inputs.inputs import MultilineInput
@@ -20,9 +20,6 @@ from langflow.io import (
from langflow.schema import Data
from langflow.schema.dotdict import dotdict
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class LangWatchComponent(Component):
display_name: str = "LangWatch Evaluator"

View file

@@ -1,6 +1,6 @@
import logging
import os
from loguru import logger
from mem0 import Memory, MemoryClient
from langflow.base.memory.model import LCChatMemoryComponent
@@ -14,8 +14,6 @@ from langflow.inputs import (
from langflow.io import Output
from langflow.schema import Data
logger = logging.getLogger(__name__)
class Mem0MemoryComponent(LCChatMemoryComponent):
display_name = "Mem0 Chat Memory"

View file

@@ -1,9 +1,9 @@
import json
import logging
from typing import Any
import requests
from langchain_ibm import ChatWatsonx
from loguru import logger
from pydantic.v1 import SecretStr
from langflow.base.models.model import LCModelComponent
@@ -12,9 +12,6 @@ from langflow.field_typing.range_spec import RangeSpec
from langflow.inputs import BoolInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput
from langflow.schema.dotdict import dotdict
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class WatsonxAIComponent(LCModelComponent):
display_name = "IBM watsonx.ai"
@@ -150,7 +147,7 @@ class WatsonxAIComponent(LCModelComponent):
data = response.json()
models = [model["model_id"] for model in data.get("resources", [])]
return sorted(models)
except Exception:
except Exception: # noqa: BLE001
logger.exception("Error fetching models. Using default models.")
return WatsonxAIComponent._default_models
@@ -166,7 +163,7 @@ class WatsonxAIComponent(LCModelComponent):
build_config.model_name.value = models[0]
info_message = f"Updated model options: {len(models)} models found in {build_config.url.value}"
logger.info(info_message)
except Exception:
except Exception: # noqa: BLE001
logger.exception("Error updating model options.")
def build_model(self) -> LanguageModel:

View file

@@ -1,13 +1,9 @@
import logging
from pydantic import BaseModel, field_serializer
from pydantic_core import PydanticSerializationError
from langflow.schema.log import LoggableType
from langflow.serialization.serialization import serialize
logger = logging.getLogger(__name__)
class Log(BaseModel):
name: str