refactor: Add compression to the 2 largest LF calls (#7200)

* add compression to largest calls

* add to the flows call too

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes (attempt 2/3)

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
Mike Fortman 2025-03-21 12:18:59 -05:00 committed by GitHub
commit 05c34f932d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 38 additions and 4 deletions

View file

@@ -47,6 +47,7 @@ from langflow.services.database.models.user.model import User, UserRead
from langflow.services.deps import get_session_service, get_settings_service, get_telemetry_service
from langflow.services.settings.feature_flags import FEATURE_FLAGS
from langflow.services.telemetry.schema import RunPayload
from langflow.utils.compression import compress_response
from langflow.utils.version import get_version_info
if TYPE_CHECKING:
@@ -58,10 +59,16 @@ router = APIRouter(tags=["Base"])
@router.get("/all", dependencies=[Depends(get_current_active_user)])
async def get_all():
    """Retrieve all component types with compression for better performance.

    Returns:
        A gzip-compressed ``Response`` containing all available component
        types, produced by ``compress_response``.

    Raises:
        HTTPException: 500 with the underlying error message if loading the
            component types fails for any reason.
    """
    # Imported lazily to avoid a circular import at module load time
    # (NOTE(review): presumably — confirm against langflow.interface.components).
    from langflow.interface.components import get_and_cache_all_types_dict

    try:
        all_types = await get_and_cache_all_types_dict(settings_service=get_settings_service())
        # Return compressed response using our utility function
        return compress_response(all_types)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc)) from exc

View file

@@ -31,6 +31,7 @@ from langflow.services.database.models.folder.constants import DEFAULT_FOLDER_NA
from langflow.services.database.models.folder.model import Folder
from langflow.services.deps import get_settings_service
from langflow.services.settings.service import SettingsService
from langflow.utils.compression import compress_response
# build router
router = APIRouter(prefix="/flows", tags=["Flows"])
@@ -238,8 +239,12 @@ async def read_flows(
if remove_example_flows and starter_folder_id:
flows = [flow for flow in flows if flow.folder_id != starter_folder_id]
if header_flows:
return [FlowHeader.model_validate(flow, from_attributes=True) for flow in flows]
return flows
# Convert to FlowHeader objects and compress the response
flow_headers = [FlowHeader.model_validate(flow, from_attributes=True) for flow in flows]
return compress_response(flow_headers)
# Compress the full flows response
return compress_response(flows)
stmt = stmt.where(Flow.folder_id == folder_id)
return await paginate(session, stmt, params=params)
@@ -538,7 +543,10 @@ async def read_basic_examples(
return []
# Get all flows in the starter folder
return (await session.exec(select(Flow).where(Flow.folder_id == starter_folder.id))).all()
flows = (await session.exec(select(Flow).where(Flow.folder_id == starter_folder.id))).all()
# Return compressed response using our utility function
return compress_response(flows)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e

View file

@@ -0,0 +1,19 @@
import gzip
import json
from typing import Any
from fastapi import Response
from fastapi.encoders import jsonable_encoder
def compress_response(data: Any) -> Response:
    """Serialize *data* to JSON, gzip it, and return a FastAPI ``Response``.

    ``jsonable_encoder`` converts arbitrary objects (Pydantic/SQLModel
    instances, datetimes, UUIDs, ...) into JSON-safe structures before
    serialization, so callers can pass ORM results directly.

    Args:
        data: Any JSON-encodable payload.

    Returns:
        A ``Response`` whose body is the gzip-compressed JSON, with
        ``Content-Encoding: gzip`` so clients decompress transparently and
        ``Vary: Accept-Encoding`` so shared caches key on the encoding.
    """
    # Compact separators and raw UTF-8 (ensure_ascii=False) shrink the
    # payload before compression; the decoded JSON is semantically identical.
    json_data = json.dumps(
        jsonable_encoder(data), separators=(",", ":"), ensure_ascii=False
    ).encode("utf-8")
    # Level 6 is gzip's balanced speed/ratio default for response-sized payloads.
    compressed_data = gzip.compress(json_data, compresslevel=6)
    return Response(
        content=compressed_data,
        media_type="application/json",
        headers={
            "Content-Encoding": "gzip",
            "Vary": "Accept-Encoding",
            "Content-Length": str(len(compressed_data)),
        },
    )