refactor(api): change variable type annotations (#3592)

* refactor(log_router.py): change variable type annotation from List to list for better consistency
refactor(utils.py): change variable type annotation from Dict to dict for better consistency
refactor(base.py): change variable type annotations from Optional[X] to the X | None union syntax (PEP 604) for better clarity
refactor(callback.py): change variable type annotation from Dict to dict for better consistency
refactor(chat.py): change variable type annotations from Optional[X] to the X | None union syntax (PEP 604) for better clarity
refactor(endpoints.py): change variable type annotations from Optional[X] to the X | None union syntax (PEP 604) for better clarity
refactor(flows.py): change variable type annotation from List to list for better consistency

refactor(api): update response_model annotations to use lowercase list for consistency and improve readability

refactor(store.py): update type annotations for query parameters in get_components endpoint to improve code readability and maintainability
feat(store.py): convert Annotated query parameters from Optional[...]/Union[...] to the X | None union syntax for better data validation and documentation

* run make format
This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-08-28 16:15:04 -03:00 committed by GitHub
commit d7dbf1ae78
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 116 additions and 124 deletions

View file

@ -1,6 +1,6 @@
import asyncio
import json
from typing import List, Any
from typing import Any
from fastapi import APIRouter, Query, HTTPException, Request
from fastapi.responses import JSONResponse, StreamingResponse
@ -15,7 +15,7 @@ async def event_generator(request: Request):
last_read_item = None
current_not_sent = 0
while not await request.is_disconnected():
to_write: List[Any] = []
to_write: list[Any] = []
with log_buffer.get_write_lock():
if last_read_item is None:
last_read_item = log_buffer.buffer[len(log_buffer.buffer) - 1]

View file

@ -1,6 +1,6 @@
import uuid
import warnings
from typing import TYPE_CHECKING, Any, Optional, Dict
from typing import TYPE_CHECKING, Any
from fastapi import HTTPException
from sqlmodel import Session
@ -122,7 +122,7 @@ def format_elapsed_time(elapsed_time: float) -> str:
return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}"
async def build_graph_from_data(flow_id: str, payload: Dict, **kwargs):
async def build_graph_from_data(flow_id: str, payload: dict, **kwargs):
"""Build and cache the graph."""
graph = Graph.from_payload(payload, flow_id, **kwargs)
for vertex_id in graph._has_session_id_vertices:
@ -141,7 +141,7 @@ async def build_graph_from_data(flow_id: str, payload: Dict, **kwargs):
async def build_graph_from_db_no_cache(flow_id: str, session: Session):
"""Build and cache the graph."""
flow: Optional[Flow] = session.get(Flow, flow_id)
flow: Flow | None = session.get(Flow, flow_id)
if not flow or not flow.data:
raise ValueError("Invalid flow ID")
return await build_graph_from_data(flow_id, flow.data, flow_name=flow.name, user_id=str(flow.user_id))

View file

@ -1,5 +1,3 @@
from typing import Optional
from pydantic import BaseModel, field_validator, model_serializer
from langflow.template.frontend_node.base import FrontendNode
@ -26,8 +24,8 @@ class FrontendNodeRequest(FrontendNode):
class ValidatePromptRequest(BaseModel):
name: str
template: str
custom_fields: Optional[dict] = None
frontend_node: Optional[FrontendNodeRequest] = None
custom_fields: dict | None = None
frontend_node: FrontendNodeRequest | None = None
# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}
@ -49,4 +47,4 @@ class CodeValidationResponse(BaseModel):
class PromptValidationResponse(BaseModel):
input_variables: list
# object return for tweak call
frontend_node: Optional[FrontendNodeRequest] = None
frontend_node: FrontendNodeRequest | None = None

View file

@ -1,4 +1,4 @@
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from typing import TYPE_CHECKING, Any
from uuid import UUID
from langchain_core.callbacks.base import AsyncCallbackHandler
from loguru import logger
@ -32,7 +32,7 @@ class AsyncStreamingLLMCallbackHandleSIO(AsyncCallbackHandler):
resp = ChatResponse(message=token, type="stream", intermediate_steps="")
await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump())
async def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> Any:
async def on_tool_start(self, serialized: dict[str, Any], input_str: str, **kwargs: Any) -> Any:
"""Run when tool starts running."""
resp = ChatResponse(
message="",
@ -79,8 +79,8 @@ class AsyncStreamingLLMCallbackHandleSIO(AsyncCallbackHandler):
error: BaseException,
*,
run_id: UUID,
parent_run_id: Optional[UUID] = None,
tags: Optional[List[str]] = None,
parent_run_id: UUID | None = None,
tags: list[str] | None = None,
**kwargs: Any,
) -> None:
"""Run when tool errors."""

View file

@ -4,7 +4,7 @@ import time
import traceback
import typing
import uuid
from typing import TYPE_CHECKING, Annotated, Optional
from typing import TYPE_CHECKING, Annotated
from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException
from fastapi.responses import StreamingResponse
@ -68,9 +68,9 @@ async def try_running_celery_task(vertex, user_id):
async def retrieve_vertices_order(
flow_id: uuid.UUID,
background_tasks: BackgroundTasks,
data: Optional[Annotated[Optional[FlowDataRequest], Body(embed=True)]] = None,
stop_component_id: Optional[str] = None,
start_component_id: Optional[str] = None,
data: Annotated[FlowDataRequest | None, Body(embed=True)] | None = None,
stop_component_id: str | None = None,
start_component_id: str | None = None,
chat_service: "ChatService" = Depends(get_chat_service),
session=Depends(get_session),
telemetry_service: "TelemetryService" = Depends(get_telemetry_service),
@ -141,12 +141,12 @@ async def retrieve_vertices_order(
async def build_flow(
background_tasks: BackgroundTasks,
flow_id: uuid.UUID,
inputs: Annotated[Optional[InputValueRequest], Body(embed=True)] = None,
data: Annotated[Optional[FlowDataRequest], Body(embed=True)] = None,
files: Optional[list[str]] = None,
stop_component_id: Optional[str] = None,
start_component_id: Optional[str] = None,
log_builds: Optional[bool] = True,
inputs: Annotated[InputValueRequest | None, Body(embed=True)] = None,
data: Annotated[FlowDataRequest | None, Body(embed=True)] = None,
files: list[str] | None = None,
stop_component_id: str | None = None,
start_component_id: str | None = None,
log_builds: bool | None = True,
chat_service: "ChatService" = Depends(get_chat_service),
current_user=Depends(get_current_active_user),
telemetry_service: "TelemetryService" = Depends(get_telemetry_service),
@ -434,7 +434,7 @@ class DisconnectHandlerStreamingResponse(StreamingResponse):
headers: typing.Mapping[str, str] | None = None,
media_type: str | None = None,
background: BackgroundTask | None = None,
on_disconnect: Optional[typing.Callable] = None,
on_disconnect: typing.Callable | None = None,
):
super().__init__(content, status_code, headers, media_type, background)
self.on_disconnect = on_disconnect
@ -453,8 +453,8 @@ async def build_vertex(
flow_id: uuid.UUID,
vertex_id: str,
background_tasks: BackgroundTasks,
inputs: Annotated[Optional[InputValueRequest], Body(embed=True)] = None,
files: Optional[list[str]] = None,
inputs: Annotated[InputValueRequest | None, Body(embed=True)] = None,
files: list[str] | None = None,
chat_service: "ChatService" = Depends(get_chat_service),
current_user=Depends(get_current_active_user),
telemetry_service: "TelemetryService" = Depends(get_telemetry_service),
@ -606,7 +606,7 @@ async def build_vertex(
async def build_vertex_stream(
flow_id: uuid.UUID,
vertex_id: str,
session_id: Optional[str] = None,
session_id: str | None = None,
chat_service: "ChatService" = Depends(get_chat_service),
session_service: "SessionService" = Depends(get_session_service),
):

View file

@ -1,7 +1,7 @@
import time
from asyncio import Lock
from http import HTTPStatus
from typing import TYPE_CHECKING, Annotated, List, Optional, Union
from typing import TYPE_CHECKING, Annotated
from uuid import UUID
import sqlalchemy as sa
@ -108,12 +108,12 @@ async def simple_run_flow(
flow: Flow,
input_request: SimplifiedAPIRequest,
stream: bool = False,
api_key_user: Optional[User] = None,
api_key_user: User | None = None,
):
if input_request.input_value is not None and input_request.tweaks is not None:
validate_input_and_tweaks(input_request)
try:
task_result: List[RunOutputs] = []
task_result: list[RunOutputs] = []
user_id = api_key_user.id if api_key_user else None
flow_id_str = str(flow.id)
if flow.data is None:
@ -155,7 +155,7 @@ async def simple_run_flow_task(
flow: Flow,
input_request: SimplifiedAPIRequest,
stream: bool = False,
api_key_user: Optional[User] = None,
api_key_user: User | None = None,
):
"""
Run a flow task as a BackgroundTask, therefore it should not throw exceptions.
@ -362,11 +362,11 @@ async def webhook_run_flow(
async def experimental_run_flow(
session: Annotated[Session, Depends(get_session)],
flow_id: UUID,
inputs: Optional[List[InputValueRequest]] = [InputValueRequest(components=[], input_value="")],
outputs: Optional[List[str]] = [],
tweaks: Annotated[Optional[Tweaks], Body(embed=True)] = None, # noqa: F821
inputs: list[InputValueRequest] | None = [InputValueRequest(components=[], input_value="")],
outputs: list[str] | None = [],
tweaks: Annotated[Tweaks | None, Body(embed=True)] = None, # noqa: F821
stream: Annotated[bool, Body(embed=True)] = False, # noqa: F821
session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821
session_id: Annotated[None | str, Body(embed=True)] = None, # noqa: F821
api_key_user: UserRead = Depends(api_key_security),
session_service: SessionService = Depends(get_session_service),
):
@ -476,10 +476,10 @@ async def experimental_run_flow(
async def process(
session: Annotated[Session, Depends(get_session)],
flow_id: str,
inputs: Optional[Union[List[dict], dict]] = None,
tweaks: Optional[dict] = None,
inputs: list[dict] | dict | None = None,
tweaks: dict | None = None,
clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821
session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821
session_id: Annotated[None | str, Body(embed=True)] = None, # noqa: F821
task_service: "TaskService" = Depends(get_task_service),
api_key_user: UserRead = Depends(api_key_security),
sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821

View file

@ -3,7 +3,6 @@ import json
import re
import zipfile
from datetime import datetime, timezone
from typing import List
from uuid import UUID
import orjson
@ -273,7 +272,7 @@ def delete_flow(
return {"message": "Flow deleted successfully"}
@router.post("/batch/", response_model=List[FlowRead], status_code=201)
@router.post("/batch/", response_model=list[FlowRead], status_code=201)
def create_flows(
*,
session: Session = Depends(get_session),
@ -293,7 +292,7 @@ def create_flows(
return db_flows
@router.post("/upload/", response_model=List[FlowRead], status_code=201)
@router.post("/upload/", response_model=list[FlowRead], status_code=201)
async def upload_file(
*,
session: Session = Depends(get_session),
@ -322,7 +321,7 @@ async def upload_file(
@router.delete("/")
async def delete_multiple_flows(
flow_ids: List[UUID], user: User = Depends(get_current_active_user), db: Session = Depends(get_session)
flow_ids: list[UUID], user: User = Depends(get_current_active_user), db: Session = Depends(get_session)
):
"""
Delete multiple flows by their IDs.
@ -357,7 +356,7 @@ async def delete_multiple_flows(
@router.post("/download/", status_code=200)
async def download_multiple_file(
flow_ids: List[UUID],
flow_ids: list[UUID],
user: User = Depends(get_current_active_user),
db: Session = Depends(get_session),
):

View file

@ -1,5 +1,3 @@
from typing import List
import orjson
from fastapi import APIRouter, Depends, File, HTTPException, Response, UploadFile, status
from sqlalchemy import or_, update
@ -78,7 +76,7 @@ def create_folder(
raise HTTPException(status_code=500, detail=str(e))
@router.get("/", response_model=List[FolderRead], status_code=200)
@router.get("/", response_model=list[FolderRead], status_code=200)
def read_folders(
*,
session: Session = Depends(get_session),
@ -211,7 +209,7 @@ async def download_file(
raise HTTPException(status_code=500, detail=str(e))
@router.post("/upload/", response_model=List[FlowRead], status_code=201)
@router.post("/upload/", response_model=list[FlowRead], status_code=201)
async def upload_file(
*,
session: Session = Depends(get_session),

View file

@ -1,4 +1,3 @@
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query
@ -44,13 +43,13 @@ async def delete_vertex_builds(
raise HTTPException(status_code=500, detail=str(e))
@router.get("/messages", response_model=List[MessageModelResponse])
@router.get("/messages", response_model=list[MessageModelResponse])
async def get_messages(
flow_id: Optional[str] = Query(None),
session_id: Optional[str] = Query(None),
sender: Optional[str] = Query(None),
sender_name: Optional[str] = Query(None),
order_by: Optional[str] = Query("timestamp"),
flow_id: str | None = Query(None),
session_id: str | None = Query(None),
sender: str | None = Query(None),
sender_name: str | None = Query(None),
order_by: str | None = Query("timestamp"),
session: Session = Depends(get_session),
):
try:
@ -74,7 +73,7 @@ async def get_messages(
@router.delete("/messages", status_code=204)
async def delete_messages(
message_ids: List[UUID],
message_ids: list[UUID],
session: Session = Depends(get_session),
current_user: User = Depends(get_current_active_user),
):
@ -125,7 +124,7 @@ async def delete_messages_session(
raise HTTPException(status_code=500, detail=str(e))
@router.get("/transactions", response_model=List[TransactionReadResponse])
@router.get("/transactions", response_model=list[TransactionReadResponse])
async def get_transactions(
flow_id: UUID = Query(),
session: Session = Depends(get_session),

View file

@ -1,7 +1,7 @@
from datetime import datetime, timezone
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field, field_serializer, field_validator, model_serializer
@ -28,7 +28,7 @@ class BuildStatus(Enum):
class TweaksRequest(BaseModel):
tweaks: Optional[Dict[str, Dict[str, Any]]] = Field(default_factory=dict)
tweaks: dict[str, dict[str, Any]] | None = Field(default_factory=dict)
class UpdateTemplateRequest(BaseModel):
@ -38,25 +38,25 @@ class UpdateTemplateRequest(BaseModel):
class TaskResponse(BaseModel):
"""Task response schema."""
id: Optional[str] = Field(None)
href: Optional[str] = Field(None)
id: str | None = Field(None)
href: str | None = Field(None)
class ProcessResponse(BaseModel):
"""Process response schema."""
result: Any
status: Optional[str] = None
task: Optional[TaskResponse] = None
session_id: Optional[str] = None
backend: Optional[str] = None
status: str | None = None
task: TaskResponse | None = None
session_id: str | None = None
backend: str | None = None
class RunResponse(BaseModel):
"""Run response schema."""
outputs: Optional[List[RunOutputs]] = []
session_id: Optional[str] = None
outputs: list[RunOutputs] | None = []
session_id: str | None = None
@model_serializer(mode="plain")
def serialize(self):
@ -76,23 +76,23 @@ class RunResponse(BaseModel):
class PreloadResponse(BaseModel):
"""Preload response schema."""
session_id: Optional[str] = None
is_clear: Optional[bool] = None
session_id: str | None = None
is_clear: bool | None = None
class TaskStatusResponse(BaseModel):
"""Task status response schema."""
status: str
result: Optional[Any] = None
result: Any | None = None
class ChatMessage(BaseModel):
"""Chat message schema."""
is_bot: bool = False
message: Union[str, None, dict] = None
chatKey: Optional[str] = None
message: str | None | dict = None
chatKey: str | None = None
type: str = "human"
@ -138,19 +138,19 @@ class FileResponse(ChatMessage):
class FlowListCreate(BaseModel):
flows: List[FlowCreate]
flows: list[FlowCreate]
class FlowListIds(BaseModel):
flow_ids: List[str]
flow_ids: list[str]
class FlowListRead(BaseModel):
flows: List[FlowRead]
flows: list[FlowRead]
class FlowListReadWithFolderName(BaseModel):
flows: List[FlowRead]
flows: list[FlowRead]
name: str
description: str
@ -181,7 +181,7 @@ class StreamData(BaseModel):
class CustomComponentRequest(BaseModel):
model_config = ConfigDict(arbitrary_types_allowed=True)
code: str
frontend_node: Optional[dict] = None
frontend_node: dict | None = None
class CustomComponentResponse(BaseModel):
@ -191,7 +191,7 @@ class CustomComponentResponse(BaseModel):
class UpdateCustomComponentRequest(CustomComponentRequest):
field: str
field_value: Optional[Union[str, int, float, bool, dict, list]] = None
field_value: str | int | float | bool | dict | list | None = None
template: dict
def get_template(self):
@ -204,16 +204,16 @@ class CustomComponentResponseError(BaseModel):
class ComponentListCreate(BaseModel):
flows: List[FlowCreate]
flows: list[FlowCreate]
class ComponentListRead(BaseModel):
flows: List[FlowRead]
flows: list[FlowRead]
class UsersResponse(BaseModel):
total_count: int
users: List[UserRead]
users: list[UserRead]
class ApiKeyResponse(BaseModel):
@ -227,7 +227,7 @@ class ApiKeyResponse(BaseModel):
class ApiKeysResponse(BaseModel):
total_count: int
user_id: UUID
api_keys: List[ApiKeyRead]
api_keys: list[ApiKeyRead]
class CreateApiKeyRequest(BaseModel):
@ -245,20 +245,20 @@ class ApiKeyCreateRequest(BaseModel):
class VerticesOrderResponse(BaseModel):
ids: List[str]
ids: list[str]
run_id: UUID
vertices_to_run: List[str]
vertices_to_run: list[str]
class ResultDataResponse(BaseModel):
results: Optional[Any] = Field(default_factory=dict)
results: Any | None = Field(default_factory=dict)
outputs: dict[str, OutputValue] = Field(default_factory=dict)
logs: dict[str, list[Log]] = Field(default_factory=dict)
message: Optional[Any] = Field(default_factory=dict)
artifacts: Optional[Any] = Field(default_factory=dict)
timedelta: Optional[float] = None
duration: Optional[str] = None
used_frozen_result: Optional[bool] = False
message: Any | None = Field(default_factory=dict)
artifacts: Any | None = Field(default_factory=dict)
timedelta: float | None = None
duration: str | None = None
used_frozen_result: bool | None = False
@field_serializer("results")
@classmethod
@ -269,27 +269,27 @@ class ResultDataResponse(BaseModel):
class VertexBuildResponse(BaseModel):
id: Optional[str] = None
inactivated_vertices: Optional[List[str]] = None
next_vertices_ids: Optional[List[str]] = None
top_level_vertices: Optional[List[str]] = None
id: str | None = None
inactivated_vertices: list[str] | None = None
next_vertices_ids: list[str] | None = None
top_level_vertices: list[str] | None = None
valid: bool
params: Optional[Any] = Field(default_factory=dict)
params: Any | None = Field(default_factory=dict)
"""JSON string of the params."""
data: ResultDataResponse
"""Mapping of vertex ids to result dict containing the param name and result value."""
timestamp: Optional[datetime] = Field(default_factory=lambda: datetime.now(timezone.utc))
timestamp: datetime | None = Field(default_factory=lambda: datetime.now(timezone.utc))
"""Timestamp of the build."""
class VerticesBuiltResponse(BaseModel):
vertices: List[VertexBuildResponse]
vertices: list[VertexBuildResponse]
class InputValueRequest(BaseModel):
components: Optional[List[str]] = []
input_value: Optional[str] = None
type: Optional[InputType] = Field(
components: list[str] | None = []
input_value: str | None = None
type: InputType | None = Field(
"any",
description="Defines on which components the input value should be applied. 'any' applies to all input components.",
)
@ -313,15 +313,15 @@ class InputValueRequest(BaseModel):
class SimplifiedAPIRequest(BaseModel):
input_value: Optional[str] = Field(default=None, description="The input value")
input_type: Optional[InputType] = Field(default="chat", description="The input type")
output_type: Optional[OutputType] = Field(default="chat", description="The output type")
output_component: Optional[str] = Field(
input_value: str | None = Field(default=None, description="The input value")
input_type: InputType | None = Field(default="chat", description="The input type")
output_type: OutputType | None = Field(default="chat", description="The output type")
output_component: str | None = Field(
default="",
description="If there are multiple output components, you can specify the component to get the output from.",
)
tweaks: Optional[Tweaks] = Field(default=None, description="The tweaks")
session_id: Optional[str] = Field(default=None, description="The session id")
tweaks: Tweaks | None = Field(default=None, description="The tweaks")
session_id: str | None = Field(default=None, description="The session id")
# (alias) type ReactFlowJsonObject<NodeData = any, EdgeData = any> = {
@ -331,9 +331,9 @@ class SimplifiedAPIRequest(BaseModel):
# }
# import ReactFlowJsonObject
class FlowDataRequest(BaseModel):
nodes: List[dict]
edges: List[dict]
viewport: Optional[dict] = None
nodes: list[dict]
edges: list[dict]
viewport: dict | None = None
class ConfigResponse(BaseModel):

View file

@ -1,5 +1,3 @@
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from loguru import logger
@ -10,7 +8,7 @@ from langflow.services.database.models.user.model import User
router = APIRouter(prefix="/starter-projects", tags=["Flows"])
@router.get("/", response_model=List[GraphDump], status_code=200)
@router.get("/", response_model=list[GraphDump], status_code=200)
def get_starter_projects(
*,
current_user: User = Depends(get_current_active_user),

View file

@ -1,4 +1,4 @@
from typing import Annotated, List, Optional, Union
from typing import Annotated
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query
@ -60,7 +60,7 @@ def check_if_store_is_enabled(
@router.get("/check/api_key")
async def check_if_store_has_api_key(
api_key: Optional[str] = Depends(get_optional_user_store_api_key),
api_key: str | None = Depends(get_optional_user_store_api_key),
store_service: StoreService = Depends(get_store_service),
):
if api_key is None:
@ -105,19 +105,19 @@ async def update_shared_component(
@router.get("/components/", response_model=ListComponentResponseModel)
async def get_components(
component_id: Annotated[Optional[str], Query()] = None,
search: Annotated[Optional[str], Query()] = None,
private: Annotated[Optional[bool], Query()] = None,
is_component: Annotated[Optional[bool], Query()] = None,
tags: Annotated[Optional[list[str]], Query()] = None,
sort: Annotated[Union[list[str], None], Query()] = None,
component_id: Annotated[str | None, Query()] = None,
search: Annotated[str | None, Query()] = None,
private: Annotated[bool | None, Query()] = None,
is_component: Annotated[bool | None, Query()] = None,
tags: Annotated[list[str] | None, Query()] = None,
sort: Annotated[list[str] | None, Query()] = None,
liked: Annotated[bool, Query()] = False,
filter_by_user: Annotated[bool, Query()] = False,
fields: Annotated[Optional[list[str]], Query()] = None,
fields: Annotated[list[str] | None, Query()] = None,
page: int = 1,
limit: int = 10,
store_service: StoreService = Depends(get_store_service),
store_api_key: Optional[str] = Depends(get_optional_user_store_api_key),
store_api_key: str | None = Depends(get_optional_user_store_api_key),
):
try:
return await store_service.get_list_component_response_model(
@ -159,7 +159,7 @@ async def download_component(
return component
@router.get("/tags", response_model=List[TagResponse])
@router.get("/tags", response_model=list[TagResponse])
async def get_tags(
store_service: StoreService = Depends(get_store_service),
):
@ -171,7 +171,7 @@ async def get_tags(
raise HTTPException(status_code=500, detail=str(exc))
@router.get("/users/likes", response_model=List[UsersLikesResponse])
@router.get("/users/likes", response_model=list[UsersLikesResponse])
async def get_list_of_components_liked_by_user(
store_service: StoreService = Depends(get_store_service),
store_api_key: str = Depends(get_user_store_api_key),