tests: fix tests by clearing service_manager services (#5573)

* refactor(tests): add new fixtures to files tests

- Introduced new pytest fixtures for creating API keys, active users, and flows to streamline test setup.
- Updated file upload, download, list, and delete tests to utilize the new fixtures, improving clarity and maintainability.
- Ensured that flow IDs are correctly referenced in all file operations, enhancing test accuracy.
- Added cleanup logic in fixtures to maintain database integrity after tests.

This refactor improves the organization of test cases and ensures that they are more robust and easier to understand.

* fix(tests): clear services cache in test fixtures

- Added logic to clear the services cache in both `conftest.py` and `test_files.py` fixtures to ensure a clean state for tests.
- This change prevents potential interference between tests by resetting the service manager's factories and services before each test run.

* refactor(tests): streamline file upload tests and remove unused mocks

- Removed the mock implementation of StorageService from the test fixtures to simplify the test setup.
- Introduced new fixtures to set maximum file size upload limits, enhancing test flexibility.
- Updated file upload, download, and list tests to directly use the new fixtures, improving clarity and maintainability.
- Ensured that uploaded file content is accurately tested, reflecting changes in the upload and download logic.
This commit is contained in:
Gabriel Luiz Freitas Almeida 2025-01-08 10:13:41 -03:00 committed by GitHub
commit 8809128d93
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 179 additions and 95 deletions

View file

@ -354,7 +354,11 @@ async def client_fixture(
)
monkeypatch.setenv("LANGFLOW_LOAD_FLOWS_PATH", load_flows_dir)
monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "true")
# Clear the services cache
from langflow.services.manager import service_manager
service_manager.factories.clear()
service_manager.services.clear() # Clear the services cache
app = create_app()
db_service = get_db_service()
db_service.database_url = f"sqlite:///{db_path}"

View file

@ -1,82 +1,100 @@
import asyncio
import json
import re
import shutil
import tempfile
from contextlib import suppress
from io import BytesIO
from pathlib import Path
from unittest.mock import MagicMock
# we need to import tmpdir
import anyio
import pytest
from asgi_lifespan import LifespanManager
from httpx import ASGITransport, AsyncClient
from langflow.main import create_app
from langflow.services.deps import get_storage_service
from langflow.services.storage.service import StorageService
from sqlmodel import Session
from langflow.services.auth.utils import get_password_hash
from langflow.services.database.models.api_key.model import ApiKey
from langflow.services.database.models.flow.model import Flow, FlowCreate
from langflow.services.database.models.user.model import User, UserRead
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service
from sqlalchemy.orm import selectinload
from sqlmodel import select
from tests.conftest import _delete_transactions_and_vertex_builds
@pytest.fixture
def mock_storage_service():
    """Build a MagicMock stand-in for StorageService with canned behaviours.

    The mock answers the four storage operations the file endpoints use and
    carries a nested ``settings_service`` whose ``max_file_size_upload`` drives
    the upload size check.
    """
    storage = MagicMock(spec=StorageService)
    # Canned results for the storage API surface exercised by the tests.
    storage.save_file.return_value = None
    storage.get_file.return_value = b"file content"  # Binary content for files
    storage.list_files.return_value = ["file1.txt", "file2.jpg"]
    storage.delete_file.return_value = None

    # Mock the settings service with proper max_file_size_upload attribute.
    settings_service = MagicMock()
    settings_service.settings = MagicMock()
    settings_service.settings.max_file_size_upload = 1  # Default 1MB limit
    storage.settings_service = settings_service
    return storage
@pytest.fixture(name="files_created_api_key")
async def files_created_api_key(files_client, files_active_user):  # noqa: ARG001
    """Yield an ApiKey row owned by ``files_active_user``.

    Reuses an already-persisted key with the same raw value when one exists
    (no cleanup in that case); otherwise inserts a fresh row, yields it, and
    deletes it again after the test.
    """
    candidate = ApiKey(
        name="files_created_api_key",
        user_id=files_active_user.id,
        api_key="random_key",
        hashed_api_key=get_password_hash("random_key"),
    )
    db_manager = get_db_service()
    async with session_getter(db_manager) as session:
        lookup = select(ApiKey).where(ApiKey.api_key == candidate.api_key)
        existing = (await session.exec(lookup)).first()
        if existing is not None:
            # Some earlier fixture/test already created this key; hand it out as-is.
            yield existing
            return
        session.add(candidate)
        await session.commit()
        await session.refresh(candidate)
        yield candidate
        # Clean up
        await session.delete(candidate)
        await session.commit()
@pytest.fixture(name="files_client")
async def files_client_fixture(
session: Session, # noqa: ARG001
monkeypatch,
request,
load_flows_dir,
mock_storage_service,
@pytest.fixture(name="files_active_user")
async def files_active_user(files_client):  # noqa: ARG001
    """Yield a ``UserRead`` for a dedicated active (non-superuser) test user.

    Creates the user on first use and reuses the existing row on later runs.
    After the test it removes the user's transactions/vertex builds and the
    user row itself to keep the database clean.
    """
    db_manager = get_db_service()
    async with db_manager.with_session() as session:
        candidate = User(
            username="files_active_user",
            password=get_password_hash("testpassword"),
            is_active=True,
            is_superuser=False,
        )
        query = select(User).where(User.username == candidate.username)
        existing = (await session.exec(query)).first()
        if existing is None:
            session.add(candidate)
            await session.commit()
            await session.refresh(candidate)
        else:
            candidate = existing
        user = UserRead.model_validate(candidate, from_attributes=True)
    yield user
    # Clean up: drop transactions and vertex builds first, then the user row.
    async with db_manager.with_session() as session:
        db_user = await session.get(User, user.id, options=[selectinload(User.flows)])
        await _delete_transactions_and_vertex_builds(session, db_user.flows)
        await session.delete(db_user)
        await session.commit()
@pytest.fixture(name="files_flow")
async def files_flow(
    files_client,  # noqa: ARG001
    json_flow: str,
    files_active_user,
):
    """Persist a test flow for ``files_active_user`` and yield it.

    Builds a ``Flow`` named "test_flow" from the ``json_flow`` fixture payload,
    commits it, yields it to the test, and deletes it afterwards.

    NOTE(review): the diff rendering interleaved the removed old
    ``files_client_fixture`` body into the middle of this fixture; this body is
    the reconstructed post-commit definition with that residue removed.
    """
    loaded_json = json.loads(json_flow)
    flow_data = FlowCreate(name="test_flow", data=loaded_json.get("data"), user_id=files_active_user.id)
    db_manager = get_db_service()
    flow = Flow.model_validate(flow_data)
    async with db_manager.with_session() as session:
        session.add(flow)
        await session.commit()
        await session.refresh(flow)
        yield flow
        # Clean up
        await session.delete(flow)
        await session.commit()
@pytest.fixture
@ -93,54 +111,116 @@ def max_file_size_upload_10mb_fixture(monkeypatch):
monkeypatch.undo()
async def test_upload_file(files_client, created_api_key, flow):
headers = {"x-api-key": created_api_key.api_key}
@pytest.fixture(name="files_client")
async def files_client_fixture(
    monkeypatch,
    request,
):
    """Yield an ``AsyncClient`` against a freshly built app on a temp sqlite DB.

    The service manager caches are cleared before ``create_app()`` so each test
    starts from a pristine set of services. Tests marked ``noclient`` skip the
    whole setup and receive ``None``.
    """
    if "noclient" in request.keywords:
        # Opt-out marker: no app, no client.
        yield
        return

    def _build_app():
        # Runs in a worker thread because app construction does blocking work.
        db_dir = tempfile.mkdtemp()
        db_path = Path(db_dir) / "test.db"
        monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}")
        monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false")
        from langflow.services.manager import service_manager

        # Clear the services cache so stale factories/services never leak in.
        service_manager.factories.clear()
        service_manager.services.clear()
        return create_app(), db_path

    app, db_path = await asyncio.to_thread(_build_app)
    async with (
        LifespanManager(app, startup_timeout=None, shutdown_timeout=None) as manager,
        AsyncClient(transport=ASGITransport(app=manager.app), base_url="http://testserver/") as client,
    ):
        yield client
    monkeypatch.undo()
    # Remove the temporary database file if it was ever created.
    with suppress(FileNotFoundError):
        await anyio.Path(db_path).unlink()
async def test_upload_file(files_client, files_created_api_key, files_flow):
    """Upload a file to a flow and verify the response payload.

    Checks the 201 status, that ``flowId`` echoes the flow, and that
    ``file_path`` follows the ``<flow_id>/<timestamp>_test.txt`` pattern.

    NOTE(review): the diff rendering left the pre-change URL line
    (``.../upload/{flow.id}``) inside the post call; it is removed here.
    """
    headers = {"x-api-key": files_created_api_key.api_key}
    response = await files_client.post(
        f"api/v1/files/upload/{files_flow.id}",
        files={"file": ("test.txt", b"test content")},
        headers=headers,
    )
    assert response.status_code == 201, f"Expected 201, got {response.status_code}: {response.json()}"
    response_json = response.json()
    assert response_json["flowId"] == str(files_flow.id)

    # Check that the file_path matches the expected pattern
    file_path_pattern = re.compile(rf"{files_flow.id}/\d{{4}}-\d{{2}}-\d{{2}}_\d{{2}}-\d{{2}}-\d{{2}}_test\.txt")
    assert file_path_pattern.match(response_json["file_path"])
async def test_download_file(files_client, files_created_api_key, files_flow):
    """Upload a file, then download it by its server-generated name.

    The server prefixes a timestamp to the uploaded filename, so the test
    extracts the actual name from the upload response before downloading.

    NOTE(review): interleaved pre-change diff lines (old flowId/pattern asserts
    and the old one-shot download) are removed in this reconstruction.
    """
    headers = {"x-api-key": files_created_api_key.api_key}

    # First upload a file
    response = await files_client.post(
        f"api/v1/files/upload/{files_flow.id}",
        files={"file": ("test.txt", b"test content")},
        headers=headers,
    )
    assert response.status_code == 201

    # Get the actual filename from the response
    file_path = response.json()["file_path"]
    file_name = file_path.split("/")[-1]

    # Then try to download it
    response = await files_client.get(f"api/v1/files/download/{files_flow.id}/{file_name}", headers=headers)
    assert response.status_code == 200
    assert response.content == b"test content"
async def test_list_files(files_client, files_created_api_key, files_flow):
    """Upload a single file and verify the list endpoint reports exactly it.

    The listed name ends with ``test.txt`` (the server prepends a timestamp),
    so the assertion uses ``endswith`` rather than equality.

    NOTE(review): interleaved pre-change diff lines (old headers and the
    mocked ``{"files": [...]}`` assert) are removed in this reconstruction.
    """
    headers = {"x-api-key": files_created_api_key.api_key}

    # First upload a file
    response = await files_client.post(
        f"api/v1/files/upload/{files_flow.id}",
        files={"file": ("test.txt", b"test content")},
        headers=headers,
    )
    assert response.status_code == 201

    # Then list the files
    response = await files_client.get(f"api/v1/files/list/{files_flow.id}", headers=headers)
    assert response.status_code == 200
    files = response.json()["files"]
    assert len(files) == 1
    assert files[0].endswith("test.txt")
async def test_delete_file(files_client, files_created_api_key, files_flow):
    """Delete ``test.txt`` from the flow and check the confirmation message.

    NOTE(review): the pre-change def/headers/delete lines were interleaved by
    the diff rendering and are removed here. The endpoint appears to return
    200 even when the file was never uploaded — confirm against the API.
    """
    headers = {"x-api-key": files_created_api_key.api_key}
    response = await files_client.delete(f"api/v1/files/delete/{files_flow.id}/test.txt", headers=headers)
    assert response.status_code == 200
    assert response.json() == {"message": "File test.txt deleted successfully"}
async def test_file_operations(client, created_api_key, flow):
headers = {"x-api-key": created_api_key.api_key}
flow_id = flow.id
async def test_file_operations(files_client, files_created_api_key, files_flow):
headers = {"x-api-key": files_created_api_key.api_key}
flow_id = files_flow.id
file_name = "test.txt"
file_content = b"Hello, world!"
# Step 1: Upload the file
response = await client.post(
response = await files_client.post(
f"api/v1/files/upload/{flow_id}",
files={"file": (file_name, file_content)},
headers=headers,
@ -158,36 +238,36 @@ async def test_file_operations(client, created_api_key, flow):
full_file_name = response_json["file_path"].split("/")[-1]
# Step 2: List files in the folder
response = await client.get(f"api/v1/files/list/{flow_id}", headers=headers)
response = await files_client.get(f"api/v1/files/list/{files_flow.id}", headers=headers)
assert response.status_code == 200
assert full_file_name in response.json()["files"]
# Step 3: Download the file and verify its content
response = await client.get(f"api/v1/files/download/{flow_id}/{full_file_name}", headers=headers)
response = await files_client.get(f"api/v1/files/download/{files_flow.id}/{full_file_name}", headers=headers)
assert response.status_code == 200
assert response.content == file_content
assert response.headers["content-type"] == "application/octet-stream"
# Step 4: Delete the file
response = await client.delete(f"api/v1/files/delete/{flow_id}/{full_file_name}", headers=headers)
response = await files_client.delete(f"api/v1/files/delete/{files_flow.id}/{full_file_name}", headers=headers)
assert response.status_code == 200
assert response.json() == {"message": f"File {full_file_name} deleted successfully"}
# Verify that the file is indeed deleted
response = await client.get(f"api/v1/files/list/{flow_id}", headers=headers)
response = await files_client.get(f"api/v1/files/list/{files_flow.id}", headers=headers)
assert full_file_name not in response.json()["files"]
@pytest.mark.usefixtures("max_file_size_upload_fixture")
async def test_upload_file_size_limit(files_client, created_api_key, flow):
headers = {"x-api-key": created_api_key.api_key}
async def test_upload_file_size_limit(files_client, files_created_api_key, files_flow):
headers = {"x-api-key": files_created_api_key.api_key}
# Test file under the limit (500KB)
small_content = b"x" * (500 * 1024)
small_file = ("small_file.txt", small_content, "application/octet-stream")
headers["Content-Length"] = str(len(small_content))
response = await files_client.post(
f"api/v1/files/upload/{flow.id}",
f"api/v1/files/upload/{files_flow.id}",
files={"file": small_file},
headers=headers,
)
@ -199,7 +279,7 @@ async def test_upload_file_size_limit(files_client, created_api_key, flow):
bio = BytesIO(large_content)
headers["Content-Length"] = str(len(large_content))
response = await files_client.post(
f"api/v1/files/upload/{flow.id}",
f"api/v1/files/upload/{files_flow.id}",
files={"file": ("large_file.txt", bio, "application/octet-stream")},
headers=headers,
)