fix: Refactor a few more components to proper folders (#8324)

* fix: Refactor a few more components to proper folders

* Rename action for load files

* [autofix.ci] apply automated fixes

* Update tests for new naming

* Update video_file.py

* Update video_file.py

* Update video_file.py

* Update test_batch_run_component.py

* Move unit tests

* Update test_structured_output_component.py

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Yuqi Tang <yuqi.tang@datastax.com>
This commit is contained in:
Eric Hare 2025-06-03 16:35:46 -07:00 committed by GitHub
commit 9aaca68687
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
18 changed files with 31 additions and 53 deletions

View file

@@ -174,7 +174,7 @@ class BaseFileComponent(Component, ABC):
]
_base_outputs = [
Output(display_name="Loaded Files", name="dataframe", method="load_dataframe"),
Output(display_name="Loaded Files", name="dataframe", method="load_files"),
]
@abstractmethod
@@ -225,7 +225,7 @@ class BaseFileComponent(Component, ABC):
else:
file.path.unlink()
def load_files(self) -> list[Data]:
def load_files_core(self) -> list[Data]:
"""Load files and return as Data objects.
Returns:
@@ -236,13 +236,13 @@
return [Data()]
return data_list
def load_dataframe(self) -> DataFrame:
def load_files(self) -> DataFrame:
"""Load files and return as DataFrame.
Returns:
DataFrame: DataFrame containing all file data
"""
data_list = self.load_files()
data_list = self.load_files_core()
if not data_list:
return DataFrame()

View file

@@ -5,10 +5,8 @@ from .id_generator import IDGeneratorComponent
from .memory import MemoryComponent
from .output_parser import OutputParserComponent
from .store_message import MessageStoreComponent
from .structured_output import StructuredOutputComponent
__all__ = [
"BatchRunComponent",
"CalculatorComponent",
"CreateListComponent",
"CurrentDateComponent",
@@ -16,5 +14,4 @@ __all__ = [
"MemoryComponent",
"MessageStoreComponent",
"OutputParserComponent",
"StructuredOutputComponent",
]

View file

@@ -1,4 +1,5 @@
from .alter_metadata import AlterMetadataComponent
from .batch_run import BatchRunComponent
from .combine_text import CombineTextComponent
from .converter import TypeConverterComponent
from .create_data import CreateDataComponent
@@ -17,10 +18,12 @@ from .python_repl_core import PythonREPLComponent
from .regex import RegexExtractorComponent
from .select_data import SelectDataComponent
from .split_text import SplitTextComponent
from .structured_output import StructuredOutputComponent
from .update_data import UpdateDataComponent
__all__ = [
"AlterMetadataComponent",
"BatchRunComponent",
"CombineTextComponent",
"CreateDataComponent",
"DataFilterComponent",
@@ -39,6 +42,7 @@ __all__ = [
"RegexExtractorComponent",
"SelectDataComponent",
"SplitTextComponent",
"StructuredOutputComponent",
"TypeConverterComponent",
"UpdateDataComponent",
]

View file

@@ -9,7 +9,7 @@ from fastapi.encoders import jsonable_encoder
from langflow.api.v2.files import upload_user_file
from langflow.custom import Component
from langflow.io import DropdownInput, HandleInput, Output, StrInput
from langflow.io import DropdownInput, HandleInput, StrInput
from langflow.schema import Data, DataFrame, Message
from langflow.services.auth.utils import create_user_longterm_token
from langflow.services.database.models.user.crud import get_user_by_id
@@ -51,14 +51,6 @@ class SaveToFileComponent(Component):
),
]
outputs = [
Output(
name="confirmation",
display_name="Confirmation",
method="save_to_file",
),
]
async def save_to_file(self) -> str:
"""Save the input to a file and upload it, returning a confirmation message."""
# Validate inputs

View file

@@ -2,7 +2,7 @@ from pathlib import Path
from langflow.base.data import BaseFileComponent
from langflow.io import FileInput
from langflow.schema import Data
from langflow.schema import Data, DataFrame
class VideoFileComponent(BaseFileComponent):
@@ -135,13 +135,13 @@ class VideoFileComponent(BaseFileComponent):
return processed_files
def load_files(self) -> list[Data]:
def load_files(self) -> DataFrame:
"""Load video files and return a list of Data objects."""
try:
self.log("DEBUG: Starting video file load")
if not hasattr(self, "file_path") or not self.file_path:
self.log("DEBUG: No video file path provided")
return []
return DataFrame()
self.log(f"DEBUG: Loading video from path: {self.file_path}")
@@ -149,7 +149,7 @@ class VideoFileComponent(BaseFileComponent):
file_path_obj = Path(self.file_path)
if not file_path_obj.exists():
self.log(f"DEBUG: Video file not found at path: {self.file_path}")
return []
return DataFrame()
# Verify file size
file_size = file_path_obj.stat().st_size
@@ -162,18 +162,18 @@
}
self.log(f"DEBUG: Created video data: {video_data}")
result = [Data(data=video_data)]
result = DataFrame(data=[video_data])
# Log the result to verify it's a proper Data object
self.log("DEBUG: Returning list with Data objects")
except (FileNotFoundError, PermissionError, OSError) as e:
self.log(f"DEBUG: File error in video load_files: {e!s}", "ERROR")
return []
return DataFrame()
except ImportError as e:
self.log(f"DEBUG: Import error in video load_files: {e!s}", "ERROR")
return []
return DataFrame()
except (ValueError, TypeError) as e:
self.log(f"DEBUG: Value or type error in video load_files: {e!s}", "ERROR")
return []
return DataFrame()
else:
return result

View file

@@ -804,7 +804,7 @@
"cache": true,
"display_name": "Loaded Files",
"group_outputs": false,
"method": "load_dataframe",
"method": "load_files",
"name": "dataframe",
"required_inputs": [],
"selected": "DataFrame",

File diff suppressed because one or more lines are too long

View file

@@ -1857,7 +1857,7 @@
"cache": true,
"display_name": "Loaded Files",
"group_outputs": false,
"method": "load_dataframe",
"method": "load_files",
"name": "dataframe",
"required_inputs": [],
"selected": "DataFrame",

View file

@@ -266,7 +266,7 @@
"cache": true,
"display_name": "Loaded Files",
"group_outputs": false,
"method": "load_dataframe",
"method": "load_files",
"name": "dataframe",
"required_inputs": [],
"selected": "DataFrame",

View file

@@ -2447,7 +2447,7 @@
"cache": true,
"display_name": "Loaded Files",
"group_outputs": false,
"method": "load_dataframe",
"method": "load_files",
"name": "dataframe",
"required_inputs": [],
"selected": "DataFrame",

View file

@@ -23,7 +23,7 @@ Answer:
"""
file_component = FileComponent()
parse_data_component = ParserComponent()
parse_data_component.set(input_data=file_component.load_dataframe)
parse_data_component.set(input_data=file_component.load_files)
chat_input = ChatInput()
prompt_component = PromptComponent()

View file

@@ -15,7 +15,7 @@ def ingestion_graph():
# Ingestion Graph
file_component = FileComponent()
text_splitter = SplitTextComponent()
text_splitter.set(data_inputs=file_component.load_dataframe)
text_splitter.set(data_inputs=file_component.load_files)
openai_embeddings = OpenAIEmbeddingsComponent()
vector_store = AstraDBVectorStoreComponent()
vector_store.set(

View file

@@ -1,7 +1,7 @@
import re
import pytest
from langflow.components.helpers.batch_run import BatchRunComponent
from langflow.components.processing.batch_run import BatchRunComponent
from langflow.schema import DataFrame
from tests.base import ComponentTestBaseWithoutClient

View file

@@ -5,7 +5,7 @@ from unittest.mock import patch
import openai
import pytest
from langchain_openai import ChatOpenAI
from langflow.components.helpers.structured_output import StructuredOutputComponent
from langflow.components.processing.structured_output import StructuredOutputComponent
from langflow.helpers.base_model import build_model_from_schema
from langflow.inputs.inputs import TableInput
from pydantic import BaseModel
@@ -53,7 +53,7 @@ class TestStructuredOutputComponent(ComponentTestBaseWithoutClient):
system_prompt="Test system prompt",
)
with patch("langflow.components.helpers.structured_output.get_chat_result", mock_get_chat_result):
with patch("langflow.components.processing.structured_output.get_chat_result", mock_get_chat_result):
result = component.build_structured_output_base()
assert isinstance(result, list)
assert result == [{"field": "value"}]
@@ -174,7 +174,7 @@ class TestStructuredOutputComponent(ComponentTestBaseWithoutClient):
with pytest.raises(ValueError, match="Invalid type: invalid_type"):
component.build_structured_output()
@patch("langflow.components.helpers.structured_output.get_chat_result")
@patch("langflow.components.processing.structured_output.get_chat_result")
def test_nested_output_schema(self, mock_get_chat_result):
class ChildModel(BaseModel):
child: str = "value"
@@ -208,7 +208,7 @@ class TestStructuredOutputComponent(ComponentTestBaseWithoutClient):
assert isinstance(result, list)
assert result == [{"parent": {"child": "value"}}]
@patch("langflow.components.helpers.structured_output.get_chat_result")
@patch("langflow.components.processing.structured_output.get_chat_result")
def test_large_input_value(self, mock_get_chat_result):
large_input = "Test input " * 1000

View file

@ -24,7 +24,7 @@ def ingestion_graph():
file_component.set(path="test.txt")
file_component.set_on_output(name="dataframe", value=Data(text="This is a test file."), cache=True)
text_splitter = SplitTextComponent(_id="text-splitter-123")
text_splitter.set(data_inputs=file_component.load_dataframe)
text_splitter.set(data_inputs=file_component.load_files)
openai_embeddings = OpenAIEmbeddingsComponent(_id="openai-embeddings-123")
openai_embeddings.set(
openai_api_key="sk-123", openai_api_base="https://api.openai.com/v1", openai_api_type="openai"