from langchain_core.runnables import Runnable

from langflow import CustomComponent


class LCModelComponent(CustomComponent):
    """Base component for LangChain model wrappers in Langflow."""

    display_name: str = "Model Name"
    description: str = "Model Description"

    def get_result(self, output: Runnable, stream: bool, input_value: str):
        """
        Run *output* on *input_value* and return the produced result.

        Args:
            output (Runnable): The runnable to execute.
            stream (bool): If True, stream the output; otherwise invoke it once.
            input_value (str): The input passed to the runnable.

        Returns:
            The streamed iterator (when ``stream``) or the invoked result,
            unwrapped to its ``content`` attribute when one exists.
        """
        if not stream:
            response = output.invoke(input_value)
            # Chat models return a message object; unwrap to its text content.
            result = getattr(response, "content", response)
        else:
            result = output.stream(input_value)
        # Surface the result in the component's status display.
        self.status = result
        return result