Delete unused files and components

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-04-02 11:07:41 -03:00
commit 830d063ca1
9 changed files with 0 additions and 344 deletions

View file

@@ -1,70 +0,0 @@
from typing import List, Union
from langchain.agents import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent
from langflow import CustomComponent
from langflow.field_typing import BaseMemory, Text, Tool
class LCAgentComponent(CustomComponent):
    """Base class for agent components.

    Supplies the shared UI field configuration and an async helper that wraps
    a raw agent in an ``AgentExecutor`` (when needed) and runs it.
    """

    def build_config(self):
        """Return the shared field configuration for agent components."""
        return {
            "lc": {
                "display_name": "LangChain",
                "info": "The LangChain to interact with.",
            },
            "handle_parsing_errors": {
                "display_name": "Handle Parsing Errors",
                "info": "If True, the agent will handle parsing errors. If False, the agent will raise an error.",
                "advanced": True,
            },
            "output_key": {
                "display_name": "Output Key",
                "info": "The key to use to get the output from the agent.",
                "advanced": True,
            },
            "memory": {
                "display_name": "Memory",
                "info": "Memory to use for the agent.",
            },
            "tools": {
                "display_name": "Tools",
                "info": "Tools the agent can use.",
            },
            "input_value": {
                "display_name": "Input",
                "info": "Input text to pass to the agent.",
            },
        }

    async def run_agent(
        self,
        agent: Union[BaseSingleActionAgent, BaseMultiActionAgent, AgentExecutor],
        inputs: str,
        input_variables: list[str],
        tools: List[Tool],
        memory: Union[BaseMemory, None] = None,
        handle_parsing_errors: bool = True,
        output_key: str = "output",
    ) -> Text:
        """Run ``agent`` on ``inputs`` and return the text under ``output_key``.

        Args:
            agent: The agent (or a ready-made executor) to run.
            inputs: User input, passed as the ``input`` prompt variable.
            input_variables: All prompt variables; any besides ``input`` and
                ``agent_scratchpad`` are filled with empty strings.
            tools: Tools made available to the agent.
            memory: Optional conversation memory for the executor.
            handle_parsing_errors: Whether the executor should recover from
                output-parsing errors instead of raising.
            output_key: Result key to return; falls back to ``"output"``.

        Raises:
            ValueError: If neither ``output_key`` nor ``"output"`` is present
                in the agent's result.
        """
        if isinstance(agent, AgentExecutor):
            runnable = agent
        else:
            runnable = AgentExecutor.from_agent_and_tools(
                agent=agent, tools=tools, verbose=True, memory=memory, handle_parsing_errors=handle_parsing_errors
            )
        input_dict = {"input": inputs}
        # Every prompt variable must be present in the input dict; supply
        # blanks for all but the ones the executor fills in itself.
        for var in input_variables:
            if var not in ("agent_scratchpad", "input"):
                input_dict[var] = ""
        result = await runnable.ainvoke(input_dict)
        self.status = result
        if output_key in result:
            return result.get(output_key)
        if "output" in result:
            # Requested key is missing but the conventional key is available.
            return result.get("output")
        if output_key != "output":
            raise ValueError(f"Output key not found in result. Tried '{output_key}' and 'output'.")
        raise ValueError("Output key not found in result. Tried 'output'.")

View file

@@ -1,3 +0,0 @@
from .model import LCModelComponent
__all__ = ["LCModelComponent"]

View file

@@ -1,48 +0,0 @@
from typing import Optional
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.language_models.llms import LLM
from langchain_core.messages import HumanMessage, SystemMessage
from langflow import CustomComponent
class LCModelComponent(CustomComponent):
    """Base class for language-model components."""

    # Placeholder metadata; concrete model components override these.
    display_name: str = "Model Name"
    description: str = "Model Description"

    def get_result(self, runnable: LLM, stream: bool, input_value: str):
        """
        Retrieve the result from a completion-style model.

        Args:
            runnable (LLM): The model to run.
            stream (bool): Indicates whether to use streaming or invocation mode.
            input_value (str): The input value to pass to the model.

        Returns:
            The generated result, or a stream iterator when ``stream`` is True.
        """
        if stream:
            result = runnable.stream(input_value)
        else:
            message = runnable.invoke(input_value)
            # Chat-style models return a message object; plain LLMs return str.
            result = message.content if hasattr(message, "content") else message
        self.status = result
        return result

    def get_chat_result(
        self, runnable: BaseChatModel, stream: bool, input_value: str, system_message: Optional[str] = None
    ):
        """
        Retrieve the result from a chat model.

        Args:
            runnable (BaseChatModel): The chat model to run.
            stream (bool): Indicates whether to use streaming or invocation mode.
            input_value (str): Content of the human message.
            system_message (Optional[str]): Optional system message content.

        Returns:
            The generated text, or a stream iterator when ``stream`` is True.
        """
        messages = []
        if input_value:
            messages.append(HumanMessage(input_value))
        if system_message:
            # NOTE(review): the system message is appended *after* the human
            # message; most chat models expect it first — confirm intent.
            messages.append(SystemMessage(system_message))
        if stream:
            result = runnable.stream(messages)
        else:
            message = runnable.invoke(messages)
            result = message.content
        self.status = result
        return result

View file

@@ -1,37 +0,0 @@
from langchain_community.tools.searchapi import SearchAPIRun
from langchain_community.utilities.searchapi import SearchApiAPIWrapper
from langflow import CustomComponent
from langflow.field_typing import Tool
class SearchApiToolComponent(CustomComponent):
    """Component exposing SearchApi's real-time search results as a tool."""

    display_name: str = "SearchApi Tool"
    description: str = "Real-time search engine results API."
    documentation: str = "https://www.searchapi.io/docs/google"
    field_config = {
        "engine": {
            "display_name": "Engine",
            "field_type": "str",
            "info": "The search engine to use.",
        },
        "api_key": {
            "display_name": "API Key",
            "field_type": "str",
            "required": True,
            "password": True,
            "info": "The API key to use SearchApi.",
        },
    }

    def build(
        self,
        engine: str,
        api_key: str,
    ) -> Tool:
        """Create the SearchApi tool from the configured engine and API key."""
        wrapper = SearchApiAPIWrapper(engine=engine, searchapi_api_key=api_key)
        search_tool = SearchAPIRun(api_wrapper=wrapper)
        self.status = search_tool
        return search_tool

View file

@@ -1,103 +0,0 @@
from contextlib import contextmanager
from typing import TYPE_CHECKING, Generator
from langflow.services import ServiceType, service_manager
if TYPE_CHECKING:
from sqlmodel import Session
from langflow.services.cache.service import CacheService
from langflow.services.chat.service import ChatService
from langflow.services.credentials.service import CredentialService
from langflow.services.database.service import DatabaseService
from langflow.services.monitor.service import MonitorService
from langflow.services.plugins.service import PluginService
from langflow.services.session.service import SessionService
from langflow.services.settings.service import SettingsService
from langflow.services.socket.service import SocketIOService
from langflow.services.storage.service import StorageService
from langflow.services.store.service import StoreService
from langflow.services.task.service import TaskService
def get_socket_service() -> "SocketIOService":
    """Fetch the registered Socket.IO service."""
    service = service_manager.get(ServiceType.SOCKETIO_SERVICE)  # type: ignore
    return service


def get_storage_service() -> "StorageService":
    """Fetch the registered storage service."""
    service = service_manager.get(ServiceType.STORAGE_SERVICE)  # type: ignore
    return service


def get_credential_service() -> "CredentialService":
    """Fetch the registered credential service."""
    service = service_manager.get(ServiceType.CREDENTIAL_SERVICE)  # type: ignore
    return service


def get_plugins_service() -> "PluginService":
    """Fetch the registered plugin service."""
    service = service_manager.get(ServiceType.PLUGIN_SERVICE)  # type: ignore
    return service
def get_settings_service() -> "SettingsService":
    """Fetch the settings service, bootstrapping it on first use."""
    try:
        service = service_manager.get(ServiceType.SETTINGS_SERVICE)  # type: ignore
    except ValueError:
        # Not registered yet — initialize the settings service lazily.
        from langflow.services.manager import initialize_settings_service

        initialize_settings_service()
        service = service_manager.get(ServiceType.SETTINGS_SERVICE)  # type: ignore
    return service
def get_db_service() -> "DatabaseService":
    """Fetch the registered database service."""
    service = service_manager.get(ServiceType.DATABASE_SERVICE)  # type: ignore
    return service
def get_session() -> Generator["Session", None, None]:
    """Yield database sessions produced by the database service."""
    yield from get_db_service().get_session()
@contextmanager
def session_scope():
    """
    Provide a transactional scope around a series of database operations.

    Commits on success, rolls back on any failure (re-raising the original
    error), and always closes the session.

    Yields:
        session: The active database session.

    Raises:
        Exception: Whatever the wrapped operations raise, after rollback.
    """
    session = next(get_session())
    try:
        yield session
        session.commit()
    except BaseException:
        # Roll back on *any* error — including KeyboardInterrupt — then
        # re-raise so callers see the original failure. (Explicit form of
        # the bare ``except:`` this replaces; behavior is identical.)
        session.rollback()
        raise
    finally:
        session.close()
def get_cache_service() -> "CacheService":
    """Fetch the registered cache service."""
    service = service_manager.get(ServiceType.CACHE_SERVICE)  # type: ignore
    return service


def get_session_service() -> "SessionService":
    """Fetch the registered session service."""
    service = service_manager.get(ServiceType.SESSION_SERVICE)  # type: ignore
    return service


def get_monitor_service() -> "MonitorService":
    """Fetch the registered monitor service."""
    service = service_manager.get(ServiceType.MONITOR_SERVICE)  # type: ignore
    return service


def get_task_service() -> "TaskService":
    """Fetch the registered task service."""
    service = service_manager.get(ServiceType.TASK_SERVICE)  # type: ignore
    return service


def get_chat_service() -> "ChatService":
    """Fetch the registered chat service."""
    service = service_manager.get(ServiceType.CHAT_SERVICE)  # type: ignore
    return service


def get_store_service() -> "StoreService":
    """Fetch the registered store service."""
    service = service_manager.get(ServiceType.STORE_SERVICE)  # type: ignore
    return service

View file

@@ -1,83 +0,0 @@
import importlib
import inspect
from typing import TYPE_CHECKING, Type, get_type_hints
from cachetools import LRUCache, cached
from loguru import logger
from langflow.services.schema import ServiceType
if TYPE_CHECKING:
from langflow.services.base import Service
class ServiceFactory:
    """Factory that builds a service and records its service-type dependencies."""

    def __init__(
        self,
        service_class,
    ):
        self.service_class = service_class
        # Dependencies are inferred from the type hints of ``create``.
        # NOTE(review): ``infer_service_types`` is annotated to take the factory
        # *class* but receives ``self`` here; it only reads ``.create`` and
        # ``.service_class``, so both work — confirm intent.
        self.dependencies = infer_service_types(self, import_all_services_into_a_dict())

    def create(self, *args, **kwargs) -> "Service":
        """Instantiate and return the wrapped service class.

        Bug fix: the original used ``raise`` instead of ``return``, which made
        every call fail with a TypeError (service instances aren't exceptions).
        """
        return self.service_class(*args, **kwargs)
def hash_factory(factory: ServiceFactory) -> str:
    """Cache-key helper: identify a factory by its service class's name."""
    service_class = factory.service_class
    return service_class.__name__
def hash_dict(d: dict) -> str:
    """Cache-key helper: a dict's string form serves as its hash here."""
    return "{}".format(d)
def hash_infer_service_types_args(factory_class: Type[ServiceFactory], available_services=None) -> str:
    """Cache-key function for ``infer_service_types``: join both arg hashes."""
    return f"{hash_factory(factory_class)}_{hash_dict(available_services)}"
@cached(cache=LRUCache(maxsize=10), key=hash_infer_service_types_args)
def infer_service_types(factory_class: Type[ServiceFactory], available_services=None) -> list["ServiceType"]:
    """Infer the ServiceType dependencies of a factory's ``create`` method.

    Args:
        factory_class: The factory whose ``create`` type hints are inspected.
        available_services: Mapping of service class names to classes, used as
            globals so string annotations on ``create`` resolve.

    Returns:
        The list of ``ServiceType`` members matching ``create``'s parameters.
        (The original annotation claimed a single ``ServiceType``; a list is
        what is actually returned.)

    Raises:
        ValueError: If a parameter's type has no matching ``ServiceType``.
    """
    create_method = factory_class.create
    type_hints = get_type_hints(create_method, globalns=available_services)
    service_types = []
    for param_name, param_type in type_hints.items():
        # Skip the return type if it's included in type hints
        if param_name == "return":
            continue
        # Convert the type name to the enum format, e.g.
        # ``DatabaseService`` -> ``DATABASE_SERVICE``.
        type_name = param_type.__name__.upper().replace("SERVICE", "_SERVICE")
        try:
            # Attempt to find a matching enum value
            service_type = ServiceType[type_name]
        except KeyError as exc:
            # Chain the KeyError so the lookup failure stays visible.
            raise ValueError(f"No matching ServiceType for parameter type: {param_type.__name__}") from exc
        service_types.append(service_type)
    return service_types
@cached(cache=LRUCache(maxsize=1))
def import_all_services_into_a_dict():
    """Import every concrete Service subclass and return ``{class_name: class}``.

    Services live in ``langflow.services.{service_name}.service`` and subclass
    ``Service``; the resulting dict is used as globals when resolving the type
    hints of factory ``create`` methods.

    Raises:
        RuntimeError: If any service module fails to import.
    """
    from langflow.services.base import Service

    services = {}
    for service_type in ServiceType:
        try:
            # ``service_type`` is already a ServiceType member; no need to
            # re-wrap it in ``ServiceType(...)`` as the original did.
            service_name = service_type.value.replace("_service", "")
            module_name = f"langflow.services.{service_name}.service"
            module = importlib.import_module(module_name)
            for name, obj in inspect.getmembers(module, inspect.isclass):
                if issubclass(obj, Service) and obj is not Service:
                    services[name] = obj
                    # Only one service class per module is expected — stop at
                    # the first match. (Reconstructed placement; TODO confirm.)
                    break
        except Exception as exc:
            logger.exception(exc)
            raise RuntimeError("Could not initialize services. Please check your settings.") from exc
    return services