🔥 refactor(loading.py): remove unused imports and functions

Removed unused imports and functions to improve the code's readability and maintainability.
This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-06-23 10:51:09 -03:00
commit fb25744714

View file

@ -1,22 +1,11 @@
import json
from typing import Any, Callable, Dict, Optional
from typing import Any, Callable, Dict, Sequence
from langchain.agents import ZeroShotAgent
from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.agents.loading import load_agent_from_config
from langchain.agents.tools import Tool
from langchain.base_language import BaseLanguageModel
from langchain.callbacks.base import BaseCallbackManager
from langchain.chains.loading import load_chain_from_config
from langchain.llms.loading import load_llm_from_config
from langchain.agents.tools import BaseTool
from langflow.interface.initialize.vector_store import (
initialize_chroma,
initialize_faiss,
@ -31,9 +20,8 @@ from langflow.interface.custom_lists import CUSTOM_NODES
from langflow.interface.importing.utils import get_function, import_by_type
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.types import get_type_list
from langflow.interface.utils import load_file_into_dict
from langflow.utils import util, validate
from langflow.utils import validate
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
@ -237,49 +225,15 @@ def replace_zero_shot_prompt_with_prompt_template(nodes):
return nodes
def load_langchain_type_from_config(config: Dict[str, Any]):
    """Instantiate a langchain object (agent, chain or LLM) from a config dict.

    Dispatches on the config's ``_type`` field against the known type lists
    and delegates to the matching loader.

    Raises:
        ValueError: if ``_type`` is not a known agent, chain or llm type.
    """
    known_types = get_type_list()
    config_type = config["_type"]
    if config_type in known_types["agents"]:
        # Normalize verbosity in the serialized config before loading.
        config = util.update_verbose(config, new_value=False)
        return load_agent_executor_from_config(config, verbose=True)
    if config_type in known_types["chains"]:
        config = util.update_verbose(config, new_value=False)
        return load_chain_from_config(config, verbose=True)
    if config_type in known_types["llms"]:
        config = util.update_verbose(config, new_value=True)
        return load_llm_from_config(config)
    raise ValueError("Type should be either agent, chain or llm")
def load_agent_executor_from_config(
    config: dict,
    llm: Optional[BaseLanguageModel] = None,
    tools: Optional[list[Tool]] = None,
    callback_manager: Optional[BaseCallbackManager] = None,
    **kwargs: Any,
):
    """Build an AgentExecutor from a serialized agent config.

    NOTE(review): the ``tools`` argument is immediately overwritten by the
    tools loaded from ``config["allowed_tools"]`` — any caller-supplied
    tools are ignored; confirm this is intentional.
    """
    tools = load_tools_from_config(config["allowed_tools"])
    # The agent loader expects tool *names* in the config, not tool dicts.
    config["allowed_tools"] = [t.name for t in tools] if tools else []
    agent_obj = load_agent_from_config(config, llm, tools, **kwargs)
    executor = AgentExecutor.from_agent_and_tools(
        agent=agent_obj,
        tools=tools,
        callback_manager=callback_manager,
        **kwargs,
    )
    return executor
def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs):
"""Load agent executor from agent class, tools and chain"""
allowed_tools = params.get("allowed_tools", [])
allowed_tools: Sequence[BaseTool] = params.get("allowed_tools", [])
llm_chain = params["llm_chain"]
# if allowed_tools is not a list or set, make it a list
if not isinstance(allowed_tools, (list, set)):
allowed_tools = [allowed_tools]
if not isinstance(allowed_tools, (list, set)) and isinstance(
allowed_tools, BaseTool
):
allowed_tools: Sequence[BaseTool] = [allowed_tools]
tool_names = [tool.name for tool in allowed_tools]
# Agent class requires an output_parser but Agent classes
# have a default output_parser.
@ -297,46 +251,6 @@ def load_toolkits_executor(node_type: str, toolkit: BaseToolkit, params: dict):
return create_function(llm=llm, toolkit=toolkit)
def load_tools_from_config(tool_list: list[dict]) -> list:
    """Load tools based on a config list.

    Args:
        tool_list: list of tool config dicts. Each dict must contain a
            ``_type`` key, may contain an ``llm`` sub-config, and any
            remaining keys are forwarded to the tool constructor.

    Returns:
        List of instantiated tools.

    Raises:
        ValueError: if an LLM-requiring tool has no llm config, if required
            extra parameters are missing, or if ``_type`` is unknown.
    """
    tools = []
    for tool in tool_list:
        # Pop the control keys so only constructor kwargs remain in ``tool``.
        tool_type = tool.pop("_type")
        llm_config = tool.pop("llm", None)
        llm = load_llm_from_config(llm_config) if llm_config else None
        # NOTE: ``kwargs`` aliases ``tool`` (no copy) — the input dicts are
        # mutated by the pops above.
        kwargs = tool
        if tool_type in _BASE_TOOLS:
            # Tools that take no LLM and no extra parameters.
            tools.append(_BASE_TOOLS[tool_type]())
        elif tool_type in _LLM_TOOLS:
            if llm is None:
                raise ValueError(f"Tool {tool_type} requires an LLM to be provided")
            tools.append(_LLM_TOOLS[tool_type](llm))
        elif tool_type in _EXTRA_LLM_TOOLS:
            if llm is None:
                raise ValueError(f"Tool {tool_type} requires an LLM to be provided")
            _get_llm_tool_func, extra_keys = _EXTRA_LLM_TOOLS[tool_type]
            # Fail fast if the config omits any required extra parameter.
            if missing_keys := set(extra_keys).difference(kwargs):
                raise ValueError(
                    f"Tool {tool_type} requires some parameters that were not "
                    f"provided: {missing_keys}"
                )
            tools.append(_get_llm_tool_func(llm=llm, **kwargs))
        elif tool_type in _EXTRA_OPTIONAL_TOOLS:
            _get_tool_func, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
            # Drop falsy values before calling the constructor.
            # NOTE(review): this also drops legitimate False/0/"" parameter
            # values — confirm that is intended.
            kwargs = {k: value for k, value in kwargs.items() if value}
            tools.append(_get_tool_func(**kwargs))
        else:
            raise ValueError(f"Got unknown tool {tool_type}")
    return tools
def build_prompt_template(prompt, tools):
"""Build PromptTemplate from ZeroShotPrompt"""
prefix = prompt["node"]["template"]["prefix"]["value"]