updates imports to use settings_manager

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-08-05 23:20:35 -03:00
commit 6ca7308e3c
25 changed files with 182 additions and 269 deletions

View file

@ -2,7 +2,7 @@ from datetime import timezone
from typing import List
from uuid import UUID
from langflow.services.database.models.component import Component, ComponentModel
from langflow.services.database.base import get_session
from langflow.services.utils import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.exc import IntegrityError

View file

@ -4,9 +4,8 @@ from typing import Annotated, Optional
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.services.utils import get_settings_manager
from langflow.utils.logger import logger
from langflow.settings import settings
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body
from langflow.interface.custom.custom_component import CustomComponent
@ -26,7 +25,7 @@ from langflow.interface.types import (
build_langchain_custom_component_list_from_path,
)
from langflow.services.database.base import get_session
from langflow.services.utils import get_session
from sqlmodel import Session
# build router
@ -40,11 +39,14 @@ def get_all():
# custom_components is a list of dicts
# need to merge all the keys into one dict
custom_components_from_file = {}
if settings.COMPONENTS_PATH:
logger.info(f"Building custom components from {settings.COMPONENTS_PATH}")
settings_manager = get_settings_manager()
if settings_manager.settings.COMPONENTS_PATH:
logger.info(
f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}"
)
custom_component_dicts = [
build_langchain_custom_component_list_from_path(str(path))
for path in settings.COMPONENTS_PATH
for path in settings_manager.settings.COMPONENTS_PATH
]
logger.info(f"Loading {len(custom_component_dicts)} custom components")

View file

@ -5,7 +5,7 @@ from langflow.services.database.models.flow_style import (
FlowStyleRead,
FlowStyleUpdate,
)
from langflow.services.database.base import get_session
from langflow.services.utils import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException

View file

@ -1,6 +1,5 @@
from typing import List
from uuid import UUID
from langflow.settings import settings
from langflow.api.utils import remove_api_keys
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.services.database.models.flow import (
@ -10,7 +9,8 @@ from langflow.services.database.models.flow import (
FlowReadWithStyle,
FlowUpdate,
)
from langflow.services.database.base import get_session
from langflow.services.utils import get_session
from langflow.services.utils import get_settings_manager
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from fastapi.encoders import jsonable_encoder
@ -61,7 +61,8 @@ def update_flow(
if not db_flow:
raise HTTPException(status_code=404, detail="Flow not found")
flow_data = flow.dict(exclude_unset=True)
if settings.REMOVE_API_KEYS:
settings_manager = get_settings_manager()
if settings_manager.settings.REMOVE_API_KEYS:
flow_data = remove_api_keys(flow_data)
for key, value in flow_data.items():
setattr(db_flow, key, value)

View file

@ -1,7 +1,7 @@
from typing import Dict, Generator, List, Type, Union
from langflow.graph.edge.base import Edge
from langflow.graph.graph.constants import VERTEX_TYPE_MAP
from langflow.graph.graph.constants import lazy_load_vertex_dict
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import (
FileToolVertex,
@ -187,10 +187,12 @@ class Graph:
"""Returns the node class based on the node type."""
if node_type in FILE_TOOLS:
return FileToolVertex
if node_type in VERTEX_TYPE_MAP:
return VERTEX_TYPE_MAP[node_type]
if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type]
return (
VERTEX_TYPE_MAP[node_lc_type] if node_lc_type in VERTEX_TYPE_MAP else Vertex
lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_lc_type]
if node_lc_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP
else Vertex
)
def _build_vertices(self) -> List[Vertex]:

View file

@ -1,4 +1,3 @@
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex import types
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
@ -15,23 +14,45 @@ from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.base import custom_component_creator
from typing import Dict, Type
from langflow.utils.lazy_load import LazyLoadDictBase
VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = {
**{t: types.PromptVertex for t in prompt_creator.to_list()},
**{t: types.AgentVertex for t in agent_creator.to_list()},
**{t: types.ChainVertex for t in chain_creator.to_list()},
**{t: types.ToolVertex for t in tool_creator.to_list()},
**{t: types.ToolkitVertex for t in toolkits_creator.to_list()},
**{t: types.WrapperVertex for t in wrapper_creator.to_list()},
**{t: types.LLMVertex for t in llm_creator.to_list()},
**{t: types.MemoryVertex for t in memory_creator.to_list()},
**{t: types.EmbeddingVertex for t in embedding_creator.to_list()},
**{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()},
**{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
**{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
**{t: types.OutputParserVertex for t in output_parser_creator.to_list()},
**{t: types.CustomComponentVertex for t in custom_component_creator.to_list()},
**{t: types.RetrieverVertex for t in retriever_creator.to_list()},
}
class VertexTypesDict(LazyLoadDictBase):
    """Lazily built mapping from vertex type names to Vertex classes.

    Assembling the map calls ``to_list()`` on every creator singleton, which
    imports langchain modules, so construction is deferred until the
    ``VERTEX_TYPE_MAP`` property is first read.
    """

    def __init__(self):
        self._all_types_dict = None

    @property
    def VERTEX_TYPE_MAP(self):
        # The base class builds and caches the dict on first access.
        return self.all_types_dict

    def _build_dict(self):
        vertex_map = self.get_type_dict()
        return {
            **vertex_map,
            # NOTE(review): unlike every other entry, "Custom" maps to a list
            # of names rather than a Vertex class — looks copied from the
            # type-listing dict; confirm consumers expect this shape.
            "Custom": ["Custom Tool", "Python Function"],
        }

    def get_type_dict(self):
        # Creator singleton -> vertex class; later entries win on name
        # collisions, matching the original merge order.
        creator_to_vertex = {
            prompt_creator: types.PromptVertex,
            agent_creator: types.AgentVertex,
            chain_creator: types.ChainVertex,
            tool_creator: types.ToolVertex,
            toolkits_creator: types.ToolkitVertex,
            wrapper_creator: types.WrapperVertex,
            llm_creator: types.LLMVertex,
            memory_creator: types.MemoryVertex,
            embedding_creator: types.EmbeddingVertex,
            vectorstore_creator: types.VectorStoreVertex,
            documentloader_creator: types.DocumentLoaderVertex,
            textsplitter_creator: types.TextSplitterVertex,
            output_parser_creator: types.OutputParserVertex,
            custom_component_creator: types.CustomComponentVertex,
            retriever_creator: types.RetrieverVertex,
        }
        return {
            name: vertex_class
            for creator, vertex_class in creator_to_vertex.items()
            for name in creator.to_list()
        }


lazy_load_vertex_dict = VertexTypesDict()

View file

@ -1,6 +1,6 @@
import ast
from langflow.interface.initialize import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.interface.listing import lazy_load_dict
from langflow.utils.constants import DIRECT_TYPES
from langflow.utils.logger import logger
from langflow.utils.util import sync_to_async
@ -62,7 +62,7 @@ class Vertex:
)
if self.base_type is None:
for base_type, value in ALL_TYPES_DICT.items():
for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items():
if self.vertex_type in value:
self.base_type = base_type
break

View file

@ -5,7 +5,8 @@ from langchain.agents import types
from langflow.custom.customs import get_custom_nodes
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.agents import AgentFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
@ -53,13 +54,17 @@ class AgentCreator(LangChainTypeCreator):
# Now this is a generator
def to_list(self) -> List[str]:
names = []
settings_manager = get_settings_manager()
for _, agent in self.type_to_loader_dict.items():
agent_name = (
agent.function_name()
if hasattr(agent, "function_name")
else agent.__name__
)
if agent_name in settings.AGENTS or settings.DEV:
if (
agent_name in settings_manager.settings.AGENTS
or settings_manager.settings.DEV
):
names.append(agent_name)
return names

View file

@ -2,13 +2,14 @@ from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type, Union
from langchain.chains.base import Chain
from langchain.agents import AgentExecutor
from langflow.services.utils import get_settings_manager
from pydantic import BaseModel
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.template.base import Template
from langflow.utils.logger import logger
from langflow.settings import settings
# Assuming necessary imports for Field, Template, and FrontendNode classes
@ -26,9 +27,12 @@ class LangChainTypeCreator(BaseModel, ABC):
@property
def docs_map(self) -> Dict[str, str]:
"""A dict with the name of the component as key and the documentation link as value."""
settings_manager = get_settings_manager()
if self.name_docs_dict is None:
try:
type_settings = getattr(settings, self.type_name.upper())
type_settings = getattr(
settings_manager.settings, self.type_name.upper()
)
self.name_docs_dict = {
name: value_dict["documentation"]
for name, value_dict in type_settings.items()

View file

@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional, Type
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.chains import ChainFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
@ -30,6 +31,7 @@ class ChainCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict: dict[str, Any] = {
chain_name: import_class(f"langchain.chains.{chain_name}")
for chain_name in chains.__all__
@ -43,7 +45,8 @@ class ChainCreator(LangChainTypeCreator):
self.type_dict = {
name: chain
for name, chain in self.type_dict.items()
if name in settings.CHAINS or settings.DEV
if name in settings_manager.settings.CHAINS
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -1,9 +1,10 @@
from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -30,10 +31,12 @@ class DocumentLoaderCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
documentloader.__name__
for documentloader in self.type_to_loader_dict.values()
if documentloader.__name__ in settings.DOCUMENTLOADERS or settings.DEV
if documentloader.__name__ in settings_manager.settings.DOCUMENTLOADERS
or settings_manager.settings.DEV
]

View file

@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import embedding_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode
from langflow.utils.logger import logger
@ -32,10 +33,12 @@ class EmbeddingCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
embedding.__name__
for embedding in self.type_to_loader_dict.values()
if embedding.__name__ in settings.EMBEDDINGS or settings.DEV
if embedding.__name__ in settings_manager.settings.EMBEDDINGS
or settings_manager.settings.DEV
]

View file

@ -14,34 +14,43 @@ from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.base import custom_component_creator
from langflow.utils.lazy_load import LazyLoadDictBase
def get_type_dict():
return {
"agents": agent_creator.to_list(),
"prompts": prompt_creator.to_list(),
"llms": llm_creator.to_list(),
"tools": tool_creator.to_list(),
"chains": chain_creator.to_list(),
"memory": memory_creator.to_list(),
"toolkits": toolkits_creator.to_list(),
"wrappers": wrapper_creator.to_list(),
"documentLoaders": documentloader_creator.to_list(),
"vectorStore": vectorstore_creator.to_list(),
"embeddings": embedding_creator.to_list(),
"textSplitters": textsplitter_creator.to_list(),
"utilities": utility_creator.to_list(),
"outputParsers": output_parser_creator.to_list(),
"retrievers": retriever_creator.to_list(),
"custom_components": custom_component_creator.to_list(),
}
class AllTypesDict(LazyLoadDictBase):
    """Lazy registry of every component type exposed by the interface.

    Each creator's ``to_list()`` can be expensive (it imports langchain
    modules), so the dict is only assembled on first access to
    ``ALL_TYPES_DICT``.
    """

    def __init__(self):
        self._all_types_dict = None

    @property
    def ALL_TYPES_DICT(self):
        # Built and cached by the base class via ``_build_dict``.
        return self.all_types_dict

    def _build_dict(self):
        return {
            **self.get_type_dict(),
            # Langflow-native entries that have no langchain creator.
            "Custom": ["Custom Tool", "Python Function"],
        }

    def get_type_dict(self):
        # Category key (as the frontend expects it) -> creator singleton.
        creators = {
            "agents": agent_creator,
            "prompts": prompt_creator,
            "llms": llm_creator,
            "tools": tool_creator,
            "chains": chain_creator,
            "memory": memory_creator,
            "toolkits": toolkits_creator,
            "wrappers": wrapper_creator,
            "documentLoaders": documentloader_creator,
            "vectorStore": vectorstore_creator,
            "embeddings": embedding_creator,
            "textSplitters": textsplitter_creator,
            "utilities": utility_creator,
            "outputParsers": output_parser_creator,
            "retrievers": retriever_creator,
            "custom_components": custom_component_creator,
        }
        return {key: creator.to_list() for key, creator in creators.items()}
LANGCHAIN_TYPES_DICT = get_type_dict()
# Now we'll build a dict with Langchain types and ours
ALL_TYPES_DICT = {
**LANGCHAIN_TYPES_DICT,
"Custom": ["Custom Tool", "Python Function"],
}
lazy_load_dict = AllTypesDict()

View file

@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.llms import LLMFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -33,10 +34,12 @@ class LLMCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
llm.__name__
for llm in self.type_to_loader_dict.values()
if llm.__name__ in settings.LLMS or settings.DEV
if llm.__name__ in settings_manager.settings.LLMS
or settings_manager.settings.DEV
]

View file

@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import memory_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.memories import MemoryFrontendNode
from langflow.utils.logger import logger
@ -48,10 +49,12 @@ class MemoryCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
memory.__name__
for memory in self.type_to_loader_dict.values()
if memory.__name__ in settings.MEMORIES or settings.DEV
if memory.__name__ in settings_manager.settings.MEMORIES
or settings_manager.settings.DEV
]

View file

@ -4,7 +4,8 @@ from langchain import output_parsers
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
@ -23,6 +24,7 @@ class OutputParserCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict = {
output_parser_name: import_class(
f"langchain.output_parsers.{output_parser_name}"
@ -33,7 +35,8 @@ class OutputParserCreator(LangChainTypeCreator):
self.type_dict = {
name: output_parser
for name, output_parser in self.type_dict.items()
if name in settings.OUTPUT_PARSERS or settings.DEV
if name in settings_manager.settings.OUTPUT_PARSERS
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -5,7 +5,8 @@ from langchain import prompts
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.prompts import PromptFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -20,6 +21,7 @@ class PromptCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
settings_manager = get_settings_manager()
if self.type_dict is None:
self.type_dict = {
prompt_name: import_class(f"langchain.prompts.{prompt_name}")
@ -34,7 +36,8 @@ class PromptCreator(LangChainTypeCreator):
self.type_dict = {
name: prompt
for name, prompt in self.type_dict.items()
if name in settings.PROMPTS or settings.DEV
if name in settings_manager.settings.PROMPTS
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -4,7 +4,8 @@ from langchain import retrievers
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.retrievers import RetrieverFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_method, build_template_from_class
@ -48,10 +49,12 @@ class RetrieverCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
retriever
for retriever in self.type_to_loader_dict.keys()
if retriever in settings.RETRIEVERS or settings.DEV
if retriever in settings_manager.settings.RETRIEVERS
or settings_manager.settings.DEV
]

View file

@ -1,9 +1,10 @@
from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode
from langflow.interface.custom_lists import textsplitter_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -30,10 +31,12 @@ class TextSplitterCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
textsplitter.__name__
for textsplitter in self.type_to_loader_dict.values()
if textsplitter.__name__ in settings.TEXTSPLITTERS or settings.DEV
if textsplitter.__name__ in settings_manager.settings.TEXTSPLITTERS
or settings_manager.settings.DEV
]

View file

@ -4,7 +4,8 @@ from langchain.agents import agent_toolkits
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class, import_module
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -29,13 +30,15 @@ class ToolkitCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict = {
toolkit_name: import_class(
f"langchain.agents.agent_toolkits.{toolkit_name}"
)
# if toolkit_name is not lower case it is a class
for toolkit_name in agent_toolkits.__all__
if not toolkit_name.islower() and toolkit_name in settings.TOOLKITS
if not toolkit_name.islower()
and toolkit_name in settings_manager.settings.TOOLKITS
}
return self.type_dict

View file

@ -15,7 +15,8 @@ from langflow.interface.tools.constants import (
OTHER_TOOLS,
)
from langflow.interface.tools.util import get_tool_params
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.field.base import TemplateField
from langflow.template.template.base import Template
from langflow.utils import util
@ -66,6 +67,7 @@ class ToolCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
settings_manager = get_settings_manager()
if self.tools_dict is None:
all_tools = {}
@ -74,7 +76,10 @@ class ToolCreator(LangChainTypeCreator):
tool_name = tool_params.get("name") or tool
if tool_name in settings.TOOLS or settings.DEV:
if (
tool_name in settings_manager.settings.TOOLS
or settings_manager.settings.DEV
):
if tool_name == "JsonSpec":
tool_params["path"] = tool_params.pop("dict_") # type: ignore
all_tools[tool_name] = {

View file

@ -5,7 +5,8 @@ from langchain import SQLDatabase, utilities
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.utilities import UtilitiesFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -26,6 +27,7 @@ class UtilityCreator(LangChainTypeCreator):
from the langchain.chains module and filtering them according to the settings.utilities list.
"""
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict = {
utility_name: import_class(f"langchain.utilities.{utility_name}")
for utility_name in utilities.__all__
@ -35,7 +37,8 @@ class UtilityCreator(LangChainTypeCreator):
self.type_dict = {
name: utility
for name, utility in self.type_dict.items()
if name in settings.UTILITIES or settings.DEV
if name in settings_manager.settings.UTILITIES
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -10,6 +10,7 @@ from langchain.base_language import BaseLanguageModel
from PIL.Image import Image
from langflow.utils.logger import logger
from langflow.services.chat.config import ChatConfig
from langflow.services.utils import get_settings_manager
def load_file_into_dict(file_path: str) -> dict:
@ -63,13 +64,11 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]:
def setup_llm_caching():
"""Setup LLM caching."""
from langflow.settings import settings
settings_manager = get_settings_manager()
try:
set_langchain_cache(settings)
set_langchain_cache(settings_manager.settings)
except ImportError:
logger.warning(f"Could not import {settings.CACHE}. ")
logger.warning(f"Could not import {settings_manager.settings.CACHE}. ")
except Exception as exc:
logger.warning(f"Could not setup LLM caching. Error: {exc}")

View file

@ -4,7 +4,8 @@ from langchain import vectorstores
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_method
@ -43,10 +44,12 @@ class VectorstoreCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
vectorstore
for vectorstore in self.type_to_loader_dict.keys()
if vectorstore in settings.VECTORSTORES or settings.DEV
if vectorstore in settings_manager.settings.VECTORSTORES
or settings_manager.settings.DEV
]

View file

@ -1,171 +0,0 @@
import contextlib
import json
import os
from typing import Optional, List
from pathlib import Path
import yaml
from pydantic import BaseSettings, root_validator, validator
from langflow.utils.logger import logger
BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
class Settings(BaseSettings):
    """Langflow application settings.

    Values come from explicit kwargs, environment variables prefixed with
    ``LANGFLOW_`` (see ``Config.env_prefix``), and field defaults.
    ``update_from_yaml`` replaces the component registries from a YAML file.
    """

    # Component registries: component name -> config dict (loaded from YAML).
    CHAINS: dict = {}
    AGENTS: dict = {}
    PROMPTS: dict = {}
    LLMS: dict = {}
    TOOLS: dict = {}
    MEMORIES: dict = {}
    EMBEDDINGS: dict = {}
    VECTORSTORES: dict = {}
    DOCUMENTLOADERS: dict = {}
    WRAPPERS: dict = {}
    RETRIEVERS: dict = {}
    TOOLKITS: dict = {}
    TEXTSPLITTERS: dict = {}
    UTILITIES: dict = {}
    OUTPUT_PARSERS: dict = {}
    CUSTOM_COMPONENTS: dict = {}

    # When True, creators expose every component regardless of the registries.
    DEV: bool = False
    DATABASE_URL: Optional[str] = None
    CACHE: str = "InMemoryCache"
    # Strip API keys from flow data before persisting it.
    REMOVE_API_KEYS: bool = False
    COMPONENTS_PATH: List[str] = []

    @validator("DATABASE_URL", pre=True)
    def set_database_url(cls, value):
        """Fall back to LANGFLOW_DATABASE_URL, then to a local sqlite file."""
        if not value:
            logger.debug(
                "No database_url provided, trying LANGFLOW_DATABASE_URL env variable"
            )
            if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
                value = langflow_database_url
                logger.debug("Using LANGFLOW_DATABASE_URL env variable.")
            else:
                logger.debug("No DATABASE_URL env variable, using sqlite database")
                value = "sqlite:///./langflow.db"
        return value

    @validator("COMPONENTS_PATH", pre=True)
    def set_components_path(cls, value):
        """Merge LANGFLOW_COMPONENTS_PATH and the bundled components dir into *value*."""
        if os.getenv("LANGFLOW_COMPONENTS_PATH"):
            logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
            langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH")
            if (
                Path(langflow_component_path).exists()
                and langflow_component_path not in value
            ):
                # NOTE(review): os.getenv always returns a str, so this
                # isinstance(list) branch looks unreachable — confirm intent.
                if isinstance(langflow_component_path, list):
                    for path in langflow_component_path:
                        if path not in value:
                            value.append(path)
                    logger.debug(
                        f"Extending {langflow_component_path} to components_path"
                    )
                elif langflow_component_path not in value:
                    value.append(langflow_component_path)
                    logger.debug(
                        f"Appending {langflow_component_path} to components_path"
                    )

        if not value:
            value = [BASE_COMPONENTS_PATH]
            logger.debug("Setting default components path to components_path")
        elif BASE_COMPONENTS_PATH not in value:
            value.append(BASE_COMPONENTS_PATH)
            logger.debug("Adding default components path to components_path")

        logger.debug(f"Components path: {value}")
        return value

    class Config:
        validate_assignment = True
        extra = "ignore"
        env_prefix = "LANGFLOW_"

    @root_validator(allow_reuse=True)
    def validate_lists(cls, values):
        """Coerce falsy registry values (e.g. None from YAML) to empty lists.

        Bug fix: field names are upper-case, so the previous lower-case
        ``key != "dev"`` guard never matched and ``DEV=False`` was silently
        replaced with ``[]``. Compare case-insensitively so DEV keeps its
        boolean value.
        """
        for key, value in values.items():
            if key.upper() != "DEV" and not value:
                values[key] = []
        return values

    def update_from_yaml(self, file_path: str, dev: bool = False):
        """Replace all component registries with the ones loaded from *file_path*."""
        new_settings = load_settings_from_yaml(file_path)
        self.CHAINS = new_settings.CHAINS or {}
        self.AGENTS = new_settings.AGENTS or {}
        self.PROMPTS = new_settings.PROMPTS or {}
        self.LLMS = new_settings.LLMS or {}
        self.TOOLS = new_settings.TOOLS or {}
        self.MEMORIES = new_settings.MEMORIES or {}
        self.WRAPPERS = new_settings.WRAPPERS or {}
        self.TOOLKITS = new_settings.TOOLKITS or {}
        self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {}
        self.UTILITIES = new_settings.UTILITIES or {}
        self.EMBEDDINGS = new_settings.EMBEDDINGS or {}
        self.VECTORSTORES = new_settings.VECTORSTORES or {}
        self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {}
        self.RETRIEVERS = new_settings.RETRIEVERS or {}
        self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {}
        self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {}
        self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or []
        self.DEV = dev

    def update_settings(self, **kwargs):
        """Update known settings in place.

        List-valued fields are extended (JSON-encoded list strings are
        decoded first); everything else is replaced via ``setattr``.
        Unknown keys are logged and skipped.
        """
        logger.debug("Updating settings")
        for key, value in kwargs.items():
            # value may contain sensitive information, so we don't want to log it
            if not hasattr(self, key):
                logger.debug(f"Key {key} not found in settings")
                continue
            logger.debug(f"Updating {key}")
            if isinstance(getattr(self, key), list):
                # value might be a '[something]' string
                with contextlib.suppress(json.decoder.JSONDecodeError):
                    value = json.loads(str(value))
                if isinstance(value, list):
                    for item in value:
                        if item not in getattr(self, key):
                            getattr(self, key).append(item)
                    logger.debug(f"Extended {key}")
                else:
                    getattr(self, key).append(value)
                    logger.debug(f"Appended {key}")
            else:
                setattr(self, key, value)
                logger.debug(f"Updated {key}")
            logger.debug(f"{key}: {getattr(self, key)}")
def save_settings_to_yaml(settings: Settings, file_path: str):
    """Serialize *settings* to *file_path* as YAML (overwrites the file)."""
    settings_dict = settings.dict()
    with open(file_path, "w") as f:
        yaml.dump(settings_dict, f)
def load_settings_from_yaml(file_path: str) -> Settings:
    """Build a :class:`Settings` instance from a YAML config file.

    A bare file name (no "/") is resolved relative to this module's
    directory. Raises ``KeyError`` for keys not declared on ``Settings``.
    """
    # NOTE(review): the "/" check does not recognize Windows back-slash
    # paths — confirm whether that matters for this code path.
    if "/" not in file_path:
        module_dir = os.path.dirname(os.path.abspath(__file__))
        file_path = os.path.join(module_dir, file_path)

    with open(file_path, "r") as f:
        settings_dict = yaml.safe_load(f)

    # Field names on Settings are upper-case; normalize and validate keys.
    settings_dict = {k.upper(): v for k, v in settings_dict.items()}
    for key in settings_dict:
        if key not in Settings.__fields__.keys():
            raise KeyError(f"Key {key} not found in settings")
        logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")

    return Settings(**settings_dict)


settings = load_settings_from_yaml("config.yaml")