📝 chore(utils.py): add utility function to check if an object is a basic type

📝 chore(loading.py): refactor code to improve readability and maintainability
📝 chore(vector_store.py): refactor code to improve readability and maintainability
📝 chore(run.py): update return type hint for build_sorted_vertices function
This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-09-22 10:59:53 -03:00
commit 04fe2f6054
4 changed files with 32 additions and 4 deletions

View file

@@ -0,0 +1,5 @@
from langflow.utils.constants import PYTHON_BASIC_TYPES
def is_basic_type(obj):
    """Return True if *obj*'s exact type is one of the Python basic types.

    Note this is a strict type-identity check (``type(obj)``), not
    ``isinstance``: instances of subclasses of a basic type are rejected.
    """
    obj_type = type(obj)
    return obj_type in PYTHON_BASIC_TYPES

View file

@@ -1,7 +1,7 @@
import json
import orjson
from typing import Any, Callable, Dict, Sequence, Type, TYPE_CHECKING
from langchain.schema import Document
from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.base import BaseToolkit
@@ -40,12 +40,23 @@ if TYPE_CHECKING:
from langflow import CustomComponent
def build_vertex_in_params(params: Dict) -> Dict:
    """Return a copy of *params* with every Vertex value replaced by its built object.

    Non-Vertex values are passed through unchanged.
    """
    from langflow.graph.vertex.base import Vertex

    built_params = {}
    for name, value in params.items():
        if isinstance(value, Vertex):
            built_params[name] = value.build()
        else:
            built_params[name] = value
    return built_params
def instantiate_class(
node_type: str, base_type: str, params: Dict, user_id=None
) -> Any:
"""Instantiate class from module type and key, and params"""
params = convert_params_to_sets(params)
params = convert_kwargs(params)
if node_type in CUSTOM_NODES:
if custom_node := CUSTOM_NODES.get(node_type):
if hasattr(custom_node, "initialize"):
@@ -289,6 +300,13 @@ def instantiate_embedding(node_type, class_object, params: Dict):
def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict):
search_kwargs = params.pop("search_kwargs", {})
# clean up docs or texts to have only documents
if "texts" in params:
params["documents"] = params.pop("texts")
if "documents" in params:
params["documents"] = [
doc for doc in params["documents"] if isinstance(doc, Document)
]
if initializer := vecstore_initializer.get(class_object.__name__):
vecstore = initializer(class_object, params)
else:

View file

@@ -8,7 +8,7 @@ from langchain.vectorstores import (
SupabaseVectorStore,
MongoDBAtlasVectorSearch,
)
from langchain.schema import Document
import os
import orjson
@@ -201,11 +201,16 @@ def initialize_chroma(class_object: Type[Chroma], params: dict):
if "texts" in params:
params["documents"] = params.pop("texts")
for doc in params["documents"]:
if not isinstance(doc, Document):
# remove any non-Document objects from the list
params["documents"].remove(doc)
continue
if doc.metadata is None:
doc.metadata = {}
for key, value in doc.metadata.items():
if value is None:
doc.metadata[key] = ""
chromadb = class_object.from_documents(**params)
if persist:
chromadb.persist()

View file

@@ -3,7 +3,7 @@ from langflow.graph import Graph
from loguru import logger
def build_sorted_vertices(data_graph) -> Tuple[Any, Dict]:
def build_sorted_vertices(data_graph) -> Tuple[Graph, Dict]:
"""
Build langchain object from data_graph.
"""
@@ -16,7 +16,7 @@ def build_sorted_vertices(data_graph) -> Tuple[Any, Dict]:
vertex.build()
if vertex.artifacts:
artifacts.update(vertex.artifacts)
return graph.build(), artifacts
return graph, artifacts
def build_langchain_object(data_graph):