Merge branch 'new_project_modal' into zustand/io/migration

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-03-05 22:46:52 -03:00
commit 725dc1442e
41 changed files with 1638 additions and 119 deletions

View file

@ -11,12 +11,8 @@ from sqlmodel import Session, select
from langflow.api.utils import remove_api_keys, validate_is_component
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow import (
Flow,
FlowCreate,
FlowRead,
FlowUpdate,
)
from langflow.services.database.models.flow import (Flow, FlowCreate, FlowRead,
FlowUpdate)
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_settings_service

View file

@ -1,6 +1,6 @@
from typing import Any, Dict, Optional
from typing import Any, Dict
from langchain_community.document_loaders.url import UnstructuredURLLoader
from langchain_community.document_loaders.web_base import WebBaseLoader
from langflow import CustomComponent
from langflow.schema import Record
@ -8,7 +8,7 @@ from langflow.schema import Record
class URLComponent(CustomComponent):
display_name = "URL"
description = "Load a URL."
description = "Load URLs and convert them to records."
def build_config(self) -> Dict[str, Any]:
return {
@ -18,9 +18,9 @@ class URLComponent(CustomComponent):
async def build(
self,
urls: list[str],
) -> Optional[Record]:
) -> Record:
loader = UnstructuredURLLoader(urls=urls)
loader = WebBaseLoader(web_paths=urls)
docs = loader.load()
records = self.to_records(docs)
return records

View file

@ -52,7 +52,7 @@ class APIRequest(CustomComponent):
if method not in ["GET", "POST", "PATCH", "PUT"]:
raise ValueError(f"Unsupported method: {method}")
data = record.text if record else None
data = record.data if record else None
try:
response = await client.request(
method, url, headers=headers, content=data, timeout=timeout

View file

@ -1,5 +1,5 @@
import uuid
from typing import Text
from typing import Any, Text
from langflow import CustomComponent
@ -9,11 +9,20 @@ class UUIDGeneratorComponent(CustomComponent):
display_name = "Unique ID Generator"
description = "Generates a unique ID."
def generate(self, *args, **kwargs):
return Text(uuid.uuid4().hex)
def update_build_config(
self, build_config: dict, field_name: Text, field_value: Any
):
if field_name == "unique_id":
build_config[field_name]["value"] = str(uuid.uuid4())
return build_config
def build_config(self):
return {"unique_id": {"display_name": "Value", "value": self.generate}}
return {
"unique_id": {
"display_name": "Value",
"refresh": True,
}
}
def build(self, unique_id: str) -> str:
return unique_id

View file

@ -6,7 +6,7 @@ from langflow.schema import Record
class RecordsAsTextComponent(CustomComponent):
display_name = "Records to Text"
description = "Converts Records a list of Records to text using a template."
description = "Converts Records into single piece of text using a template."
def build_config(self):
return {
@ -16,7 +16,7 @@ class RecordsAsTextComponent(CustomComponent):
},
"template": {
"display_name": "Template",
"info": "The template to use for formatting the records. It must contain the keys {text} and {data}.",
"info": "The template to use for formatting the records. It can contain the keys {text}, {data} or any other key in the Record.",
},
}

View file

@ -1,8 +1,9 @@
from typing import List
from langchain.text_splitter import CharacterTextSplitter
from langchain_core.documents.base import Document
from langflow import CustomComponent
from langflow.schema.schema import Record
class CharacterTextSplitterComponent(CustomComponent):
@ -11,7 +12,7 @@ class CharacterTextSplitterComponent(CustomComponent):
def build_config(self):
return {
"documents": {"display_name": "Documents"},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"chunk_overlap": {"display_name": "Chunk Overlap", "default": 200},
"chunk_size": {"display_name": "Chunk Size", "default": 1000},
"separator": {"display_name": "Separator", "default": "\n"},
@ -19,17 +20,24 @@ class CharacterTextSplitterComponent(CustomComponent):
def build(
self,
documents: List[Document],
inputs: List[Record],
chunk_overlap: int = 200,
chunk_size: int = 1000,
separator: str = "\n",
) -> List[Document]:
) -> List[Record]:
# separator may come escaped from the frontend
separator = separator.encode().decode("unicode_escape")
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
docs = CharacterTextSplitter(
chunk_overlap=chunk_overlap,
chunk_size=chunk_size,
separator=separator,
).split_documents(documents)
self.status = docs
return docs
records = self.to_records(docs)
self.status = records
return records

View file

@ -1,23 +1,22 @@
from typing import Optional
from typing import List, Optional
from langchain.text_splitter import Language
from langchain_core.documents import Document
from langflow import CustomComponent
from langflow.schema.schema import Record
class LanguageRecursiveTextSplitterComponent(CustomComponent):
display_name: str = "Language Recursive Text Splitter"
description: str = "Split text into chunks of a specified length based on language."
documentation: str = "https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter"
documentation: str = (
"https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter"
)
def build_config(self):
options = [x.value for x in Language]
return {
"documents": {
"display_name": "Documents",
"info": "The documents to split.",
},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"separator_type": {
"display_name": "Separator Type",
"info": "The type of separator to use.",
@ -47,11 +46,11 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent):
def build(
self,
documents: list[Document],
inputs: List[Record],
chunk_size: Optional[int] = 1000,
chunk_overlap: Optional[int] = 200,
separator_type: str = "Python",
) -> list[Document]:
) -> list[Record]:
"""
Split text into chunks of a specified length.
@ -77,6 +76,12 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent):
chunk_size=chunk_size,
chunk_overlap=chunk_overlap,
)
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
docs = splitter.split_documents(documents)
return docs
records = self.to_records(docs)
return records

View file

@ -1,22 +1,26 @@
from typing import Optional
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_core.documents import Document
from langflow import CustomComponent
from langflow.schema import Record
from langflow.utils.util import build_loader_repr_from_documents
from langchain.text_splitter import RecursiveCharacterTextSplitter
class RecursiveCharacterTextSplitterComponent(CustomComponent):
display_name: str = "Recursive Character Text Splitter"
description: str = "Split text into chunks of a specified length."
documentation: str = "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter"
documentation: str = (
"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter"
)
def build_config(self):
return {
"documents": {
"display_name": "Documents",
"info": "The documents to split.",
"inputs": {
"display_name": "Input",
"info": "The texts to split.",
"input_types": ["Document", "Record"],
},
"separators": {
"display_name": "Separators",
@ -40,11 +44,11 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent):
def build(
self,
documents: list[Document],
inputs: list[Document],
separators: Optional[list[str]] = None,
chunk_size: Optional[int] = 1000,
chunk_overlap: Optional[int] = 200,
) -> list[Document]:
) -> list[Record]:
"""
Split text into chunks of a specified length.
@ -75,7 +79,12 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent):
chunk_size=chunk_size,
chunk_overlap=chunk_overlap,
)
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
docs = splitter.split_documents(documents)
self.repr_value = build_loader_repr_from_documents(docs)
return docs
return self.to_records(docs)

View file

@ -2,11 +2,12 @@ from typing import List, Optional, Union
import chromadb # type: ignore
from langchain.embeddings.base import Embeddings
from langchain.schema import BaseRetriever, Document
from langchain.schema import BaseRetriever
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.chroma import Chroma
from langflow import CustomComponent
from langflow.schema.schema import Record
class ChromaComponent(CustomComponent):
@ -31,7 +32,7 @@ class ChromaComponent(CustomComponent):
"collection_name": {"display_name": "Collection Name", "value": "langflow"},
"index_directory": {"display_name": "Persist Directory"},
"code": {"advanced": True, "display_name": "Code"},
"documents": {"display_name": "Documents", "is_list": True},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"chroma_server_cors_allow_origins": {
"display_name": "Server CORS Allow Origins",
@ -55,7 +56,7 @@ class ChromaComponent(CustomComponent):
embedding: Embeddings,
chroma_server_ssl_enabled: bool,
index_directory: Optional[str] = None,
documents: Optional[List[Document]] = None,
inputs: Optional[List[Record]] = None,
chroma_server_cors_allow_origins: Optional[str] = None,
chroma_server_host: Optional[str] = None,
chroma_server_port: Optional[int] = None,
@ -84,7 +85,8 @@ class ChromaComponent(CustomComponent):
if chroma_server_host is not None:
chroma_settings = chromadb.config.Settings(
chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,
chroma_server_cors_allow_origins=chroma_server_cors_allow_origins
or None,
chroma_server_host=chroma_server_host,
chroma_server_port=chroma_server_port or None,
chroma_server_grpc_port=chroma_server_grpc_port or None,
@ -97,9 +99,17 @@ class ChromaComponent(CustomComponent):
if index_directory is not None:
index_directory = self.resolve_path(index_directory)
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
if documents is not None and embedding is not None:
if len(documents) == 0:
raise ValueError("If documents are provided, there must be at least one document.")
raise ValueError(
"If documents are provided, there must be at least one document."
)
chroma = Chroma.from_documents(
documents=documents, # type: ignore
persist_directory=index_directory,

View file

@ -35,7 +35,6 @@ class ChromaSearchComponent(LCVectorStoreComponent):
# "persist": {"display_name": "Persist"},
"index_directory": {"display_name": "Index Directory"},
"code": {"show": False, "display_name": "Code"},
"documents": {"display_name": "Documents", "is_list": True},
"embedding": {
"display_name": "Embedding",
"info": "Embedding model to vectorize inputs (make sure to use same as index)",

View file

@ -5,7 +5,8 @@ from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.faiss import FAISS
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings
from langflow.field_typing import Embeddings
from langflow.schema.schema import Record
class FAISSComponent(CustomComponent):
@ -15,7 +16,7 @@ class FAISSComponent(CustomComponent):
def build_config(self):
return {
"documents": {"display_name": "Documents"},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"folder_path": {
"display_name": "Folder Path",
@ -27,10 +28,16 @@ class FAISSComponent(CustomComponent):
def build(
self,
embedding: Embeddings,
documents: List[Document],
inputs: List[Record],
folder_path: str,
index_name: str = "langflow_index",
) -> Union[VectorStore, FAISS, BaseRetriever]:
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
vector_store = FAISS.from_documents(documents=documents, embedding=embedding)
if not folder_path:
raise ValueError("Folder path is required to save the FAISS index.")

View file

@ -14,7 +14,6 @@ class FAISSSearchComponent(LCVectorStoreComponent):
def build_config(self):
return {
"documents": {"display_name": "Documents"},
"embedding": {"display_name": "Embedding"},
"folder_path": {
"display_name": "Folder Path",

View file

@ -3,17 +3,20 @@ from typing import List, Optional
from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings, NestedDict
from langflow.field_typing import Embeddings, NestedDict
from langflow.schema.schema import Record
class MongoDBAtlasComponent(CustomComponent):
display_name = "MongoDB Atlas"
description = "Construct a `MongoDB Atlas Vector Search` vector store from raw documents."
description = (
"Construct a `MongoDB Atlas Vector Search` vector store from raw documents."
)
icon = "MongoDB"
def build_config(self):
return {
"documents": {"display_name": "Documents"},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"collection_name": {"display_name": "Collection Name"},
"db_name": {"display_name": "Database Name"},
@ -25,7 +28,7 @@ class MongoDBAtlasComponent(CustomComponent):
def build(
self,
embedding: Embeddings,
documents: List[Document],
inputs: List[Record],
collection_name: str = "",
db_name: str = "",
index_name: str = "",
@ -36,12 +39,20 @@ class MongoDBAtlasComponent(CustomComponent):
try:
from pymongo import MongoClient
except ImportError:
raise ImportError("Please install pymongo to use MongoDB Atlas Vector Store")
raise ImportError(
"Please install pymongo to use MongoDB Atlas Vector Store"
)
try:
mongo_client: MongoClient = MongoClient(mongodb_atlas_cluster_uri)
collection = mongo_client[db_name][collection_name]
except Exception as e:
raise ValueError(f"Failed to connect to MongoDB Atlas: {e}")
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
if documents:
vector_store = MongoDBAtlasVectorSearch.from_documents(
documents=documents,

View file

@ -7,7 +7,8 @@ from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.pinecone import Pinecone
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings
from langflow.field_typing import Embeddings
from langflow.schema.schema import Record
class PineconeComponent(CustomComponent):
@ -17,7 +18,7 @@ class PineconeComponent(CustomComponent):
def build_config(self):
return {
"documents": {"display_name": "Documents"},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"index_name": {"display_name": "Index Name"},
"namespace": {"display_name": "Namespace"},
@ -44,7 +45,7 @@ class PineconeComponent(CustomComponent):
self,
embedding: Embeddings,
pinecone_env: str,
documents: List[Document],
inputs: List[Record],
text_key: str = "text",
pool_threads: int = 4,
index_name: Optional[str] = None,
@ -59,6 +60,12 @@ class PineconeComponent(CustomComponent):
pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) # type: ignore
if not index_name:
raise ValueError("Index Name is required.")
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
if documents:
return Pinecone.from_documents(
documents=documents,

View file

@ -3,8 +3,10 @@ from typing import Optional, Union
from langchain.schema import BaseRetriever
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.qdrant import Qdrant
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings, NestedDict
from langflow.field_typing import Embeddings, NestedDict
from langflow.schema.schema import Record
class QdrantComponent(CustomComponent):
@ -14,17 +16,23 @@ class QdrantComponent(CustomComponent):
def build_config(self):
return {
"documents": {"display_name": "Documents"},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"api_key": {"display_name": "API Key", "password": True, "advanced": True},
"collection_name": {"display_name": "Collection Name"},
"content_payload_key": {"display_name": "Content Payload Key", "advanced": True},
"content_payload_key": {
"display_name": "Content Payload Key",
"advanced": True,
},
"distance_func": {"display_name": "Distance Function", "advanced": True},
"grpc_port": {"display_name": "gRPC Port", "advanced": True},
"host": {"display_name": "Host", "advanced": True},
"https": {"display_name": "HTTPS", "advanced": True},
"location": {"display_name": "Location", "advanced": True},
"metadata_payload_key": {"display_name": "Metadata Payload Key", "advanced": True},
"metadata_payload_key": {
"display_name": "Metadata Payload Key",
"advanced": True,
},
"path": {"display_name": "Path", "advanced": True},
"port": {"display_name": "Port", "advanced": True},
"prefer_grpc": {"display_name": "Prefer gRPC", "advanced": True},
@ -38,7 +46,7 @@ class QdrantComponent(CustomComponent):
self,
embedding: Embeddings,
collection_name: str,
documents: Optional[Document] = None,
inputs: Optional[Record] = None,
api_key: Optional[str] = None,
content_payload_key: str = "page_content",
distance_func: str = "Cosine",
@ -55,6 +63,12 @@ class QdrantComponent(CustomComponent):
timeout: Optional[int] = None,
url: Optional[str] = None,
) -> Union[VectorStore, Qdrant, BaseRetriever]:
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
if documents is None:
from qdrant_client import QdrantClient

View file

@ -3,9 +3,10 @@ from typing import Optional, Union
from langchain.embeddings.base import Embeddings
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.redis import Redis
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever
from langflow import CustomComponent
from langflow.schema.schema import Record
class RedisComponent(CustomComponent):
@ -28,7 +29,7 @@ class RedisComponent(CustomComponent):
return {
"index_name": {"display_name": "Index Name", "value": "your_index"},
"code": {"show": False, "display_name": "Code"},
"documents": {"display_name": "Documents", "is_list": True},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"schema": {"display_name": "Schema", "file_types": [".yaml"]},
"redis_server_url": {
@ -44,7 +45,7 @@ class RedisComponent(CustomComponent):
redis_server_url: str,
redis_index_name: str,
schema: Optional[str] = None,
documents: Optional[Document] = None,
inputs: Optional[Record] = None,
) -> Union[VectorStore, BaseRetriever]:
"""
Builds the Vector Store or BaseRetriever object.
@ -58,9 +59,17 @@ class RedisComponent(CustomComponent):
Returns:
- VectorStore: The Vector Store object.
"""
if documents is None:
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
if not documents:
if schema is None:
raise ValueError("If no documents are provided, a schema must be provided.")
raise ValueError(
"If no documents are provided, a schema must be provided."
)
redis_vs = Redis.from_existing_index(
embedding=embedding,
index_name=redis_index_name,

View file

@ -33,7 +33,7 @@ class RedisSearchComponent(RedisComponent, LCVectorStoreComponent):
"input_value": {"display_name": "Input"},
"index_name": {"display_name": "Index Name", "value": "your_index"},
"code": {"show": False, "display_name": "Code"},
"documents": {"display_name": "Documents", "is_list": True},
"embedding": {"display_name": "Embedding"},
"schema": {"display_name": "Schema", "file_types": [".yaml"]},
"redis_server_url": {

View file

@ -3,10 +3,12 @@ from typing import List, Union
from langchain.schema import BaseRetriever
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.supabase import SupabaseVectorStore
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings, NestedDict
from supabase.client import Client, create_client
from langflow import CustomComponent
from langflow.field_typing import Embeddings, NestedDict
from langflow.schema.schema import Record
class SupabaseComponent(CustomComponent):
display_name = "Supabase"
@ -14,7 +16,7 @@ class SupabaseComponent(CustomComponent):
def build_config(self):
return {
"documents": {"display_name": "Documents"},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"query_name": {"display_name": "Query Name"},
"search_kwargs": {"display_name": "Search Kwargs", "advanced": True},
@ -26,14 +28,22 @@ class SupabaseComponent(CustomComponent):
def build(
self,
embedding: Embeddings,
documents: List[Document],
inputs: List[Record],
query_name: str = "",
search_kwargs: NestedDict = {},
supabase_service_key: str = "",
supabase_url: str = "",
table_name: str = "",
) -> Union[VectorStore, SupabaseVectorStore, BaseRetriever]:
supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key)
supabase: Client = create_client(
supabase_url, supabase_key=supabase_service_key
)
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
return SupabaseVectorStore.from_documents(
documents=documents,
embedding=embedding,

View file

@ -8,13 +8,16 @@ from langchain_community.vectorstores.vectara import Vectara
from langchain_core.vectorstores import VectorStore
from langflow import CustomComponent
from langflow.field_typing import BaseRetriever, Document
from langflow.field_typing import BaseRetriever
from langflow.schema.schema import Record
class VectaraComponent(CustomComponent):
display_name: str = "Vectara"
description: str = "Implementation of Vector Store using Vectara"
documentation = "https://python.langchain.com/docs/integrations/vectorstores/vectara"
documentation = (
"https://python.langchain.com/docs/integrations/vectorstores/vectara"
)
beta = True
icon = "Vectara"
field_config = {
@ -28,8 +31,9 @@ class VectaraComponent(CustomComponent):
"display_name": "Vectara API Key",
"password": True,
},
"documents": {
"display_name": "Documents",
"inputs": {
"display_name": "Input",
"input_types": ["Document", "Record"],
"info": "If provided, will be upserted to corpus (optional)",
},
"files_url": {
@ -44,11 +48,18 @@ class VectaraComponent(CustomComponent):
vectara_corpus_id: str,
vectara_api_key: str,
files_url: Optional[List[str]] = None,
documents: Optional[Document] = None,
inputs: Optional[Record] = None,
) -> Union[VectorStore, BaseRetriever]:
source = "Langflow"
if documents is not None:
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
if documents:
return Vectara.from_documents(
documents=documents, # type: ignore
embedding=FakeEmbeddings(size=768),

View file

@ -33,10 +33,6 @@ class VectaraSearchComponent(VectaraComponent, LCVectorStoreComponent):
"display_name": "Vectara API Key",
"password": True,
},
"documents": {
"display_name": "Documents",
"info": "If provided, will be upserted to corpus (optional)",
},
"files_url": {
"display_name": "Files Url",
"info": "Make vectara object using url of files (optional)",

View file

@ -2,16 +2,19 @@ from typing import Optional, Union
import weaviate # type: ignore
from langchain.embeddings.base import Embeddings
from langchain.schema import BaseRetriever, Document
from langchain.schema import BaseRetriever
from langchain_community.vectorstores import VectorStore, Weaviate
from langflow import CustomComponent
from langflow.schema.schema import Record
class WeaviateVectorStoreComponent(CustomComponent):
display_name: str = "Weaviate"
description: str = "Implementation of Vector Store using Weaviate"
documentation = "https://python.langchain.com/docs/integrations/vectorstores/weaviate"
documentation = (
"https://python.langchain.com/docs/integrations/vectorstores/weaviate"
)
beta = True
field_config = {
"url": {"display_name": "Weaviate URL", "value": "http://localhost:8080"},
@ -30,7 +33,7 @@ class WeaviateVectorStoreComponent(CustomComponent):
"advanced": True,
"value": "text",
},
"documents": {"display_name": "Documents", "is_list": True},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"attributes": {
"display_name": "Attributes",
@ -55,7 +58,7 @@ class WeaviateVectorStoreComponent(CustomComponent):
index_name: Optional[str] = None,
text_key: str = "text",
embedding: Optional[Embeddings] = None,
documents: Optional[Document] = None,
inputs: Optional[Record] = None,
attributes: Optional[list] = None,
) -> Union[VectorStore, BaseRetriever]:
if api_key:
@ -78,8 +81,14 @@ class WeaviateVectorStoreComponent(CustomComponent):
return pascal_case_word
index_name = _to_pascal_case(index_name) if index_name else None
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
if documents is not None and embedding is not None:
if documents and embedding is not None:
return Weaviate.from_documents(
client=client,
index_name=index_name,

View file

@ -39,7 +39,6 @@ class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreCompo
"advanced": True,
"value": "text",
},
"documents": {"display_name": "Documents", "is_list": True},
"embedding": {"display_name": "Embedding"},
"attributes": {
"display_name": "Attributes",

View file

@ -3,9 +3,10 @@ from typing import Optional, Union
from langchain.embeddings.base import Embeddings
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.pgvector import PGVector
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever
from langflow import CustomComponent
from langflow.schema.schema import Record
class PGVectorComponent(CustomComponent):
@ -15,7 +16,9 @@ class PGVectorComponent(CustomComponent):
display_name: str = "PGVector"
description: str = "Implementation of Vector Store using PostgreSQL"
documentation = "https://python.langchain.com/docs/integrations/vectorstores/pgvector"
documentation = (
"https://python.langchain.com/docs/integrations/vectorstores/pgvector"
)
def build_config(self):
"""
@ -26,7 +29,7 @@ class PGVectorComponent(CustomComponent):
"""
return {
"code": {"show": False},
"documents": {"display_name": "Documents", "is_list": True},
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
"pg_server_url": {
"display_name": "PostgreSQL Server Connection String",
@ -40,7 +43,7 @@ class PGVectorComponent(CustomComponent):
embedding: Embeddings,
pg_server_url: str,
collection_name: str,
documents: Optional[Document] = None,
inputs: Optional[Record] = None,
) -> Union[VectorStore, BaseRetriever]:
"""
Builds the Vector Store or BaseRetriever object.
@ -55,6 +58,12 @@ class PGVectorComponent(CustomComponent):
- VectorStore: The Vector Store object.
"""
documents = []
for _input in inputs:
if isinstance(_input, Record):
documents.append(_input.to_lc_document())
else:
documents.append(_input)
try:
if documents is None:
vector_store = PGVector.from_existing_index(

View file

@ -0,0 +1,115 @@
from datetime import datetime
from pathlib import Path
import orjson
from loguru import logger
from sqlmodel import select
from langflow.services.database.models.flow.model import Flow
from langflow.services.deps import session_scope
STARTER_FOLDER_NAME = "Starter Projects"
# In the folder ./starter_projects we have a few JSON files that represent
# starter projects. We want to load these into the database so that users
# can use them as a starting point for their own projects.
def load_starter_projects():
    """Read every starter-project JSON file bundled with the package.

    Scans the sibling ``starter_projects`` directory for ``*.json`` files
    and returns a list of the parsed project dicts.
    """
    projects_dir = Path(__file__).parent / "starter_projects"
    projects = []
    for path in projects_dir.glob("*.json"):
        projects.append(orjson.loads(path.read_text()))
        logger.info(f"Loaded starter project {path}")
    return projects
def get_project_data(project):
    """Extract the fields of interest from a parsed starter-project dict.

    Args:
        project: dict parsed from a starter-project JSON file.

    Returns:
        Tuple of (name, description, is_component, updated_at datetime or
        None, data). Any key absent from ``project`` yields ``None`` in the
        corresponding slot instead of raising.
    """
    project_name = project.get("name")
    project_description = project.get("description")
    project_is_component = project.get("is_component")
    project_updated_at = project.get("updated_at")
    # Guard against a missing/None "updated_at": strptime(None, ...) would
    # raise an opaque TypeError; a None datetime is a clearer signal.
    if project_updated_at is None:
        updated_at_datetime = None
    else:
        updated_at_datetime = datetime.strptime(
            project_updated_at, "%Y-%m-%dT%H:%M:%S.%f"
        )
    project_data = project.get("data")
    return (
        project_name,
        project_description,
        project_is_component,
        updated_at_datetime,
        project_data,
    )
def update_existing_project(
    existing_project,
    project_name,
    project_description,
    project_is_component,
    updated_at_datetime,
    project_data,
):
    """Overwrite an existing starter-project Flow in place with fresh values."""
    logger.info(f"Updating starter project {project_name}")
    new_values = {
        "data": project_data,
        "folder": STARTER_FOLDER_NAME,
        "description": project_description,
        "is_component": project_is_component,
        "updated_at": updated_at_datetime,
    }
    for attribute, value in new_values.items():
        setattr(existing_project, attribute, value)
def create_new_project(
    session,
    project_name,
    project_description,
    project_is_component,
    updated_at_datetime,
    project_data,
):
    """Add a brand-new starter-project Flow to the given session."""
    logger.info(f"Creating starter project {project_name}")
    session.add(
        Flow(
            name=project_name,
            description=project_description,
            is_component=project_is_component,
            updated_at=updated_at_datetime,
            folder=STARTER_FOLDER_NAME,
            data=project_data,
        )
    )
def create_or_update_starter_projects():
    """Sync the bundled starter projects into the database.

    For each starter-project JSON file: update the matching Flow in the
    "Starter Projects" folder if it already exists, otherwise create it.
    Projects without a name or data are skipped.
    """
    with session_scope() as session:
        for project in load_starter_projects():
            (
                project_name,
                project_description,
                project_is_component,
                updated_at_datetime,
                project_data,
            ) = get_project_data(project)
            # A project is only usable if it has both a name and flow data.
            if not (project_name and project_data):
                continue
            existing_project = session.exec(
                select(Flow).where(
                    Flow.name == project_name, Flow.folder == STARTER_FOLDER_NAME
                )
            ).first()
            if existing_project:
                update_existing_project(
                    existing_project,
                    project_name,
                    project_description,
                    project_is_component,
                    updated_at_datetime,
                    project_data,
                )
            else:
                create_new_project(
                    session,
                    project_name,
                    project_description,
                    project_is_component,
                    updated_at_datetime,
                    project_data,
                )

File diff suppressed because one or more lines are too long

View file

@ -8,7 +8,9 @@ from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from langflow.api import router
from langflow.initial_setup.setup import create_or_update_starter_projects
from langflow.interface.utils import setup_llm_caching
from langflow.services.plugins.langfuse_plugin import LangfuseInstance
from langflow.services.utils import initialize_services, teardown_services
@ -18,9 +20,12 @@ from langflow.utils.logger import configure
def get_lifespan(fix_migration=False, socketio_server=None):
@asynccontextmanager
async def lifespan(app: FastAPI):
initialize_services(fix_migration=fix_migration, socketio_server=socketio_server)
initialize_services(
fix_migration=fix_migration, socketio_server=socketio_server
)
setup_llm_caching()
LangfuseInstance.update()
create_or_update_starter_projects()
yield
teardown_services()
@ -31,7 +36,9 @@ def create_app():
"""Create the FastAPI app and include the router."""
configure()
socketio_server = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True)
socketio_server = socketio.AsyncServer(
async_mode="asgi", cors_allowed_origins="*", logger=True
)
lifespan = get_lifespan(socketio_server=socketio_server)
app = FastAPI(lifespan=lifespan)
origins = ["*"]
@ -98,7 +105,9 @@ def get_static_files_dir():
return frontend_path / "frontend"
def setup_app(static_files_dir: Optional[Path] = None, backend_only: bool = False) -> FastAPI:
def setup_app(
static_files_dir: Optional[Path] = None, backend_only: bool = False
) -> FastAPI:
"""Setup the FastAPI app."""
# get the directory of the current file
if not static_files_dir:
@ -114,6 +123,7 @@ def setup_app(static_files_dir: Optional[Path] = None, backend_only: bool = Fals
if __name__ == "__main__":
import uvicorn
from langflow.__main__ import get_number_of_workers
configure()

View file

@ -169,12 +169,12 @@ class DatabaseService(Service):
try:
command.check(alembic_cfg)
except util.exc.AutogenerateDiffsDetected as e:
except util.exc.AutogenerateDiffsDetected as exc:
logger.error(f"AutogenerateDiffsDetected: {exc}")
if not fix:
raise RuntimeError(
"Something went wrong running migrations. Please, run `langflow migration --fix`"
) from e
) from exc
if fix:
self.try_downgrade_upgrade_until_success(alembic_cfg)

View file

@ -1,3 +1,4 @@
from contextlib import contextmanager
from typing import TYPE_CHECKING, Generator
from langflow.services import ServiceType, service_manager
@ -54,6 +55,19 @@ def get_session() -> Generator["Session", None, None]:
yield from db_service.get_session()
@contextmanager
def session_scope():
    """Provide a transactional scope around a database session.

    Yields a session obtained from :func:`get_session`, commits when the
    ``with`` block completes normally, rolls back and re-raises on any
    error, and always closes the session afterwards.
    """
    session = next(get_session())
    try:
        yield session
        session.commit()
    except BaseException:
        # Roll back on *any* interruption — including GeneratorExit thrown
        # into the generator by contextmanager cleanup — before re-raising.
        # `except BaseException:` is behavior-identical to the original bare
        # `except:` while satisfying PEP 8 / ruff E722.
        session.rollback()
        raise
    finally:
        session.close()
def get_cache_service() -> "BaseCacheService":
return service_manager.get(ServiceType.CACHE_SERVICE) # type: ignore

View file

@ -44,7 +44,6 @@ export default function GenericNode({
const buildFlow = useFlowStore((state) => state.buildFlow);
const setNode = useFlowStore((state) => state.setNode);
const name = nodeIconsLucide[data.type] ? data.type : types[data.type];
console.log(types[data.type])
const [inputName, setInputName] = useState(false);
const [nodeName, setNodeName] = useState(data.node!.display_name);
const [inputDescription, setInputDescription] = useState(false);
@ -158,7 +157,7 @@ export default function GenericNode({
const iconElement = data?.node?.icon;
const iconColor = nodeColors[types[data.type]];
const iconName =
iconElement || (data.node?.flow ? "group_components" : name);
iconElement || (data.node?.flow ? "group_components" : name);
const iconClassName = `generic-node-icon ${
!showNode ? " absolute inset-x-6 h-12 w-12 " : ""
}`;

View file

@ -0,0 +1,54 @@
import { useEffect, useState } from "react";
import { getComponent, postLikeComponent } from "../../controllers/API";
import DeleteConfirmationModal from "../../modals/DeleteConfirmationModal";
import useAlertStore from "../../stores/alertStore";
import useFlowsManagerStore from "../../stores/flowsManagerStore";
import { useStoreStore } from "../../stores/storeStore";
import { storeComponent } from "../../types/store";
import cloneFLowWithParent from "../../utils/storeUtils";
import { cn } from "../../utils/utils";
import ShadTooltip from "../ShadTooltipComponent";
import IconComponent from "../genericIconComponent";
import { Badge } from "../ui/badge";
import { Button } from "../ui/button";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "../ui/card";
import { FlowType } from "../../types/flow";
import { useNavigate } from "react-router-dom";
/**
 * Card rendered in the "new flow" modal: a large plus button that creates
 * a blank flow and navigates straight to its editor.
 *
 * Takes no props, so the empty `({}: {})` destructuring from the original
 * signature is dropped (backward compatible — callers pass nothing).
 */
export default function NewFlowCardComponent() {
  const addFlow = useFlowsManagerStore((state) => state.addFlow);
  const navigate = useNavigate();

  return (
    <Card
      className={cn(
        "group relative h-48 w-2/6 flex flex-col justify-between overflow-hidden transition-all hover:shadow-md",
      )}
    >
      <CardContent className="w-full h-full flex align-middle items-center justify-center">
        <button
          onClick={() => {
            // `addFlow(true)` creates a brand-new empty flow and resolves
            // with its id; then open that flow's editor page.
            addFlow(true).then((id) => {
              navigate("/flow/" + id);
            });
          }}
        >
          <IconComponent
            className={cn("h-12 w-12 text-muted-foreground")}
            name="PlusCircle"
          />
        </button>
      </CardContent>
    </Card>
  );
}

View file

@ -0,0 +1,90 @@
import { useEffect, useState } from "react";
import { getComponent, postLikeComponent } from "../../controllers/API";
import DeleteConfirmationModal from "../../modals/DeleteConfirmationModal";
import useAlertStore from "../../stores/alertStore";
import useFlowsManagerStore from "../../stores/flowsManagerStore";
import { useStoreStore } from "../../stores/storeStore";
import { storeComponent } from "../../types/store";
import cloneFLowWithParent from "../../utils/storeUtils";
import { cn } from "../../utils/utils";
import ShadTooltip from "../ShadTooltipComponent";
import IconComponent from "../genericIconComponent";
import { Badge } from "../ui/badge";
import { Button } from "../ui/button";
import {
Card,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "../ui/card";
import { FlowType } from "../../types/flow";
import { updateIds } from "../../utils/reactflowUtils";
import { useNavigate } from "react-router-dom";
/**
 * Card showing a single example/starter flow (icon, name, description)
 * with a "Select Flow" action that clones it into the user's workspace
 * and opens the editor.
 *
 * NOTE(review): the `authorized` prop is declared in the type but never
 * read in this body — presumably reserved for later use; confirm.
 */
export default function CollectionCardComponent({
  flow,
}: {
  flow: FlowType;
  authorized?: boolean;
}) {
  const addFlow = useFlowsManagerStore((state) => state.addFlow);
  const navigate = useNavigate();

  return (
    <Card
      className={cn(
        "group relative h-48 w-2/6 flex flex-col justify-between overflow-hidden transition-all hover:shadow-md",
      )}
    >
      <div>
        <CardHeader>
          <div>
            <CardTitle className="flex w-full items-center justify-between gap-3 text-xl">
              <IconComponent
                className={cn(
                  "flex-shrink-0 h-7 w-7 text-flow-icon",
                )}
                name="Group"
              />
              {/* Tooltip shows the full name; the div truncates overflow. */}
              <ShadTooltip content={flow.name}>
                <div className="w-full truncate">{flow.name}</div>
              </ShadTooltip>
            </CardTitle>
          </div>
          <CardDescription className="pb-2 pt-2">
            <ShadTooltip side="bottom" styleClasses="z-50" content={flow.description}>
              <div className="truncate-doubleline">{flow.description}</div>
            </ShadTooltip>
          </CardDescription>
        </CardHeader>
      </div>
      <CardFooter>
        <div className="flex w-full items-center justify-between gap-2">
          <div className="flex w-full justify-end flex-wrap gap-2">
            <Button
              onClick={() => {
                {/* Regenerate node ids so the clone doesn't collide with
                    the template, then add it as a new flow and open it. */}
                updateIds(flow.data!)
                addFlow(true, flow).then((id) => {
                  navigate("/flow/" + id);
                });
              }}
              tabIndex={-1}
              variant="outline"
              size="sm"
              className="whitespace-nowrap "
            >
              <IconComponent
                name="ExternalLink"
                className="main-page-nav-button select-none"
              />
              Select Flow
            </Button>
          </div>
        </div>
      </CardFooter>
    </Card>
  );
}

View file

@ -727,7 +727,7 @@ export const STATUS_BUILD = "Build to validate status.";
export const STATUS_BUILDING = "Building...";
export const SAVED_HOVER = "Last saved at ";
export const RUN_TIMESTAMP_PREFIX = "Last Run: ";
export const STARTER_FOLDER_NAME = "Starter Projects";
export const PRIORITY_SIDEBAR_ORDER = [
"saved_components",
"inputs",

View file

@ -14,6 +14,7 @@ import {
import useAlertStore from "../../../../stores/alertStore";
import useFlowsManagerStore from "../../../../stores/flowsManagerStore";
import { FlowType } from "../../../../types/flow";
import { STARTER_FOLDER_NAME } from "../../../../constants/constants";
export default function ComponentsComponent({
is_component = true,
@ -24,6 +25,7 @@ export default function ComponentsComponent({
const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow);
const removeFlow = useFlowsManagerStore((state) => state.removeFlow);
const isLoading = useFlowsManagerStore((state) => state.isLoading);
const setExamples = useFlowsManagerStore((state) => state.setExamples);
const flows = useFlowsManagerStore((state) => state.flows);
const setSuccessData = useAlertStore((state) => state.setSuccessData);
const setErrorData = useAlertStore((state) => state.setErrorData);
@ -35,7 +37,7 @@ export default function ComponentsComponent({
useEffect(() => {
if (isLoading) return;
const all = flows
let all = flows
.filter((f) => (f.is_component ?? false) === is_component)
.sort((a, b) => {
if (a?.updated_at && b?.updated_at) {

View file

@ -1,5 +1,5 @@
import { Group, ToyBrick } from "lucide-react";
import { useEffect } from "react";
import { useEffect, useState } from "react";
import { Outlet, useLocation, useNavigate } from "react-router-dom";
import DropdownButton from "../../components/DropdownButtonComponent";
import IconComponent from "../../components/genericIconComponent";
@ -14,6 +14,9 @@ import {
import useAlertStore from "../../stores/alertStore";
import useFlowsManagerStore from "../../stores/flowsManagerStore";
import { downloadFlows } from "../../utils/reactflowUtils";
import BaseModal from "../../modals/baseModal";
import ExampleCardComponent from "../../components/exampleComponent";
import NewFlowCardComponent from "../../components/NewFlowCardComponent";
export default function HomePage(): JSX.Element {
const addFlow = useFlowsManagerStore((state) => state.addFlow);
const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow);
@ -25,6 +28,8 @@ export default function HomePage(): JSX.Element {
const setErrorData = useAlertStore((state) => state.setErrorData);
const location = useLocation();
const pathname = location.pathname;
const [openModal, setOpenModal] = useState(false);
const examples = useFlowsManagerStore((state) => state.examples);
const is_component = pathname === "/components";
const dropdownOptions = [
{
@ -36,9 +41,8 @@ export default function HomePage(): JSX.Element {
})
.then((id) => {
setSuccessData({
title: `${
is_component ? "Component" : "Flow"
} uploaded successfully`,
title: `${is_component ? "Component" : "Flow"
} uploaded successfully`,
});
if (!is_component) navigate("/flow/" + id);
})
@ -98,11 +102,7 @@ export default function HomePage(): JSX.Element {
</Button>
<DropdownButton
firstButtonName="New Project"
onFirstBtnClick={() => {
addFlow(true).then((id) => {
navigate("/flow/" + id);
});
}}
onFirstBtnClick={() => setOpenModal(true)}
options={dropdownOptions}
/>
</div>
@ -116,6 +116,27 @@ export default function HomePage(): JSX.Element {
<Outlet />
</div>
</div>
<BaseModal open={openModal} setOpen={setOpenModal}>
<BaseModal.Header description={"Select a template or start from scratch"}>
<span className="pr-2" data-testid="modal-title">
Create a New Flow
</span>
<IconComponent
name="Group"
className="h-6 w-6 text-primary stroke-2 "
aria-hidden="true"
/>
</BaseModal.Header>
<BaseModal.Content>
<div className="flex flex-wrap w-full h-full p-4 gap-3 overflow-auto custom-scroll">
{examples.map((example, idx) => {
return(
<ExampleCardComponent key={idx} flow={example} />)
})}
<NewFlowCardComponent/>
</div>
</BaseModal.Content>
</BaseModal>
</PageLayout>
);
}

View file

@ -563,7 +563,6 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
});
},
updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => {
console.log("updateBuildStatus", nodeIdList, status);
const newFlowBuildStatus = { ...get().flowBuildStatus };
nodeIdList.forEach((id) => {
newFlowBuildStatus[id] = {
@ -573,7 +572,6 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
const timestamp_string = new Date(Date.now()).toLocaleString();
newFlowBuildStatus[id].timestamp = timestamp_string;
}
console.log("updateBuildStatus", newFlowBuildStatus);
});
set({ flowBuildStatus: newFlowBuildStatus });
},

View file

@ -25,6 +25,7 @@ import useAlertStore from "./alertStore";
import { useDarkStore } from "./darkStore";
import useFlowStore from "./flowStore";
import { useTypesStore } from "./typesStore";
import { STARTER_FOLDER_NAME } from "../constants/constants";
let saveTimeoutId: NodeJS.Timeout | null = null;
@ -37,6 +38,10 @@ const past = {};
const future = {};
const useFlowsManagerStore = create<FlowsManagerStoreType>((set, get) => ({
examples:[],
setExamples: (examples: FlowType[]) => {
set({ examples });
},
currentFlowId: "",
setCurrentFlowId: (currentFlowId: string) => {
set((state) => ({
@ -62,7 +67,8 @@ const useFlowsManagerStore = create<FlowsManagerStoreType>((set, get) => ({
.then((dbData) => {
if (dbData) {
const { data, flows } = processFlows(dbData, false);
get().setFlows(flows);
get().setExamples(flows.filter(f=>(f.folder===STARTER_FOLDER_NAME && !f.user_id)));
get().setFlows(flows.filter(f=>!(f.folder===STARTER_FOLDER_NAME && !f.user_id)));
useTypesStore.setState((state) => ({
data: { ...state.data, ["saved_components"]: data },
}));

View file

@ -13,6 +13,8 @@ export type FlowType = {
updated_at?: string;
date_created?: string;
parent?: string;
folder?: string;
user_id?: string;
};
export type NodeType = {

View file

@ -44,6 +44,8 @@ export type FlowsManagerStoreType = {
undo: () => void;
redo: () => void;
takeSnapshot: () => void;
examples: Array<FlowType>;
setExamples: (examples: FlowType[]) => void;
};
export type UseUndoRedoOptions = {

View file

@ -87,6 +87,8 @@ import {
Pin,
Play,
Plus,
PlusCircle,
PlusSquare,
PocketKnife,
Redo,
RefreshCcw,
@ -393,6 +395,8 @@ export const nodeIconsLucide: iconsType = {
Circle,
CircleDot,
Clipboard,
PlusCircle,
PlusSquare,
Code2,
Variable,
Snowflake,