Merge remote-tracking branch 'origin/zustand/io/migration' into ioview
commit 27c97a52e6
71 changed files with 1044 additions and 724 deletions
poetry.lock (generated, 933 changes): file diff suppressed because it is too large.
@@ -44,9 +44,7 @@ huggingface-hub = { version = "^0.20.0", extras = ["inference"] }
 rich = "^13.7.0"
 llama-cpp-python = { version = "~0.2.0", optional = true }
 networkx = "^3.1"
-unstructured = "^0.12.0"
 pypdf = "^4.0.0"
 lxml = "^4.9.2"
 pysrt = "^1.1.2"
 fake-useragent = "^1.4.0"
 docstring-parser = "^0.15"
@@ -107,6 +105,7 @@ pytube = "^15.0.0"
 python-socketio = "^5.11.0"
 llama-index = "0.9.48"
 langchain-openai = "^0.0.6"
+unstructured = "^0.12.4"

 [tool.poetry.group.dev.dependencies]
 pytest-asyncio = "^0.23.1"
@@ -1,8 +1,14 @@
-from typing import Callable, Optional, Union
+from typing import Optional

 from langchain.chains import LLMChain

 from langflow import CustomComponent
-from langflow.field_typing import BaseLanguageModel, BaseMemory, BasePromptTemplate, Chain, Text
+from langflow.field_typing import (
+    BaseLanguageModel,
+    BaseMemory,
+    BasePromptTemplate,
+    Text,
+)


 class LLMChainComponent(CustomComponent):
@@ -22,5 +28,10 @@ class LLMChainComponent(CustomComponent):
         prompt: BasePromptTemplate,
         llm: BaseLanguageModel,
         memory: Optional[BaseMemory] = None,
-    ) -> Union[Chain, Callable, Text]:
-        return LLMChain(prompt=prompt, llm=llm, memory=memory)
+    ) -> Text:
+        runnable = LLMChain(prompt=prompt, llm=llm, memory=memory)
+        result_dict = runnable.invoke({})
+        output_key = runnable.output_key
+        result = result_dict[output_key]
+        self.status = result
+        return result
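Note on the hunk above: build() now executes the chain and returns plain text instead of returning the chain object. A minimal sketch of the new call contract, assuming prompt and llm were already built elsewhere in the flow (variable names are illustrative, not from this diff):

    # Sketch: exercising the new Text-returning contract (names illustrative).
    component = LLMChainComponent()
    text = component.build(prompt=prompt, llm=llm)  # invoke({}) runs the chain
    assert isinstance(text, str)  # Text behaves as a str alias in langflow
    print(component.status)  # status mirrors the returned text

Since invoke({}) is called with an empty dict, the prompt must not depend on unfilled input variables.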
@@ -3,6 +3,7 @@ from typing import Callable, Optional, Union
 from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
 from langchain.chains.retrieval_qa.base import BaseRetrievalQA, RetrievalQA
+from langchain_core.documents import Document

 from langflow import CustomComponent
 from langflow.field_typing import BaseMemory, BaseRetriever, Text

@@ -47,4 +48,11 @@ class RetrievalQAComponent(CustomComponent):
         result = result.content if hasattr(result, "content") else result
         # Result is a dict with keys "query", "result" and "source_documents"
         # for now we just return the result
-        return result.get("result")
+        records = self.to_records(result.get("source_documents"))
+        references_str = ""
+        if return_source_documents:
+            references_str = self.create_references_from_records(records)
+        result_str = result.get("result")
+        final_result = "\n".join([result_str, references_str])
+        self.status = final_result
+        return final_result
@@ -1,11 +1,11 @@
 from typing import Optional

 from langchain.chains import RetrievalQAWithSourcesChain
-from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain
+from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
+from langchain_core.documents import Document

 from langflow import CustomComponent
-from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever
+from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text


 class RetrievalQAWithSourcesChainComponent(CustomComponent):
@@ -31,8 +31,8 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent):
         chain_type: str,
         memory: Optional[BaseMemory] = None,
         return_source_documents: Optional[bool] = True,
-    ) -> BaseQAWithSourcesChain:
-        return RetrievalQAWithSourcesChain.from_chain_type(
+    ) -> Text:
+        runnable = RetrievalQAWithSourcesChain.from_chain_type(
             llm=llm,
             chain_type=chain_type,
             combine_documents_chain=combine_documents_chain,
@@ -40,3 +40,19 @@ class RetrievalQAWithSourcesChainComponent(CustomComponent):
             return_source_documents=return_source_documents,
             retriever=retriever,
         )
+        if isinstance(inputs, Document):
+            inputs = inputs.page_content
+        self.status = runnable
+        input_key = runnable.input_keys[0]
+        result = runnable.invoke({input_key: inputs})
+        result = result.content if hasattr(result, "content") else result
+        # Result is a dict with keys "query", "result" and "source_documents"
+        # for now we just return the result
+        records = self.to_records(result.get("source_documents"))
+        references_str = ""
+        if return_source_documents:
+            references_str = self.create_references_from_records(records)
+        result_str = result.get("result")
+        final_result = "\n".join([result_str, references_str])
+        self.status = final_result
+        return final_result
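For intuition, the tail of the new build() joins the answer with a markdown references block. A toy sketch of just that string assembly (values invented):

    # Sketch of the join performed above (values invented).
    result_str = "Paris is the capital of France."
    references_str = "---\n- Text: France article\n"
    final_result = "\n".join([result_str, references_str])
    # answer first, then the reference list

When return_source_documents is False, references_str stays "", so the join still appends a trailing newline to the answer.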
@@ -1,42 +0,0 @@
-from typing import Any, Dict, List
-
-from langchain.docstore.document import Document
-from langchain.document_loaders.directory import DirectoryLoader
-from langflow import CustomComponent
-
-
-class DirectoryLoaderComponent(CustomComponent):
-    display_name = "DirectoryLoader"
-    description = "Load from a directory."
-
-    def build_config(self) -> Dict[str, Any]:
-        return {
-            "glob": {"display_name": "Glob Pattern", "value": "**/*.txt"},
-            "load_hidden": {"display_name": "Load Hidden Files", "value": False, "advanced": True},
-            "max_concurrency": {"display_name": "Max Concurrency", "value": 10, "advanced": True},
-            "metadata": {"display_name": "Metadata", "value": {}},
-            "path": {"display_name": "Local Directory"},
-            "recursive": {"display_name": "Recursive", "value": True, "advanced": True},
-            "silent_errors": {"display_name": "Silent Errors", "value": False, "advanced": True},
-            "use_multithreading": {"display_name": "Use Multithreading", "value": True, "advanced": True},
-        }
-
-    def build(
-        self,
-        glob: str,
-        path: str,
-        max_concurrency: int = 2,
-        load_hidden: bool = False,
-        recursive: bool = True,
-        silent_errors: bool = False,
-        use_multithreading: bool = True,
-    ) -> List[Document]:
-        return DirectoryLoader(
-            glob=glob,
-            path=path,
-            load_hidden=load_hidden,
-            max_concurrency=max_concurrency,
-            recursive=recursive,
-            silent_errors=silent_errors,
-            use_multithreading=use_multithreading,
-        ).load()
@@ -1,4 +1,5 @@
+from langchain_core.documents import Document

 from langflow import CustomComponent
 from langflow.utils.constants import LOADERS_INFO

@@ -38,6 +39,7 @@ class FileLoaderComponent(CustomComponent):
             "srt",
             "eml",
             "md",
+            "mdx",
             "pptx",
             "docx",
         ],
@@ -55,6 +57,7 @@ class FileLoaderComponent(CustomComponent):
             ".srt",
             ".eml",
             ".md",
+            ".mdx",
             ".pptx",
             ".docx",
         ],
@@ -74,7 +77,7 @@ class FileLoaderComponent(CustomComponent):
     def build(self, file_path: str, loader: str) -> Document:
         file_type = file_path.split(".")[-1]

-        # Mapeie o nome do loader selecionado para suas informações
+        # Map the loader to the correct loader class
         selected_loader_info = None
         for loader_info in LOADERS_INFO:
             if loader_info["name"] == loader:
@@ -85,7 +88,7 @@ class FileLoaderComponent(CustomComponent):
             raise ValueError(f"Loader {loader} not found in the loader info list")

         if loader == "Automatic":
-            # Determine o loader automaticamente com base na extensão do arquivo
+            # Determine the loader based on the file type
             default_loader_info = None
             for info in LOADERS_INFO:
                 if "defaultFor" in info and file_type in info["defaultFor"]:
@@ -103,7 +106,7 @@ class FileLoaderComponent(CustomComponent):
         module_name, class_name = loader_import.rsplit(".", 1)

         try:
-            # Importe o loader dinamicamente
+            # Import the loader class
             loader_module = __import__(module_name, fromlist=[class_name])
             loader_instance = getattr(loader_module, class_name)
         except ImportError as e:
src/backend/langflow/components/documentloaders/GatherRecords.py (new file, 129 lines)
@@ -0,0 +1,129 @@
+from concurrent import futures
+from pathlib import Path
+from typing import Any, Dict, List
+
+from langflow import CustomComponent
+from langflow.schema import Record
+
+
+class GatherRecordsComponent(CustomComponent):
+    display_name = "Gather Records"
+    description = "Gather records from a directory."
+
+    def build_config(self) -> Dict[str, Any]:
+        return {
+            "load_hidden": {
+                "display_name": "Load Hidden Files",
+                "value": False,
+                "advanced": True,
+            },
+            "max_concurrency": {
+                "display_name": "Max Concurrency",
+                "value": 10,
+                "advanced": True,
+            },
+            "path": {"display_name": "Local Directory"},
+            "recursive": {"display_name": "Recursive", "value": True, "advanced": True},
+            "use_multithreading": {
+                "display_name": "Use Multithreading",
+                "value": True,
+                "advanced": True,
+            },
+        }
+
+    def is_hidden(self, path: Path) -> bool:
+        return path.name.startswith(".")
+
+    def retrieve_file_paths(
+        self,
+        path: str,
+        types: List[str],
+        load_hidden: bool,
+        recursive: bool,
+        depth: int,
+    ) -> List[str]:
+        path_obj = Path(path)
+        if not path_obj.exists() or not path_obj.is_dir():
+            raise ValueError(f"Path {path} must exist and be a directory.")
+
+        def match_types(p: Path) -> bool:
+            return any(p.suffix == f".{t}" for t in types) if types else True
+
+        def is_not_hidden(p: Path) -> bool:
+            return not self.is_hidden(p) or load_hidden
+
+        def walk_level(directory: Path, max_depth: int):
+            directory = directory.resolve()
+            prefix_length = len(directory.parts)
+            for p in directory.rglob("*" if recursive else "[!.]*"):
+                if len(p.parts) - prefix_length <= max_depth:
+                    yield p
+
+        glob = "**/*" if recursive else "*"
+        paths = walk_level(path_obj, depth) if depth else path_obj.glob(glob)
+        file_paths = [str(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)]
+
+        return file_paths
+
+    def parse_file_to_record(self, file_path: str, silent_errors: bool) -> Record:
+        # Use the partition function to load the file
+        from unstructured.partition.auto import partition
+
+        try:
+            elements = partition(file_path)
+        except Exception as e:
+            if not silent_errors:
+                raise ValueError(f"Error loading file {file_path}: {e}") from e
+            return None
+
+        # Create a Record
+        text = "\n\n".join([str(el) for el in elements])
+        metadata = elements.metadata if hasattr(elements, "metadata") else {}
+        metadata["file_path"] = file_path
+        record = Record(text=text, data=metadata)
+        return record
+
+    def get_elements(
+        self,
+        file_paths: List[str],
+        silent_errors: bool,
+        max_concurrency: int,
+        use_multithreading: bool,
+    ) -> List[Record]:
+        if use_multithreading:
+            records = self.parallel_load_records(file_paths, silent_errors, max_concurrency)
+        else:
+            records = [self.parse_file_to_record(file_path, silent_errors) for file_path in file_paths]
+        records = list(filter(None, records))
+        return records
+
+    def parallel_load_records(self, file_paths: List[str], silent_errors: bool, max_concurrency: int) -> List[Record]:
+        with futures.ThreadPoolExecutor(max_workers=max_concurrency) as executor:
+            loaded_files = executor.map(
+                lambda file_path: self.parse_file_to_record(file_path, silent_errors),
+                file_paths,
+            )
+        return loaded_files
+
+    def build(
+        self,
+        path: str,
+        types: List[str] = None,
+        depth: int = 0,
+        max_concurrency: int = 2,
+        load_hidden: bool = False,
+        recursive: bool = True,
+        silent_errors: bool = False,
+        use_multithreading: bool = True,
+    ) -> List[Record]:
+        resolved_path = self.resolve_path(path)
+        file_paths = self.retrieve_file_paths(resolved_path, types, load_hidden, recursive, depth)
+        loaded_records = []
+
+        if use_multithreading:
+            loaded_records = self.parallel_load_records(file_paths, silent_errors, max_concurrency)
+        else:
+            loaded_records = [self.parse_file_to_record(file_path, silent_errors) for file_path in file_paths]
+        loaded_records = list(filter(None, loaded_records))
+        self.status = loaded_records
+        return loaded_records
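A quick usage sketch of the new component (directory layout and file types invented; resolve_path is assumed to be the CustomComponent path helper):

    # Illustrative call into GatherRecordsComponent.
    component = GatherRecordsComponent()
    records = component.build(
        path="./docs",  # hypothetical directory to scan
        types=["md", "txt"],  # keep only these suffixes
        depth=2,  # walk at most two levels deep
        use_multithreading=False,  # parse files sequentially
    )
    print(len(records), records[0].text[:80] if records else "no matches")

Note that depth=0 falls back to a plain glob, while any non-zero depth routes through walk_level above.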
@@ -1,7 +1,9 @@
 from typing import Optional

-from langchain.embeddings import BedrockEmbeddings
 from langchain.embeddings.base import Embeddings
+from langchain_community.embeddings import BedrockEmbeddings

 from langflow import CustomComponent

@@ -1,5 +1,5 @@
 from langflow import CustomComponent
-from langchain.embeddings import VertexAIEmbeddings
+from langchain_community.embeddings import VertexAIEmbeddings
 from typing import Optional, List

@@ -9,17 +9,45 @@ class VertexAIEmbeddingsComponent(CustomComponent):

     def build_config(self):
         return {
-            "credentials": {"display_name": "Credentials", "value": "", "file_types": [".json"], "field_type": "file"},
-            "instance": {"display_name": "instance", "advanced": True, "field_type": "dict"},
-            "location": {"display_name": "Location", "value": "us-central1", "advanced": True},
+            "credentials": {
+                "display_name": "Credentials",
+                "value": "",
+                "file_types": [".json"],
+                "field_type": "file",
+            },
+            "instance": {
+                "display_name": "instance",
+                "advanced": True,
+                "field_type": "dict",
+            },
+            "location": {
+                "display_name": "Location",
+                "value": "us-central1",
+                "advanced": True,
+            },
             "max_output_tokens": {"display_name": "Max Output Tokens", "value": 128},
-            "max_retries": {"display_name": "Max Retries", "value": 6, "advanced": True},
-            "model_name": {"display_name": "Model Name", "value": "textembedding-gecko"},
+            "max_retries": {
+                "display_name": "Max Retries",
+                "value": 6,
+                "advanced": True,
+            },
+            "model_name": {
+                "display_name": "Model Name",
+                "value": "textembedding-gecko",
+            },
             "n": {"display_name": "N", "value": 1, "advanced": True},
             "project": {"display_name": "Project", "advanced": True},
-            "request_parallelism": {"display_name": "Request Parallelism", "value": 5, "advanced": True},
+            "request_parallelism": {
+                "display_name": "Request Parallelism",
+                "value": 5,
+                "advanced": True,
+            },
             "stop": {"display_name": "Stop", "advanced": True},
-            "streaming": {"display_name": "Streaming", "value": False, "advanced": True},
+            "streaming": {
+                "display_name": "Streaming",
+                "value": False,
+                "advanced": True,
+            },
             "temperature": {"display_name": "Temperature", "value": 0.0},
             "top_k": {"display_name": "Top K", "value": 40, "advanced": True},
             "top_p": {"display_name": "Top P", "value": 0.95, "advanced": True},
@@ -9,9 +9,6 @@ class TextInput(CustomComponent):
     description = "Used to pass text input to the next component."

     field_config = {
-        "code": {
-            "show": False,
-        },
         "value": {"display_name": "Value"},
     }

src/backend/langflow/components/io/TextOutput.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+from typing import Optional
+
+from langflow import CustomComponent
+from langflow.field_typing import Text
+
+
+class TextOutput(CustomComponent):
+    display_name = "Text Output"
+    description = "Used to pass text output to the next component."
+
+    field_config = {
+        "value": {"display_name": "Value"},
+    }
+
+    def build(self, value: Optional[str] = "") -> Text:
+        self.status = value
+        if not value:
+            value = ""
+        return value
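For context, a round-trip sketch of the new output component (values invented):

    # Illustrative use of TextOutput.build.
    text_out = TextOutput()
    assert text_out.build(value="hello") == "hello"
    assert text_out.build(value=None) == ""  # falsy input is normalized to ""
    # Note: self.status is set before the falsy check, so status may hold None
    # even when the returned value is "".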
@@ -1,7 +1,9 @@
 from typing import Optional

 from langchain.llms.base import BaseLLM
-from langchain.llms.bedrock import Bedrock
+from langchain_community.llms.bedrock import Bedrock

 from langflow import CustomComponent

@@ -1,13 +1,17 @@
 from typing import Optional
-from langflow import CustomComponent

 from langchain.llms.base import BaseLanguageModel
 from langchain_community.chat_models.azure_openai import AzureChatOpenAI

+from langflow import CustomComponent
+

-class AzureChatOpenAIComponent(CustomComponent):
+class AzureChatOpenAISpecsComponent(CustomComponent):
     display_name: str = "AzureChatOpenAI"
     description: str = "LLM model from Azure OpenAI."
-    documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai"
+    documentation: str = (
+        "https://python.langchain.com/docs/integrations/llms/azure_openai"
+    )
+    beta = False

     AZURE_OPENAI_MODELS = [
@@ -1,13 +1,13 @@
 from typing import Optional

 from langchain_community.chat_models.bedrock import BedrockChat
-from langflow.field_typing import Text

 from langflow import CustomComponent
+from langflow.field_typing import Text


 class AmazonBedrockComponent(CustomComponent):
-    display_name: str = "Amazon Bedrock model"
+    display_name: str = "Amazon Bedrock Model"
     description: str = "Generate text using LLM model from Amazon Bedrock."

     def build_config(self):
@@ -2,14 +2,13 @@ from typing import Optional

 from langchain_community.chat_models.anthropic import ChatAnthropic
 from pydantic.v1 import SecretStr

-from langflow import CustomComponent
-from langflow.field_typing import Text

+from langflow import CustomComponent


 class AnthropicLLM(CustomComponent):
-    display_name: str = "Anthropic model"
+    display_name: str = "AnthropicModel"
     description: str = "Generate text using Anthropic Chat&Completion large language models."

     def build_config(self):
@@ -67,7 +66,7 @@ class AnthropicLLM(CustomComponent):
         try:
             output = ChatAnthropic(
                 model_name=model,
-                anthropic_api_key=SecretStr(anthropic_api_key) if anthropic_api_key else None,
+                anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),
                 max_tokens_to_sample=max_tokens,  # type: ignore
                 temperature=temperature,
                 anthropic_api_url=api_endpoint,
@@ -7,9 +7,11 @@ from langflow import CustomComponent


 class AzureChatOpenAIComponent(CustomComponent):
-    display_name: str = "AzureOpenAI model"
+    display_name: str = "AzureOpenAI Model"
     description: str = "Generate text using LLM model from Azure OpenAI."
-    documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai"
+    documentation: str = (
+        "https://python.langchain.com/docs/integrations/llms/azure_openai"
+    )
     beta = False

     AZURE_OPENAI_MODELS = [
@@ -1,13 +1,13 @@
 from typing import Dict, Optional

 from langchain_community.llms.ctransformers import CTransformers
-from langflow.field_typing import Text

 from langflow import CustomComponent
+from langflow.field_typing import Text


 class CTransformersComponent(CustomComponent):
-    display_name = "CTransformers model"
+    display_name = "CTransformersModel"
     description = "Generate text using CTransformers LLM models"
     documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers"

@@ -31,7 +31,14 @@ class CTransformersComponent(CustomComponent):
             "inputs": {"display_name": "Input"},
         }

-    def build(self, model: str, model_file: str, inputs: str, model_type: str, config: Optional[Dict] = None) -> Text:
+    def build(
+        self,
+        model: str,
+        model_file: str,
+        inputs: str,
+        model_type: str,
+        config: Optional[Dict] = None,
+    ) -> Text:
         output = CTransformers(model=model, model_file=model_file, model_type=model_type, config=config)
         message = output.invoke(inputs)
         result = message.content if hasattr(message, "content") else message
@@ -1,18 +1,33 @@
 from langchain_community.chat_models.cohere import ChatCohere

 from langflow import CustomComponent
 from langflow.field_typing import Text


 class CohereComponent(CustomComponent):
-    display_name = "Cohere model"
+    display_name = "CohereModel"
     description = "Generate text using Cohere large language models."
     documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere"

     def build_config(self):
         return {
-            "cohere_api_key": {"display_name": "Cohere API Key", "type": "password", "password": True},
-            "max_tokens": {"display_name": "Max Tokens", "default": 256, "type": "int", "show": True},
-            "temperature": {"display_name": "Temperature", "default": 0.75, "type": "float", "show": True},
+            "cohere_api_key": {
+                "display_name": "Cohere API Key",
+                "type": "password",
+                "password": True,
+            },
+            "max_tokens": {
+                "display_name": "Max Tokens",
+                "default": 256,
+                "type": "int",
+                "show": True,
+            },
+            "temperature": {
+                "display_name": "Temperature",
+                "default": 0.75,
+                "type": "float",
+                "show": True,
+            },
             "inputs": {"display_name": "Input"},
         }

@@ -23,8 +38,13 @@ class CohereComponent(CustomComponent):
         max_tokens: int = 256,
         temperature: float = 0.75,
     ) -> Text:
-        output = ChatCohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature)
+        output = ChatCohere(
+            cohere_api_key=cohere_api_key,
+            max_tokens=max_tokens,
+            temperature=temperature,
+        )
         message = output.invoke(inputs)
         result = message.content if hasattr(message, "content") else message
         self.status = result
         return result
+        return result
@@ -1,14 +1,14 @@
 from typing import Optional

 from langchain_google_genai import ChatGoogleGenerativeAI  # type: ignore
-from langflow import CustomComponent
-from langflow.field_typing import RangeSpec
 from pydantic.v1.types import SecretStr
-from langflow.field_typing import Text

+from langflow import CustomComponent
+from langflow.field_typing import RangeSpec, Text


 class GoogleGenerativeAIComponent(CustomComponent):
-    display_name: str = "Google Generative AI model"
+    display_name: str = "Google Generative AIModel"
     description: str = "Generate text using Google Generative AI to generate text."
     documentation: str = "http://docs.langflow.org/components/custom"

@@ -1,7 +1,10 @@
 from typing import Optional
-from langflow import CustomComponent
-from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint

+from langchain_community.chat_models.huggingface import ChatHuggingFace
+from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
+
+from langflow import CustomComponent
+from langflow.field_typing import Text

@@ -1,11 +1,13 @@
-from typing import Optional, List, Dict, Any
-from langflow import CustomComponent
+from typing import Any, Dict, List, Optional

 from langchain_community.llms.llamacpp import LlamaCpp

+from langflow import CustomComponent
+from langflow.field_typing import Text


 class LlamaCppComponent(CustomComponent):
-    display_name = "LlamaCpp model"
+    display_name = "LlamaCppModel"
     description = "Generate text using llama.cpp model."
     documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp"

@@ -17,7 +19,10 @@ class LlamaCppComponent(CustomComponent):
             "echo": {"display_name": "Echo", "advanced": True},
             "f16_kv": {"display_name": "F16 KV", "advanced": True},
             "grammar_path": {"display_name": "Grammar Path", "advanced": True},
-            "last_n_tokens_size": {"display_name": "Last N Tokens Size", "advanced": True},
+            "last_n_tokens_size": {
+                "display_name": "Last N Tokens Size",
+                "advanced": True,
+            },
             "logits_all": {"display_name": "Logits All", "advanced": True},
             "logprobs": {"display_name": "Logprobs", "advanced": True},
             "lora_base": {"display_name": "Lora Base", "advanced": True},
@@ -134,3 +139,5 @@ class LlamaCppComponent(CustomComponent):
         result = message.content if hasattr(message, "content") else message
         self.status = result
         return result
+        self.status = result
+        return result
@@ -12,7 +12,7 @@ from langflow.field_typing import Text


 class ChatOllamaComponent(CustomComponent):
-    display_name = "ChatOllama model"
+    display_name = "ChatOllamaModel"
     description = "Generate text using Local LLM for chat with Ollama."

     def build_config(self) -> dict:
@@ -7,7 +7,7 @@ from langflow.field_typing import Text


 class ChatVertexAIComponent(CustomComponent):
-    display_name = "ChatVertexAI model"
+    display_name = "ChatVertexAIModel"
     description = "Generate text using Vertex AI Chat large language models API."

     def build_config(self):
@@ -1,7 +1,9 @@
 from typing import Optional
-from langflow import CustomComponent
-from langchain.retrievers import AmazonKendraRetriever

 from langchain.schema import BaseRetriever
+from langchain_community.retrievers import AmazonKendraRetriever
+
+from langflow import CustomComponent


 class AmazonKendraRetrieverComponent(CustomComponent):
@@ -1,9 +1,11 @@
 from typing import Optional
-from langflow import CustomComponent
-from langchain.retrievers import MetalRetriever

 from langchain.schema import BaseRetriever
+from langchain_community.retrievers import MetalRetriever
 from metal_sdk.metal import Metal  # type: ignore

+from langflow import CustomComponent


 class MetalRetrieverComponent(CustomComponent):
     display_name: str = "Metal Retriever"
@@ -0,0 +1,22 @@
+from typing import List
+
+from langchain_core.documents import Document
+
+from langflow import CustomComponent
+from langflow.schema import Record
+
+
+class DocumentToRecordComponent(CustomComponent):
+    display_name = "Documents to Records"
+    description = "Convert documents to records."
+
+    field_config = {
+        "documents": {"display_name": "Documents"},
+    }
+
+    def build(self, documents: List[Document]) -> List[Record]:
+        if isinstance(documents, Document):
+            documents = [documents]
+        records = [Record.from_document(document) for document in documents]
+        self.status = records
+        return records
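As a quick illustration of the converter above (the document contents are made up):

    # Illustrative conversion of a single Document into Records.
    from langchain_core.documents import Document

    doc = Document(page_content="langflow records demo", metadata={"source": "demo.txt"})
    component = DocumentToRecordComponent()
    records = component.build(documents=doc)  # a lone Document is wrapped in a list
    # Record.from_document is expected to map page_content -> text and metadata -> data.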
@@ -109,32 +109,6 @@ embeddings:
   OllamaEmbeddings:
     documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/ollama"

-llms:
-  OpenAI:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai"
-  ChatOpenAI:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai"
-  LlamaCpp:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp"
-  CTransformers:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers"
-  Cohere:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere"
-  Anthropic:
-    documentation: ""
-  ChatAnthropic:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic"
-  HuggingFaceHub:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/huggingface_hub"
-  VertexAI:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/google_vertex_ai_palm"
-  ###
-  # There's a bug in this component deactivating until we get it sorted: _language_models.py", line 804, in send_message
-  #   is_blocked=safety_attributes.get("blocked", False),
-  # AttributeError: 'list' object has no attribute 'get'
-  ChatVertexAI:
-    documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/google_vertex_ai_palm"
-  ###
 memories:
   # https://github.com/supabase-community/supabase-py/issues/482
   # ZepChatMessageHistory:
@@ -13,6 +13,7 @@ from langflow.graph.vertex.types import (
     ChatVertex,
     FileToolVertex,
     LLMVertex,
+    RoutingVertex,
     ToolkitVertex,
 )
 from langflow.interface.tools.constants import FILE_TOOLS

@@ -402,10 +403,12 @@ class Graph:
         node_name = node_id.split("-")[0]
         if node_name in ["ChatOutput", "ChatInput"]:
             return ChatVertex
-        if node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
-            return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name]
+        elif node_name in ["ShouldRunNext"]:
+            return RoutingVertex
         elif node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
             return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_base_type]
+        elif node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
+            return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name]

         if node_type in FILE_TOOLS:
             return FileToolVertex
@@ -72,11 +72,17 @@ class Vertex:

     def set_state(self, state: str):
         self.state = VertexStates[state]
-        if self.state == VertexStates.INACTIVE and self.graph.in_degree_map[self.id] < 2:
+        if (
+            self.state == VertexStates.INACTIVE
+            and self.graph.in_degree_map[self.id] < 2
+        ):
             # If the vertex is inactive and has only one in degree
             # it means that it is not a merge point in the graph
             self.graph.inactive_vertices.add(self.id)
-        elif self.state == VertexStates.ACTIVE and self.id in self.graph.inactive_vertices:
+        elif (
+            self.state == VertexStates.ACTIVE
+            and self.id in self.graph.inactive_vertices
+        ):
             self.graph.inactive_vertices.remove(self.id)

     @property
@@ -104,7 +110,9 @@ class Vertex:
         ):
             if edge.target_id not in edge_results:
                 edge_results[edge.target_id] = {}
-            edge_results[edge.target_id][edge.target_param] = await edge.get_result(source=self, target=target)
+            edge_results[edge.target_id][edge.target_param] = await edge.get_result(
+                source=self, target=target
+            )
         return edge_results

     def set_result(self, result: "ResultData") -> None:
@@ -114,7 +122,9 @@ class Vertex:
         # If the Vertex.type is a power component
         # then we need to return the built object
         # instead of the result dict
-        if self.is_interface_component and not isinstance(self._built_object, UnbuiltObject):
+        if self.is_interface_component and not isinstance(
+            self._built_object, UnbuiltObject
+        ):
             result = self._built_object
             # if it is not a dict or a string and hasattr model_dump then
             # return the model_dump
@@ -124,7 +134,11 @@ class Vertex:

         if isinstance(self._built_result, UnbuiltResult):
             return {}
-        return self._built_result if isinstance(self._built_result, dict) else {"result": self._built_result}
+        return (
+            self._built_result
+            if isinstance(self._built_result, dict)
+            else {"result": self._built_result}
+        )

     def set_artifacts(self) -> None:
         pass
@@ -185,18 +199,31 @@ class Vertex:
     def _parse_data(self) -> None:
         self.data = self._data["data"]
         self.output = self.data["node"]["base_classes"]
         self.display_name = self.data["node"]["display_name"]
         self.pinned = self.data["node"].get("pinned", False)
-        template_dicts = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)}
+        template_dicts = {
+            key: value
+            for key, value in self.data["node"]["template"].items()
+            if isinstance(value, dict)
+        }

         self.required_inputs = [
-            template_dicts[key]["type"] for key, value in template_dicts.items() if value["required"]
+            template_dicts[key]["type"]
+            for key, value in template_dicts.items()
+            if value["required"]
         ]
         self.optional_inputs = [
-            template_dicts[key]["type"] for key, value in template_dicts.items() if not value["required"]
+            template_dicts[key]["type"]
+            for key, value in template_dicts.items()
+            if not value["required"]
        ]
         # Add the template_dicts[key]["input_types"] to the optional_inputs
         self.optional_inputs.extend(
-            [input_type for value in template_dicts.values() for input_type in value.get("input_types", [])]
+            [
+                input_type
+                for value in template_dicts.values()
+                for input_type in value.get("input_types", [])
+            ]
         )

         template_dict = self.data["node"]["template"]
@@ -239,7 +266,11 @@ class Vertex:
         if self.graph is None:
             raise ValueError("Graph not found")

-        template_dict = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)}
+        template_dict = {
+            key: value
+            for key, value in self.data["node"]["template"].items()
+            if isinstance(value, dict)
+        }
         params = {}

         for edge in self.edges:
@@ -290,7 +321,11 @@ class Vertex:
                 # list of dicts, so we need to convert it to a dict
                 # before passing it to the build method
                 if isinstance(val, list):
-                    params[key] = {k: v for item in value.get("value", []) for k, v in item.items()}
+                    params[key] = {
+                        k: v
+                        for item in value.get("value", [])
+                        for k, v in item.items()
+                    }
                 elif isinstance(val, dict):
                     params[key] = val
                 elif value.get("type") == "int" and val is not None:
@@ -350,7 +385,9 @@ class Vertex:
         if isinstance(self._built_object, str):
             self._built_result = self._built_object

-        result = await generate_result(self._built_object, inputs, self.has_external_output, session_id)
+        result = await generate_result(
+            self._built_object, inputs, self.has_external_output, session_id
+        )
         self._built_result = result

     async def _build_each_node_in_params_dict(self, user_id=None):
@@ -378,7 +415,9 @@ class Vertex:
         """
         return all(self._is_node(node) for node in value)

-    async def get_result(self, requester: Optional["Vertex"] = None, user_id=None, timeout=None) -> Any:
+    async def get_result(
+        self, requester: Optional["Vertex"] = None, user_id=None, timeout=None
+    ) -> Any:
         # PLEASE REVIEW THIS IF STATEMENT
         # Check if the Vertex was built already
         if self._built:
@@ -412,7 +451,9 @@ class Vertex:
             self._extend_params_list_with_result(key, result)
         self.params[key] = result

-    async def _build_list_of_nodes_and_update_params(self, key, nodes: List["Vertex"], user_id=None):
+    async def _build_list_of_nodes_and_update_params(
+        self, key, nodes: List["Vertex"], user_id=None
+    ):
         """
         Iterates over a list of nodes, builds each and updates the params dictionary.
         """
@@ -464,7 +505,9 @@ class Vertex:
             self._update_built_object_and_artifacts(result)
         except Exception as exc:
             logger.exception(exc)
-            raise ValueError(f"Error building node {self.vertex_type}(ID:{self.id}): {str(exc)}") from exc
+            raise ValueError(
+                f"Error building node {self.display_name}: {str(exc)}"
+            ) from exc

     def _update_built_object_and_artifacts(self, result):
         """
@@ -534,16 +577,24 @@ class Vertex:
             return self._built_object

         # Get the requester edge
-        requester_edge = next((edge for edge in self.edges if edge.target_id == requester.id), None)
+        requester_edge = next(
+            (edge for edge in self.edges if edge.target_id == requester.id), None
+        )
         # Return the result of the requester edge
-        return None if requester_edge is None else await requester_edge.get_result(source=self, target=requester)
+        return (
+            None
+            if requester_edge is None
+            else await requester_edge.get_result(source=self, target=requester)
+        )

     def add_edge(self, edge: "ContractEdge") -> None:
         if edge not in self.edges:
             self.edges.append(edge)

     def __repr__(self) -> str:
-        return f"Vertex(id={self.id}, data={self.data})"
+        return (
+            f"Vertex(display_name={self.display_name}, id={self.id}, data={self.data})"
+        )

     def __eq__(self, __o: object) -> bool:
         try:
@@ -556,7 +607,11 @@ class Vertex:

     def _built_object_repr(self):
         # Add a message with an emoji, stars for sucess,
-        return "Built sucessfully ✨" if self._built_object is not None else "Failed to build 😵‍💫"
+        return (
+            "Built sucessfully ✨"
+            if self._built_object is not None
+            else "Failed to build 😵‍💫"
+        )


 class StatefulVertex(Vertex):
@@ -67,7 +67,7 @@ class LLMVertex(StatelessVertex):
     class_built_object = None

     def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
-        super().__init__(data, graph=graph, base_type="llms", params=params)
+        super().__init__(data, graph=graph, base_type="models", params=params)
         self.steps: List[Callable] = [self._custom_build]

     async def _custom_build(self, *args, **kwargs):
@@ -1,17 +1,19 @@
 import operator
 from pathlib import Path
-from typing import Any, Callable, ClassVar, List, Optional, Union
+from typing import Any, Callable, ClassVar, List, Optional, Sequence, Union
 from uuid import UUID

 import yaml
 from cachetools import TTLCache, cachedmethod
 from fastapi import HTTPException
+from langchain_core.documents import Document

 from langflow.interface.custom.code_parser.utils import (
     extract_inner_type_from_generic_alias,
     extract_union_types_from_generic_alias,
 )
 from langflow.interface.custom.custom_component.component import Component
+from langflow.schema import Record
 from langflow.services.database.models.flow import Flow
 from langflow.services.database.utils import session_getter
 from langflow.services.deps import (
@@ -86,6 +88,56 @@ class CustomComponent(Component):
     def tree(self):
         return self.get_code_tree(self.code or "")

+    def to_records(
+        self, data: Any, text_key: str = "text", data_key: str = "data"
+    ) -> List[dict]:
+        """
+        Convert data into a list of records.
+
+        Args:
+            data (Any): The input data to be converted.
+            text_key (str, optional): The key to extract the text from a dictionary item. Defaults to "text".
+            data_key (str, optional): The key to extract the data from a dictionary item. Defaults to "data".
+
+        Returns:
+            List[dict]: A list of records, where each record is a dictionary with 'text' and 'data' keys.
+        """
+        records = []
+        if not isinstance(data, Sequence):
+            data = [data]
+        for item in data:
+            if isinstance(item, str):
+                records.append(Record(text=item))
+            elif isinstance(item, dict):
+                records.append(Record(text=item.get(text_key), data=item.get(data_key)))
+            elif isinstance(item, Document):
+                records.append(Record(text=item.page_content, data=item.metadata))
+            else:
+                raise ValueError(f"Invalid data type: {type(item)}")
+
+        return records
+
+    def create_references_from_records(
+        self, records: List[dict], include_data: bool = False
+    ) -> str:
+        """
+        Create references from a list of records.
+
+        Args:
+            records (List[dict]): A list of records, where each record is a dictionary.
+            include_data (bool, optional): Whether to include data in the references. Defaults to False.
+
+        Returns:
+            str: A string containing the references in markdown format.
+        """
+        markdown_string = "---\n"
+        for record in records:
+            markdown_string += f"- Text: {record['text']}"
+            if include_data:
+                markdown_string += f" Data: {record['data']}"
+            markdown_string += "\n"
+        return markdown_string
+
     @property
     def get_function_entrypoint_args(self) -> list:
         build_method = self.get_build_method()
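For orientation, a sketch of the new to_records helper as called from inside a component (sample inputs invented):

    # Illustrative inputs; strings, dicts, and Documents are all accepted.
    records = self.to_records(
        [
            "plain strings become Record(text=...)",
            {"text": "dicts are read via text_key/data_key", "data": {"page": 2}},
        ]
    )
    # Markdown "---" list with one "- Text: ..." entry per record, assuming
    # Record supports the item access used by the helper above.
    references = self.create_references_from_records(records)

This is the pair of helpers the RetrievalQA components earlier in this diff now use to append source references to their answers.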
@@ -100,7 +152,8 @@ class CustomComponent(Component):
                 detail={
                     "error": "Type hint Error",
                     "traceback": (
-                        "Prompt type is not supported in the build method." " Try using PromptTemplate instead."
+                        "Prompt type is not supported in the build method."
+                        " Try using PromptTemplate instead."
                     ),
                 },
             )
@@ -114,14 +167,20 @@ class CustomComponent(Component):
         if not self.code:
             return {}

-        component_classes = [cls for cls in self.tree["classes"] if self.code_class_base_inheritance in cls["bases"]]
+        component_classes = [
+            cls
+            for cls in self.tree["classes"]
+            if self.code_class_base_inheritance in cls["bases"]
+        ]
         if not component_classes:
             return {}

         # Assume the first Component class is the one we're interested in
         component_class = component_classes[0]
         build_methods = [
-            method for method in component_class["methods"] if method["name"] == self.function_entrypoint_name
+            method
+            for method in component_class["methods"]
+            if method["name"] == self.function_entrypoint_name
         ]

         return build_methods[0] if build_methods else {}
@@ -178,7 +237,9 @@ class CustomComponent(Component):
             # Retrieve and decrypt the credential by name for the current user
             db_service = get_db_service()
             with session_getter(db_service) as session:
-                return credential_service.get_credential(user_id=self._user_id or "", name=name, session=session)
+                return credential_service.get_credential(
+                    user_id=self._user_id or "", name=name, session=session
+                )

         return get_credential

@@ -188,7 +249,9 @@ class CustomComponent(Component):
         credential_service = get_credential_service()
         db_service = get_db_service()
         with session_getter(db_service) as session:
-            return credential_service.list_credentials(user_id=self._user_id, session=session)
+            return credential_service.list_credentials(
+                user_id=self._user_id, session=session
+            )

     def index(self, value: int = 0):
         """Returns a function that returns the value at the given index in the iterable."""
@@ -239,7 +302,11 @@ class CustomComponent(Component):
             if flow_id:
                 flow = session.query(Flow).get(flow_id)
             elif flow_name:
-                flow = (session.query(Flow).filter(Flow.name == flow_name).filter(Flow.user_id == self.user_id)).first()
+                flow = (
+                    session.query(Flow)
+                    .filter(Flow.name == flow_name)
+                    .filter(Flow.user_id == self.user_id)
+                ).first()
             else:
                 raise ValueError("Either flow_name or flow_id must be provided")

@@ -35,7 +35,7 @@ def import_by_type(_type: str, name: str) -> Any:
     func_dict = {
         "agents": import_agent,
         "prompts": import_prompt,
-        "llms": {"llm": import_llm, "chat": import_chat_llm},
+        "models": {"llm": import_llm, "chat": import_chat_llm},
         "tools": import_tool,
         "chains": import_chain,
         "toolkits": import_toolkit,
@@ -50,7 +50,7 @@ def import_by_type(_type: str, name: str) -> Any:
         "retrievers": import_retriever,
         "custom_components": import_custom_component,
     }
-    if _type == "llms":
+    if _type == "models":
         key = "chat" if "chat" in name.lower() else "llm"
         loaded_func = func_dict[_type][key]  # type: ignore
     else:
@@ -19,7 +19,11 @@ from langflow.interface.custom.utils import get_function
 from langflow.interface.custom_lists import CUSTOM_NODES
 from langflow.interface.importing.utils import import_by_type
 from langflow.interface.initialize.llm import initialize_vertexai
-from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type, handle_partial_variables
+from langflow.interface.initialize.utils import (
+    handle_format_kwargs,
+    handle_node_type,
+    handle_partial_variables,
+)
 from langflow.interface.initialize.vector_store import vecstore_initializer
 from langflow.interface.output_parsers.base import output_parser_creator
 from langflow.interface.retrievers.base import retriever_creator
@@ -105,7 +109,7 @@ async def instantiate_based_on_type(class_object, base_type, node_type, params,
         return instantiate_chains(node_type, class_object, params)
     elif base_type == "output_parsers":
         return instantiate_output_parser(node_type, class_object, params)
-    elif base_type == "llms":
+    elif base_type == "models":
         return instantiate_llm(node_type, class_object, params)
     elif base_type == "retrievers":
         return instantiate_retriever(node_type, class_object, params)
@@ -1,16 +1,16 @@
 from typing import Dict, List, Optional, Type

+from loguru import logger
+
 from langflow.interface.base import LangChainTypeCreator
 from langflow.interface.custom_lists import llm_type_to_cls_dict
 from langflow.services.deps import get_settings_service
-
 from langflow.template.frontend_node.llms import LLMFrontendNode
-from loguru import logger
 from langflow.utils.util import build_template_from_class


 class LLMCreator(LangChainTypeCreator):
-    type_name: str = "llms"
+    type_name: str = "models"

     @property
     def frontend_node_class(self) -> Type[LLMFrontendNode]:
@@ -11,7 +11,11 @@ from starlette.websockets import WebSocket

 from langflow.services.database.models.api_key.model import ApiKey
 from langflow.services.database.models.api_key.crud import check_key
-from langflow.services.database.models.user.crud import get_user_by_id, get_user_by_username, update_user_last_login_at
+from langflow.services.database.models.user.crud import (
+    get_user_by_id,
+    get_user_by_username,
+    update_user_last_login_at,
+)
 from langflow.services.database.models.user.model import User
 from langflow.services.deps import get_session, get_settings_service

@@ -323,7 +327,7 @@ def add_padding(s):

 def get_fernet(settings_service=Depends(get_settings_service)):
     SECRET_KEY = settings_service.auth_settings.SECRET_KEY
-    # It's important that your secret key is 32 url-safe base64-encoded bytes
+    # It's important that your secret key is 32 url-safe base64-encoded byte
     padded_secret_key = add_padding(SECRET_KEY)
     fernet = Fernet(padded_secret_key)
     return fernet
@@ -1,7 +1,15 @@
 from typing import Any, Callable, Optional, Union

+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    Field,
+    field_serializer,
+    field_validator,
+    model_serializer,
+)
+
 from langflow.field_typing.range_spec import RangeSpec
-from pydantic import BaseModel, ConfigDict, Field, field_serializer, model_serializer


 class TemplateField(BaseModel):
@@ -28,7 +36,7 @@ class TemplateField(BaseModel):
     """The value of the field. Default is None."""

     file_types: list[str] = Field(default=[], serialization_alias="fileTypes")
-    """List of file types associated with the field. Default is an empty list. (duplicate)"""
+    """List of file types associated with the field . Default is an empty list."""

     file_path: Optional[str] = ""
     """The file path of the field if it is a file. Defaults to None."""
@@ -63,7 +71,7 @@ class TemplateField(BaseModel):
     range_spec: Optional[RangeSpec] = Field(default=None, serialization_alias="rangeSpec")
     """Range specification for the field. Defaults to None."""

-    title_case: bool = True
+    title_case: bool = False
     """Specifies if the field should be displayed in title case. Defaults to True."""

     def to_dict(self):
@@ -101,3 +109,12 @@ class TemplateField(BaseModel):
         if self.title_case:
             value = value.title()
         return value
+
+    @field_validator("file_types")
+    def validate_file_types(cls, value):
+        if not isinstance(value, list):
+            raise ValueError("file_types must be a list")
+        return [
+            (f".{file_type}" if isinstance(file_type, str) and not file_type.startswith(".") else file_type)
+            for file_type in value
+        ]
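A small check of what the new file_types validator normalizes (values invented):

    # Illustrative: bare extensions gain a leading dot; dotted ones pass through.
    field = TemplateField(file_types=["json", ".txt", "csv"])
    print(field.file_types)  # [".json", ".txt", ".csv"]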
@@ -158,8 +158,8 @@ LOADERS_INFO: List[Dict[str, Any]] = [
         "loader": "UnstructuredMarkdownLoader",
         "name": "Unstructured Markdown (.md)",
         "import": "langchain_community.document_loaders.UnstructuredMarkdownLoader",
-        "defaultFor": ["md"],
-        "allowdTypes": ["md"],
+        "defaultFor": ["md", "mdx"],
+        "allowdTypes": ["md", "mdx"],
     },
     {
         "loader": "UnstructuredPowerPointLoader",
@@ -6,7 +6,6 @@ from typing import Dict, List, Optional, Union

 from langflow.field_typing.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES

-
 PROMPT_INPUT_TYPES = ["Document", "BaseOutputParser", "Text", "Record"]


@@ -204,7 +203,9 @@ def prepare_global_scope(code, module):
             for alias in node.names:
                 exec_globals[alias.name] = getattr(imported_module, alias.name)
         except ModuleNotFoundError as e:
-            raise ModuleNotFoundError(f"Module {node.module} not found. Please install it and try again.") from e
+            raise ModuleNotFoundError(
+                f"Module {node.module} not found. Please install it and try again. Error: {repr(e)}"
+            )
     return exec_globals

@@ -324,7 +324,9 @@ export default function ParameterComponent({
        ) : (
          title
        )}
-        <span className="text-status-red">{required ? " *" : ""}</span>
+        <span className={(info === "" ? "" : "ml-1 ") + " text-status-red"}>
+          {required ? " *" : ""}
+        </span>
        <div className="">
          {info !== "" && (
            <ShadTooltip content={infoHtml.current}>
@@ -10,6 +10,7 @@ import { Textarea } from "../../components/ui/textarea";
 import { priorityFields } from "../../constants/constants";
 import { BuildStatus } from "../../constants/enums";
 import NodeToolbarComponent from "../../pages/FlowPage/components/nodeToolbarComponent";
+import { useDarkStore } from "../../stores/darkStore";
 import useFlowStore from "../../stores/flowStore";
 import useFlowsManagerStore from "../../stores/flowsManagerStore";
 import { useTypesStore } from "../../stores/typesStore";
@@ -168,7 +169,6 @@ export default function GenericNode({
     validationStatus: validationStatusType | null
   ) => {
     const isValid = validationStatus && validationStatus.valid;
-
     if (isValid) {
       return "green-status";
     } else if (!isValid && buildStatus === BuildStatus.INACTIVE) {
@@ -177,13 +177,15 @@ export default function GenericNode({
       return "green-status";
     } else if (!isValid && buildStatus === BuildStatus.BUILT) {
       return "red-status";
-    } else if (!validationStatus) {
-      return "yellow-status";
     } else if (!validationStatus && buildStatus === BuildStatus.TO_BUILD) {
       return "green-status";
+    } else if (buildStatus === BuildStatus.BUILDING) {
+      return "status-build-animation";
     } else {
       return "yellow-status";
     }
   };

+  const isDark = useDarkStore((state) => state.dark);
   const renderIconStatusComponents = (
     buildStatus: BuildStatus | undefined,
     validationStatus: validationStatusType | null
@@ -212,12 +214,15 @@ export default function GenericNode({
     validationStatus: validationStatusType | null
   ) => {
     let isInvalid = validationStatus && !validationStatus.valid;

     if (buildStatus === BuildStatus.INACTIVE && isInvalid) {
       // INACTIVE should have its own class
       return "inactive-status";
     }
     if (buildStatus === BuildStatus.BUILT && isInvalid) {
-      return "built-invalid-status";
+      return isDark
+        ? "border-none ring ring-[#751C1C]"
+        : "built-invalid-status";
     } else if (buildStatus === BuildStatus.BUILDING) {
       return "building-status";
     } else {
@@ -231,11 +236,17 @@ export default function GenericNode({
     buildStatus: BuildStatus | undefined,
     validationStatus: validationStatusType | null
   ) => {
+    const specificClassFromBuildStatus = getSpecificClassFromBuildStatus(
+      buildStatus,
+      validationStatus
+    );
+    const baseBorderClass = getBaseBorderClass(selected);
+    const nodeSizeClass = getNodeSizeClass(showNode);
     return classNames(
-      getBaseBorderClass(selected),
-      getNodeSizeClass(showNode),
+      baseBorderClass,
+      nodeSizeClass,
       "generic-node-div",
-      getSpecificClassFromBuildStatus(buildStatus, validationStatus)
+      specificClassFromBuildStatus
     );
   };

@@ -267,12 +278,11 @@ export default function GenericNode({
           onCloseAdvancedModal={() => {}}
         ></NodeToolbarComponent>
       </NodeToolbar>
-
       <div
         className={getNodeBorderClassName(
           selected,
           showNode,
-          data?.build_status,
+          data?.buildStatus,
           validationStatus
         )}
       >
@@ -451,7 +461,7 @@ export default function GenericNode({
             variant="secondary"
             className={"group h-9 px-1.5"}
             onClick={() => {
-              if (data?.build_status === BuildStatus.BUILDING || isBuilding)
+              if (data?.buildStatus === BuildStatus.BUILDING || isBuilding)
                 return;
               buildFlow(data.id);
             }}
@@ -459,7 +469,7 @@ export default function GenericNode({
             <div>
               <Tooltip
                 title={
-                  data?.build_status === BuildStatus.BUILDING ? (
+                  data?.buildStatus === BuildStatus.BUILDING ? (
                     <span>Building...</span>
                   ) : !validationStatus ? (
                     <span className="flex">
@@ -480,7 +490,7 @@ export default function GenericNode({
               >
                 <div className="generic-node-status-position flex items-center justify-center">
                   {renderIconPlayOrPauseComponents(
-                    data?.build_status,
+                    data?.buildStatus,
                     validationStatus
                   )}
                 </div>
@@ -489,7 +499,7 @@ export default function GenericNode({
           </Button>
         )}
         <div className="">
-          {renderIconStatusComponents(data?.build_status, validationStatus)}
+          {renderIconStatusComponents(data?.buildStatus, validationStatus)}
         </div>
       </div>
     </div>
@@ -391,9 +391,7 @@ export default function NodeToolbarComponent({
              name="FileText"
              className="relative top-0.5 mr-2 h-4 w-4"
            />{" "}
            {data.node?.documentation === ""
              ? "Coming Soon"
              : "Documentation"}
            Docs
          </div>{" "}
        </SelectItem>
        {isMinimal && (

@@ -419,13 +417,19 @@ export default function NodeToolbarComponent({
          </SelectItem>
        )}

        <SelectItem value={"delete"}>
          <div className="font-red flex text-red-500 hover:text-red-500">
        <SelectItem value={"delete"} className="focus:bg-red-400/[.20]">
          <div className="font-red flex text-status-red">
            <IconComponent
              name="Trash2"
              className="relative top-0.5 mr-2 h-4 w-4 "
            />{" "}
            Delete{" "}
            <span className="">Delete</span>{" "}
            <span>
              <IconComponent
                name="Delete"
                className="absolute right-2 top-2 h-4 w-4 text-red-300"
              ></IconComponent>
            </span>
          </div>
        </SelectItem>
      </SelectContent>

@@ -401,7 +401,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
      nodeId,
      onBuildComplete: () => {
        if (nodeId) {
          setSuccessData({ title: `${nodeId} built successfully` });
          setSuccessData({ title: `${get().nodes.find((node) => node.id === nodeId)?.data.node?.display_name} built successfully` });
        } else {
          setSuccessData({ title: `Flow built successfully` });
        }

@@ -416,7 +416,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
        useFlowStore.getState().updateBuildStatus(idList, BuildStatus.BUILDING);
      },
    });
    get().revertAllVerticesToBuild();
    get().revertBuiltStatusFromBuilding();
  },
  getFlow: () => {
    return {

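Note on the toast change above: it swaps the raw nodeId for the node's human-readable display_name. A hedged sketch of that lookup in isolation (NodeLike is a pared-down stand-in for the store's node type; the fallback to the id is an assumption the diff itself does not show):

// Sketch: resolve a user-facing title for the build-success toast.
type NodeLike = {
  id: string;
  data: { node?: { display_name?: string } };
};

function buildSuccessTitle(nodes: NodeLike[], nodeId?: string): string {
  if (!nodeId) return "Flow built successfully";
  const displayName = nodes.find((node) => node.id === nodeId)?.data.node
    ?.display_name;
  // Assumed fallback: guard against nodes missing a display_name.
  return `${displayName ?? nodeId} built successfully`;
}
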
@@ -425,26 +425,11 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
      viewport: get().reactFlowInstance?.getViewport()!,
    };
  },
  updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => {
    nodeIdList.forEach((id) => {
      const nodeToUpdate = get().nodes.find((node) => node.id === id);
      if (nodeToUpdate) {
        nodeToUpdate.data.build_status = status;
        get().setNodes(get().nodes);
      }
    });
  },
  updateVerticesBuild: (vertices: string[]) => {
    set({ verticesBuild: vertices });
  },
  verticesBuild: [],
  revertAllVerticesToBuild: () => {
    // set all vertices to TO_BUILD
    const verticesIds = get()
      .nodes.filter((node) => node.data.build_status === BuildStatus.BUILDING)
      .map((node) => node.id);
    get().updateBuildStatus(verticesIds, BuildStatus.TO_BUILD);
  },

  removeFromVerticesBuild: (vertices: string[]) => {
    set({
      verticesBuild: get().verticesBuild.filter(

@@ -452,6 +437,23 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
      ),
    });
  },
  updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => {
    nodeIdList.forEach((id) => {
      const nodeToUpdate = get().nodes.find((node) => node.id === id);
      if (nodeToUpdate) {
        nodeToUpdate.data.buildStatus = status;
        set({ nodes: get().nodes });
      }
    });
  },
  revertBuiltStatusFromBuilding: () => {
    get().nodes.forEach((node) => {
      if (node.data.buildStatus === BuildStatus.BUILDING) {
        node.data.buildStatus = BuildStatus.TO_BUILD;
      }
    });
    set({ nodes: get().nodes });
  },
}));

export default useFlowStore;

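For context, the two actions added above follow the usual Zustand read-modify-write shape: read the current nodes with get(), mutate the matching entries in place, and republish with set(). A minimal self-contained sketch, with the store trimmed to the fields this diff exercises; the BuildStatus string values are assumed:

import { create } from "zustand";

enum BuildStatus {
  TO_BUILD = "to_build", // string values assumed for illustration
  BUILDING = "building",
  BUILT = "built",
}

type FlowNode = { id: string; data: { buildStatus?: BuildStatus } };

type MiniFlowStore = {
  nodes: FlowNode[];
  updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => void;
  revertBuiltStatusFromBuilding: () => void;
};

const useMiniFlowStore = create<MiniFlowStore>((set, get) => ({
  nodes: [],
  // Stamp a status onto every node in the id list.
  updateBuildStatus: (nodeIdList, status) => {
    nodeIdList.forEach((id) => {
      const nodeToUpdate = get().nodes.find((node) => node.id === id);
      if (nodeToUpdate) {
        nodeToUpdate.data.buildStatus = status;
        set({ nodes: get().nodes });
      }
    });
  },
  // Any node left in BUILDING (e.g. after an aborted run) is reset
  // to TO_BUILD so the next build picks it up again.
  revertBuiltStatusFromBuilding: () => {
    get().nodes.forEach((node) => {
      if (node.data.buildStatus === BuildStatus.BUILDING) {
        node.data.buildStatus = BuildStatus.TO_BUILD;
      }
    });
    set({ nodes: get().nodes });
  },
}));

Like the diff, this sketch re-sets the same nodes array after mutating it in place; a stricter variant would copy the array so reference-equality subscribers reliably re-render.
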
@@ -296,7 +296,7 @@
  @apply generic-node-status text-status-green;
}
.gray-status {
  @apply generic-node-status text-muted-foreground;
  @apply generic-node-status text-status-gray;
}

.red-status {

@@ -313,7 +313,7 @@
  @apply border-none ring ring-muted-foreground;
}
.built-invalid-status {
  @apply border-none ring ring-red-300;
  @apply border-none ring ring-[#FF9090];
}
.building-status {
  @apply border-none ring;

@@ -60,6 +60,7 @@
  --chat-send: #059669;
  --status-green: #4ade80;
  --status-blue: #2563eb;
  --status-gray: #6b7280;
  --connection: #555;
}

@@ -1,5 +1,4 @@
import { Edge, Node, Viewport } from "reactflow";
import { BuildStatus } from "../../constants/enums";
import { FlowType } from "../flow";
//kind and class are just representative names to represent the actual structure of the object received by the API
export type APIDataType = { [key: string]: APIKindType };

@@ -37,7 +36,6 @@ export type APIClassType = {
    | CustomFieldsType
    | boolean
    | undefined;
  build_status?: BuildStatus;
};

export type TemplateVariableType = {

@@ -29,7 +29,7 @@ export type NodeDataType = {
  node?: APIClassType;
  id: string;
  output_types?: string[];
  build_status?: BuildStatus;
  buildStatus?: BuildStatus;
};
// FlowStyleType is the type of the style object that is used to style the
// Flow card with an emoji and a color.

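The rename here moves build state into camelCase on the node's data, matching the buildStatus field the store now mutates, while the companion hunk above drops build_status from APIClassType. A compressed before/after sketch, with fields trimmed to the ones these hunks touch (the enum's string values are assumed):

enum BuildStatus {
  TO_BUILD = "to_build", // string values assumed for illustration
  BUILDING = "building",
  BUILT = "built",
  INACTIVE = "inactive",
}

// Before: build state rode along on the API class, in snake_case.
type APIClassTypeBefore = {
  build_status?: BuildStatus;
};

// After: build state lives on the node's data, in camelCase, which
// is the field updateBuildStatus and the border-class helpers read.
type NodeDataTypeAfter = {
  id: string;
  buildStatus?: BuildStatus;
};
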
@@ -86,9 +86,9 @@ export type FlowStoreType = {
  unselectAll: () => void;
  buildFlow: (nodeId?: string) => Promise<void>;
  getFlow: () => { nodes: Node[]; edges: Edge[]; viewport: Viewport };
  updateBuildStatus: (nodeId: string[], status: BuildStatus) => void;
  updateVerticesBuild: (vertices: string[]) => void;
  removeFromVerticesBuild: (vertices: string[]) => void;
  revertAllVerticesToBuild: () => void;
  verticesBuild: string[];
  updateBuildStatus: (nodeId: string[], status: BuildStatus) => void;
  revertBuiltStatusFromBuilding: () => void;
};

@@ -27,6 +27,7 @@ import {
  Compass,
  Copy,
  Cpu,
  Delete,
  Download,
  DownloadCloud,
  Edit,

@@ -197,7 +198,7 @@ export const gradients = [
export const nodeColors: { [char: string]: string } = {
  prompts: "#4367BF",
  models: "#AA2411",
  llms: "#6344BE",
  model_specs: "#6344BE",
  chains: "#FE7500",
  Document: "#7AAE42",
  list: "#9AAE42",

@@ -226,7 +227,7 @@ export const nodeColors: { [char: string]: string } = {
export const nodeNames: { [char: string]: string } = {
  prompts: "Prompts",
  models: "Language Models",
  llms: "Model Settings",
  model_specs: "Model Specs",
  chains: "Chains",
  agents: "Agents",
  tools: "Tools",

@@ -298,7 +299,7 @@ export const nodeIconsLucide: iconsType = {
  chains: Link,
  memories: Cpu,
  models: Bot,
  llms: Lightbulb,
  model_specs: Lightbulb,
  prompts: TerminalSquare,
  tools: Wrench,
  advanced: Laptop2,

@@ -422,4 +423,5 @@ export const nodeIconsLucide: iconsType = {
  FlaskConical,
  AlertCircle,
  Bot,
  Delete,
};

@@ -80,6 +80,7 @@ module.exports = {
        "status-green": "var(--status-green)",
        "status-red": "var(--status-red)",
        "status-yellow": "var(--status-yellow)",
        "status-gray": "var(--status-gray)",
        "success-background": "var(--success-background)",
        "success-foreground": "var(--success-foreground)",
        "beta-background": "var(--beta-background)",

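Taken with the --status-gray variable added earlier, this config line completes the chain from CSS custom property to Tailwind color token, which is what lets the stylesheet @apply text-status-gray and the toolbar use text-status-red. A hedged sketch of that wiring as a standalone tailwind.config.ts (the project itself uses module.exports; the Config type import assumes Tailwind 3.x):

import type { Config } from "tailwindcss";

// Each token resolves to a CSS variable defined in the global
// stylesheet (e.g. --status-gray: #6b7280), so a theme can swap
// values without touching the config.
const config: Config = {
  content: ["./src/**/*.{ts,tsx}"],
  theme: {
    extend: {
      colors: {
        "status-green": "var(--status-green)",
        "status-red": "var(--status-red)",
        "status-yellow": "var(--status-yellow)",
        "status-gray": "var(--status-gray)",
      },
    },
  },
};

export default config;

With that in place, the @apply text-status-gray line in the CSS hunk above and the text-status-red class in the toolbar both resolve through the same variable-backed palette.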