Merge remote-tracking branch 'origin/dev' into feature/store

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-11-14 21:41:58 -03:00
commit 60f6d08fc3
123 changed files with 2288 additions and 1124 deletions

View file

@ -15,7 +15,7 @@
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "make install_frontend && make install_backend",
"postCreateCommand": "make setup_devcontainer",
"containerEnv": {
"POETRY_VIRTUALENVS_IN_PROJECT": "true"
@ -31,11 +31,13 @@
"sourcery.sourcery",
"eamodio.gitlens",
"ms-vscode.makefile-tools",
"GitHub.vscode-pull-request-github"
"GitHub.vscode-pull-request-github",
"Codium.codium",
"ms-azuretools.vscode-docker"
]
}
}
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}
}

View file

@ -15,7 +15,7 @@ on:
- "pyproject.toml"
env:
POETRY_VERSION: "1.4.0"
POETRY_VERSION: "1.7.0"
jobs:
lint:

View file

@ -11,5 +11,5 @@ WORKDIR $HOME/app
COPY --chown=user . $HOME/app
RUN pip install langflow>==0.0.86 -U --user
RUN pip install langflow>==0.5.0 -U --user
CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]

View file

@ -49,15 +49,16 @@ run_frontend:
cd src/frontend && npm start
run_cli:
poetry run langflow run --path src/frontend/build
poetry run langflow --path src/frontend/build
run_cli_debug:
poetry run langflow run --path src/frontend/build --log-level debug
poetry run langflow --path src/frontend/build --log-level debug
setup_devcontainer:
make init
make build_frontend
poetry run langflow --path src/frontend/build
@echo 'Run Cli'
make run_cli
frontend:
@-make install_frontend || (echo "An error occurred while installing frontend dependencies. Attempting to fix." && make install_frontendc)

View file

@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
POETRY_VERSION=1.5.1 \
POETRY_VERSION=1.7 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root

View file

@ -1,14 +1,15 @@
FROM python:3.10-slim
RUN apt-get update && apt-get install gcc g++ git make -y
RUN apt-get update && apt-get install gcc g++ git make -y && apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH
PATH=/home/user/.local/bin:$PATH
WORKDIR $HOME/app
COPY --chown=user . $HOME/app
RUN pip install langflow>==0.0.71 -U --user
CMD ["langflow", "--host", "0.0.0.0", "--port", "7860"]
RUN pip install langflow>==0.5.0 -U --user
CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]

View file

@ -7,4 +7,4 @@ services:
dockerfile: Dockerfile
ports:
- "7860:7860"
command: langflow --host 0.0.0.0
command: langflow run --host 0.0.0.0

View file

@ -12,6 +12,22 @@ Embeddings are vector representations of text that capture the semantic meaning
---
### BedrockEmbeddings
Used to load [Amazon Bedrock](https://aws.amazon.com/bedrock/) embedding models.
**Params**
- **credentials_profile_name:** The name of the profile in the ~/.aws/credentials or ~/.aws/config files, which has either access keys or role information specified. If not specified, the default credential profile or, if on an EC2 instance, credentials from IMDS will be used. See [the AWS documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html) for more details.
- **model_id:** Id of the model to call, e.g., amazon.titan-embed-text-v1, this is equivalent to the modelId property in the list-foundation-models api.
- **endpoint_url:** Needed if you don't want to default to the us-east-1 endpoint.
- **region_name:** The AWS region, e.g., us-west-2. Falls back to the AWS_DEFAULT_REGION env variable or the region specified in ~/.aws/config in case it is not provided here.
---
### CohereEmbeddings
Used to load [Cohere's](https://cohere.com/) embedding models.

View file

@ -5,7 +5,7 @@ import ReactPlayer from "react-player";
# Component
Components are the building blocks of the flows. They are made of inputs, outputs, and parameters that define their functionality, providing a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://docs.langchain.com/docs/category/components) section.
Components are the building blocks of the flows. They are made of inputs, outputs, and parameters that define their functionality, providing a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://python.langchain.com/docs/integrations/components) section.
### Component's Features

View file

@ -31,7 +31,7 @@ module.exports = {
[
remarkCodeHike,
{
theme: "github-light",
theme: "github-dark",
showCopyButton: true,
lineNumbers: true,
},
@ -112,8 +112,10 @@ module.exports = {
},
colorMode: {
defaultMode: "light",
disableSwitch: true,
respectPrefersColorScheme: false,
/* Allow users to chose light or dark mode. */
disableSwitch: false,
/* Respect user preferences, such as low light mode in the evening */
respectPrefersColorScheme: true,
},
announcementBar: {
content:

1290
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.5.3"
version = "0.6.0a0"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -25,30 +25,32 @@ documentation = "https://docs.langflow.org"
langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.9,<3.11"
fastapi = "^0.103.0"
fastapi = "^0.104.0"
uvicorn = "^0.23.0"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^21.2.0"
langchain = "^0.0.312"
langchain = "~0.0.327"
openai = "^0.27.8"
pandas = "2.0.3"
chromadb = "^0.3.21"
chromadb = "^0.4.0"
huggingface-hub = { version = "^0.16.0", extras = ["inference"] }
rich = "^13.5.0"
llama-cpp-python = { version = "~0.1.0", optional = true }
rich = "^13.6.0"
llama-cpp-python = { version = "~0.2.0", optional = true }
networkx = "^3.1"
unstructured = "^0.10.0"
pypdf = "^3.15.0"
pypdf = "^3.17.0"
lxml = "^4.9.2"
pysrt = "^1.1.2"
fake-useragent = "^1.2.1"
fake-useragent = "^1.3.0"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^12.0.0"
pyarrow = "^14.0.0"
tiktoken = "~0.5.0"
wikipedia = "^1.4.0"
qdrant-client = "^1.4.0"
@ -57,25 +59,26 @@ weaviate-client = "^3.23.0"
jina = "3.15.2"
sentence-transformers = { version = "^2.2.2", optional = true }
ctransformers = { version = "^0.2.10", optional = true }
cohere = "^4.27.0"
cohere = "^4.32.0"
python-multipart = "^0.0.6"
sqlmodel = "^0.0.8"
# install sqlmodel using https://github.com/honglei/sqlmodel.git
sqlmodel = { git = "https://github.com/honglei/sqlmodel.git", branch = "main" }
faiss-cpu = "^1.7.4"
anthropic = "^0.3.0"
anthropic = "^0.5.0"
orjson = "3.9.3"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
types-cachetools = "^5.3.0.5"
appdirs = "^1.4.4"
platformdirs = "^3.11.0"
pinecone-client = "^2.2.2"
supabase = "^1.0.3"
pymongo = "^4.4.0"
pymongo = "^4.5.0"
supabase = "^2.0.3"
certifi = "^2023.5.7"
google-cloud-aiplatform = "^1.26.1"
google-cloud-aiplatform = "^1.36.0"
psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"
fastavro = "^1.8.0"
langchain-experimental = "^0.0.8"
langchain-experimental = "*"
celery = { extras = ["redis"], version = "^5.3.1", optional = true }
redis = { version = "^4.6.0", optional = true }
flower = { version = "^2.0.0", optional = true }
@ -84,15 +87,21 @@ passlib = "^1.7.4"
bcrypt = "^4.0.1"
python-jose = "^3.3.0"
metaphor-python = "^0.1.11"
pydantic = "^2.0.0"
pydantic-settings = "^2.0.3"
zep-python = { version = "^1.3.0", allow-prereleases = true }
pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }
loguru = "^0.7.1"
langfuse = "^1.0.13"
langfuse = "^1.1.11"
pillow = "^10.0.0"
metal-sdk = "^2.2.0"
metal-sdk = "^2.4.0"
markupsafe = "^2.1.3"
extract-msg = "^0.45.0"
jq = "^1.6.0"
boto3 = "^1.28.63"
numexpr = "^2.8.6"
qianfan = "0.0.5"
pgvector = "^0.2.3"
[tool.poetry.group.dev.dependencies]
types-redis = "^4.6.0.5"
@ -100,13 +109,12 @@ ipykernel = "^6.21.2"
mypy = "^1.1.1"
ruff = "^0.1.5"
httpx = "*"
pytest = "^7.2.2"
types-requests = "^2.28.11"
requests = "^2.28.0"
pytest-cov = "^4.0.0"
pytest = "^7.4.2"
types-requests = "^2.31.0"
requests = "^2.31.0"
pytest-cov = "^4.1.0"
pandas-stubs = "^2.0.0.230412"
types-pillow = "^9.5.0.2"
types-appdirs = "^1.4.3.5"
types-pyyaml = "^6.0.12.8"
types-python-jose = "^3.3.4.8"
types-passlib = "^1.7.7.13"

View file

@ -1,6 +1,6 @@
from typing import Optional
from langflow.template.frontend_node.base import FrontendNode
from pydantic import BaseModel, validator
from pydantic import field_validator, BaseModel
from langflow.interface.utils import extract_input_variables_from_prompt
from langchain.prompts import PromptTemplate
@ -30,11 +30,13 @@ class CodeValidationResponse(BaseModel):
imports: dict
function: dict
@validator("imports")
@field_validator("imports")
@classmethod
def validate_imports(cls, v):
return v or {"errors": []}
@validator("function")
@field_validator("function")
@classmethod
def validate_function(cls, v):
return v or {"errors": []}

View file

@ -12,7 +12,10 @@ from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
from langflow.graph.graph.base import Graph
from langflow.services.auth.utils import get_current_active_user, get_current_user
from langflow.services.auth.utils import (
get_current_active_user,
get_current_user_by_jwt,
)
from langflow.services.cache.utils import update_build_status
from loguru import logger
from langflow.services.deps import get_chat_service, get_session, get_cache_service
@ -34,8 +37,8 @@ async def chat(
):
"""Websocket endpoint for chat."""
try:
user = await get_current_user_by_jwt(token, db)
await websocket.accept()
user = await get_current_user(token, db)
if not user:
await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized")
if not user.is_active:
@ -149,6 +152,11 @@ async def stream_build(
number_of_nodes = len(graph.nodes)
update_build_status(cache_service, flow_id, BuildStatus.IN_PROGRESS)
try:
user_id = cache_service[flow_id]["user_id"]
except KeyError:
logger.debug("No user_id found in cache_service")
user_id = None
for i, vertex in enumerate(graph.generator_build(), 1):
try:
log_dict = {
@ -156,9 +164,9 @@ async def stream_build(
}
yield str(StreamData(event="log", data=log_dict))
if vertex.is_task:
vertex = try_running_celery_task(vertex)
vertex = try_running_celery_task(vertex, user_id)
else:
vertex.build()
vertex.build(user_id=user_id)
params = vertex._built_object_repr()
valid = True
logger.debug(f"Building node {str(vertex.vertex_type)}")
@ -216,7 +224,7 @@ async def stream_build(
raise HTTPException(status_code=500, detail=str(exc))
def try_running_celery_task(vertex):
def try_running_celery_task(vertex, user_id):
# Try running the task in celery
# and set the task_id to the local vertex
# if it fails, run the task locally
@ -228,5 +236,5 @@ def try_running_celery_task(vertex):
except Exception as exc:
logger.debug(f"Error running task in celery: {exc}")
vertex.task_id = None
vertex.build()
vertex.build(user_id=user_id)
return vertex

View file

@ -210,6 +210,7 @@ def get_version():
@router.post("/custom_component", status_code=HTTPStatus.OK)
async def custom_component(
raw_code: CustomComponentCode,
user: User = Depends(get_current_active_user),
):
from langflow.interface.types import (
build_langchain_template_custom_component,
@ -218,4 +219,4 @@ async def custom_component(
extractor = CustomComponent(code=raw_code.code)
extractor.is_check_valid()
return build_langchain_template_custom_component(extractor)
return build_langchain_template_custom_component(extractor, user_id=user.id)

View file

@ -7,7 +7,7 @@ from langflow.services.database.models.flow import FlowCreate, FlowRead
from langflow.services.database.models.user import UserRead
from langflow.services.database.models.base import orjson_dumps
from pydantic import BaseModel, Field, validator
from pydantic import BaseModel, Field, field_validator
class BuildStatus(Enum):
@ -91,7 +91,8 @@ class ChatResponse(ChatMessage):
is_bot: bool = True
files: list = []
@validator("type")
@field_validator("type")
@classmethod
def validate_message_type(cls, v):
if v not in ["start", "stream", "end", "error", "info", "file"]:
raise ValueError("type must be start, stream, end, error, info, or file")
@ -109,12 +110,13 @@ class PromptResponse(ChatMessage):
class FileResponse(ChatMessage):
"""File response schema."""
data: Any
data: Any = None
data_type: str
type: str = "file"
is_bot: bool = True
@validator("data_type")
@field_validator("data_type")
@classmethod
def validate_data_type(cls, v):
if v not in ["image", "csv"]:
raise ValueError("data_type must be image or csv")

View file

@ -20,10 +20,11 @@ class ConversationalAgent(CustomComponent):
def build_config(self):
openai_function_models = [
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k-0613",
"gpt-4-0613",
"gpt-4-32k-0613",
"gpt-4-1106-preview",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
"gpt-4",
"gpt-4-32k",
]
return {
"tools": {"is_list": True, "display_name": "Tools"},

View file

@ -0,0 +1,29 @@
from langflow import CustomComponent
from langchain.chains import ConversationChain
from typing import Optional, Union, Callable
from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain
class ConversationChainComponent(CustomComponent):
    """Expose LangChain's ConversationChain as a langflow component."""

    display_name = "ConversationChain"
    description = "Chain to have a conversation and load context from memory."

    def build_config(self):
        # NOTE(review): the original config also declared a "prompt" field,
        # but build() accepts no ``prompt`` argument, so the field was dead;
        # it has been removed.
        return {
            "llm": {"display_name": "LLM"},
            "memory": {
                "display_name": "Memory",
                "info": "Memory to load context from. If none is provided, a ConversationBufferMemory will be used.",
            },
            "code": {"show": False},
        }

    def build(
        self,
        llm: BaseLanguageModel,
        memory: Optional[BaseMemory] = None,
    ) -> Union[Chain, Callable]:
        """Build the chain; without memory, ConversationChain supplies its
        own default buffer memory (matching the "info" text above)."""
        if memory is None:
            return ConversationChain(llm=llm)
        return ConversationChain(llm=llm, memory=memory)

View file

@ -0,0 +1,30 @@
from langflow import CustomComponent
from langchain.chains import LLMChain
from typing import Optional, Union, Callable
from langflow.field_typing import (
BasePromptTemplate,
BaseLanguageModel,
BaseMemory,
Chain,
)
class LLMChainComponent(CustomComponent):
    """Expose LangChain's LLMChain as a langflow component."""

    display_name = "LLMChain"
    description = "Chain to run queries against LLMs"

    def build_config(self):
        # Field settings shown in the frontend; the raw code editor is hidden.
        field_settings = {
            "prompt": {"display_name": "Prompt"},
            "llm": {"display_name": "LLM"},
            "memory": {"display_name": "Memory"},
            "code": {"show": False},
        }
        return field_settings

    def build(
        self,
        prompt: BasePromptTemplate,
        llm: BaseLanguageModel,
        memory: Optional[BaseMemory] = None,
    ) -> Union[Chain, Callable]:
        # Assemble the chain; memory stays None unless the caller supplies one.
        chain = LLMChain(prompt=prompt, llm=llm, memory=memory)
        return chain

View file

@ -8,7 +8,7 @@ from langchain.schema import Document
class PromptRunner(CustomComponent):
display_name: str = "Prompt Runner"
description: str = "Run a Chain with the given PromptTemplate"
beta = True
beta: bool = True
field_config = {
"llm": {"display_name": "LLM"},
"prompt": {

View file

@ -0,0 +1,232 @@
from langflow import CustomComponent
from langchain.schema import Document
from typing import Any, Dict, List
# Registry of supported LangChain document loaders.
# Each entry records the loader class name, its UI display name, the dotted
# import path, the extensions it is the default loader for ("defaultFor"),
# and the extensions it can handle ("allowedTypes").
# Fixes: the key was misspelled "allowdTypes" (build_config checks
# "allowedTypes", so file-type lists were always empty); the subtitle loader
# is SRTLoader for .srt files (not "STRLoader"/".str"); the Word loader class
# in langchain is UnstructuredWordDocumentLoader.
loaders_info: List[Dict[str, Any]] = [
    {
        "loader": "AirbyteJSONLoader",
        "name": "Airbyte JSON (.jsonl)",
        "import": "langchain.document_loaders.AirbyteJSONLoader",
        "defaultFor": ["jsonl"],
        "allowedTypes": ["jsonl"],
    },
    {
        "loader": "JSONLoader",
        "name": "JSON (.json)",
        "import": "langchain.document_loaders.JSONLoader",
        "defaultFor": ["json"],
        "allowedTypes": ["json"],
    },
    {
        "loader": "BSHTMLLoader",
        "name": "BeautifulSoup4 HTML (.html, .htm)",
        "import": "langchain.document_loaders.BSHTMLLoader",
        "allowedTypes": ["html", "htm"],
    },
    {
        "loader": "CSVLoader",
        "name": "CSV (.csv)",
        "import": "langchain.document_loaders.CSVLoader",
        "defaultFor": ["csv"],
        "allowedTypes": ["csv"],
    },
    {
        "loader": "CoNLLULoader",
        "name": "CoNLL-U (.conllu)",
        "import": "langchain.document_loaders.CoNLLULoader",
        "defaultFor": ["conllu"],
        "allowedTypes": ["conllu"],
    },
    {
        "loader": "EverNoteLoader",
        "name": "EverNote (.enex)",
        "import": "langchain.document_loaders.EverNoteLoader",
        "defaultFor": ["enex"],
        "allowedTypes": ["enex"],
    },
    {
        "loader": "FacebookChatLoader",
        "name": "Facebook Chat (.json)",
        "import": "langchain.document_loaders.FacebookChatLoader",
        "allowedTypes": ["json"],
    },
    {
        "loader": "OutlookMessageLoader",
        "name": "Outlook Message (.msg)",
        "import": "langchain.document_loaders.OutlookMessageLoader",
        "defaultFor": ["msg"],
        "allowedTypes": ["msg"],
    },
    {
        "loader": "PyPDFLoader",
        "name": "PyPDF (.pdf)",
        "import": "langchain.document_loaders.PyPDFLoader",
        "defaultFor": ["pdf"],
        "allowedTypes": ["pdf"],
    },
    {
        "loader": "SRTLoader",
        "name": "Subtitle (.srt)",
        "import": "langchain.document_loaders.SRTLoader",
        "defaultFor": ["srt"],
        "allowedTypes": ["srt"],
    },
    {
        "loader": "TextLoader",
        "name": "Text (.txt)",
        "import": "langchain.document_loaders.TextLoader",
        "defaultFor": ["txt"],
        "allowedTypes": ["txt"],
    },
    {
        "loader": "UnstructuredEmailLoader",
        "name": "Unstructured Email (.eml)",
        "import": "langchain.document_loaders.UnstructuredEmailLoader",
        "defaultFor": ["eml"],
        "allowedTypes": ["eml"],
    },
    {
        "loader": "UnstructuredHTMLLoader",
        "name": "Unstructured HTML (.html, .htm)",
        "import": "langchain.document_loaders.UnstructuredHTMLLoader",
        "defaultFor": ["html", "htm"],
        "allowedTypes": ["html", "htm"],
    },
    {
        "loader": "UnstructuredMarkdownLoader",
        "name": "Unstructured Markdown (.md)",
        "import": "langchain.document_loaders.UnstructuredMarkdownLoader",
        "defaultFor": ["md"],
        "allowedTypes": ["md"],
    },
    {
        "loader": "UnstructuredPowerPointLoader",
        "name": "Unstructured PowerPoint (.pptx)",
        "import": "langchain.document_loaders.UnstructuredPowerPointLoader",
        "defaultFor": ["pptx"],
        "allowedTypes": ["pptx"],
    },
    {
        "loader": "UnstructuredWordDocumentLoader",
        "name": "Unstructured Word (.docx)",
        "import": "langchain.document_loaders.UnstructuredWordDocumentLoader",
        "defaultFor": ["docx"],
        "allowedTypes": ["docx"],
    },
]
class FileLoaderComponent(CustomComponent):
    """Generic file loader dispatching to a LangChain document loader.

    The loader is either selected explicitly by the user or resolved
    automatically from the file extension via the ``loaders_info`` registry.
    """

    display_name: str = "File Loader"
    description: str = "Generic File Loader"
    # Annotated so pydantic v2 treats this as a model field; matches the
    # ``beta: bool = True`` style used by the sibling components.
    beta: bool = True

    def build_config(self):
        """Build the frontend field configuration for this component."""
        loader_options = ["Automatic"] + [
            loader_info["name"] for loader_info in loaders_info
        ]

        # Collect the accepted extensions/suffixes from the registry.
        file_types = []
        suffixes = []
        for loader_info in loaders_info:
            if "allowedTypes" in loader_info:
                file_types.extend(loader_info["allowedTypes"])
                suffixes.extend([f".{ext}" for ext in loader_info["allowedTypes"]])

        return {
            "file_path": {
                "display_name": "File Path",
                "required": True,
                "field_type": "file",
                "file_types": [
                    "json",
                    "txt",
                    "csv",
                    "jsonl",
                    "html",
                    "htm",
                    "conllu",
                    "enex",
                    "msg",
                    "pdf",
                    "srt",
                    "eml",
                    "md",
                    "pptx",
                    "docx",
                ],
                "suffixes": [
                    ".json",
                    ".txt",
                    ".csv",
                    ".jsonl",
                    ".html",
                    ".htm",
                    ".conllu",
                    ".enex",
                    ".msg",
                    ".pdf",
                    ".srt",
                    ".eml",
                    ".md",
                    ".pptx",
                    ".docx",
                ],
                # "file_types" : file_types,
                # "suffixes": suffixes,
            },
            "loader": {
                "display_name": "Loader",
                "is_list": True,
                "required": True,
                "options": loader_options,
                "value": "Automatic",
            },
            "code": {"show": False},
        }

    def build(self, file_path: str, loader: str) -> Document:
        """Load ``file_path`` with the selected (or auto-detected) loader.

        Raises ValueError when the loader name is unknown, no default loader
        exists for the file extension, or the loader class cannot be imported.
        """
        file_type = file_path.split(".")[-1]

        # Map the selected loader name to its registry entry.
        selected_loader_info = None
        for loader_info in loaders_info:
            if loader_info["name"] == loader:
                selected_loader_info = loader_info
                break

        if selected_loader_info is None and loader != "Automatic":
            raise ValueError(f"Loader {loader} not found in the loader info list")

        if loader == "Automatic":
            # Resolve the loader automatically from the file extension.
            default_loader_info = None
            for info in loaders_info:
                if "defaultFor" in info and file_type in info["defaultFor"]:
                    default_loader_info = info
                    break

            if default_loader_info is None:
                raise ValueError(f"No default loader found for file type: {file_type}")

            selected_loader_info = default_loader_info
        if isinstance(selected_loader_info, dict):
            loader_import: str = selected_loader_info["import"]
        else:
            raise ValueError(
                f"Loader info for {loader} is not a dict\nLoader info:\n{selected_loader_info}"
            )
        module_name, class_name = loader_import.rsplit(".", 1)

        try:
            # Import the loader class dynamically.
            loader_module = __import__(module_name, fromlist=[class_name])
            loader_instance = getattr(loader_module, class_name)
        except ImportError as e:
            raise ValueError(
                f"Loader {loader} could not be imported\nLoader info:\n{selected_loader_info}"
            ) from e

        result = loader_instance(file_path=file_path)
        return result.load()

View file

@ -0,0 +1,62 @@
from typing import List
from langflow import CustomComponent
from langchain.document_loaders import AZLyricsLoader
from langchain.document_loaders import CollegeConfidentialLoader
from langchain.document_loaders import GitbookLoader
from langchain.document_loaders import HNLoader
from langchain.document_loaders import IFixitLoader
from langchain.document_loaders import IMSDbLoader
from langchain.document_loaders import WebBaseLoader
from langchain.schema import Document
class UrlLoaderComponent(CustomComponent):
    """Load Documents from a URL using a user-selectable LangChain loader."""

    display_name: str = "Url Loader"
    description: str = "Generic Url Loader Component"

    def build_config(self):
        return {
            "web_path": {
                "display_name": "Url",
                "required": True,
            },
            "loader": {
                "display_name": "Loader",
                "is_list": True,
                "required": True,
                "options": [
                    "AZLyricsLoader",
                    "CollegeConfidentialLoader",
                    "GitbookLoader",
                    "HNLoader",
                    "IFixitLoader",
                    "IMSDbLoader",
                    "WebBaseLoader",
                ],
                "value": "WebBaseLoader",
            },
            "code": {"show": False},
        }

    def build(self, web_path: str, loader: str) -> List[Document]:
        """Instantiate the selected loader for ``web_path`` and load documents.

        Fix: the original if/elif chain left ``loader_instance`` unbound for an
        unknown loader name, raising UnboundLocalError instead of the intended
        ValueError.
        """
        # Map option name -> (loader class, keyword used for the URL).
        # GitbookLoader takes ``web_page`` instead of ``web_path``.
        loader_registry = {
            "AZLyricsLoader": (AZLyricsLoader, "web_path"),
            "CollegeConfidentialLoader": (CollegeConfidentialLoader, "web_path"),
            "GitbookLoader": (GitbookLoader, "web_page"),
            "HNLoader": (HNLoader, "web_path"),
            "IFixitLoader": (IFixitLoader, "web_path"),
            "IMSDbLoader": (IMSDbLoader, "web_path"),
            "WebBaseLoader": (WebBaseLoader, "web_path"),
        }
        entry = loader_registry.get(loader)
        if entry is None:
            raise ValueError(f"No loader found for: {web_path}")
        loader_cls, url_kwarg = entry
        loader_instance = loader_cls(**{url_kwarg: web_path})  # type: ignore
        return loader_instance.load()

View file

@ -0,0 +1,46 @@
from typing import Optional
from langflow import CustomComponent
from langchain.embeddings import BedrockEmbeddings
from langchain.embeddings.base import Embeddings
class AmazonBedrockEmeddingsComponent(CustomComponent):
    """
    A custom component for implementing an Embeddings Model using Amazon Bedrock.

    NOTE(review): the class name misspells "Embeddings"; it is kept as-is
    because renaming would break external references to this component.
    """

    display_name: str = "Amazon Bedrock Embeddings"
    description: str = "Embeddings model from Amazon Bedrock."
    documentation = "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock"
    # Annotated so pydantic v2 treats this as a model field; matches the
    # ``beta: bool = True`` style used by the sibling components.
    beta: bool = True

    def build_config(self):
        return {
            "model_id": {
                "display_name": "Model Id",
                "options": ["amazon.titan-embed-text-v1"],
            },
            "credentials_profile_name": {"display_name": "Credentials Profile Name"},
            "endpoint_url": {"display_name": "Bedrock Endpoint URL"},
            "region_name": {"display_name": "AWS Region"},
            "code": {"show": False},
        }

    def build(
        self,
        model_id: str = "amazon.titan-embed-text-v1",
        credentials_profile_name: Optional[str] = None,
        endpoint_url: Optional[str] = None,
        region_name: Optional[str] = None,
    ) -> Embeddings:
        """Instantiate BedrockEmbeddings; wrap any failure in ValueError."""
        try:
            output = BedrockEmbeddings(
                credentials_profile_name=credentials_profile_name,
                model_id=model_id,
                endpoint_url=endpoint_url,
                region_name=region_name,
            )  # type: ignore
        except Exception as e:
            raise ValueError("Could not connect to AmazonBedrock API.") from e
        return output

View file

@ -0,0 +1,45 @@
from typing import Optional
from langflow import CustomComponent
from langchain.llms.bedrock import Bedrock
from langchain.llms.base import BaseLLM
class AmazonBedrockComponent(CustomComponent):
    """LLM component backed by Amazon Bedrock foundation models."""

    display_name: str = "Amazon Bedrock"
    description: str = "LLM model from Amazon Bedrock."

    def build_config(self):
        return {
            "model_id": {
                "display_name": "Model Id",
                "options": [
                    "ai21.j2-grande-instruct",
                    "ai21.j2-jumbo-instruct",
                    "ai21.j2-mid",
                    "ai21.j2-mid-v1",
                    "ai21.j2-ultra",
                    "ai21.j2-ultra-v1",
                    "anthropic.claude-instant-v1",
                    "anthropic.claude-v1",
                    "anthropic.claude-v2",
                    "cohere.command-text-v14",
                ],
            },
            "credentials_profile_name": {"display_name": "Credentials Profile Name"},
            "streaming": {"display_name": "Streaming", "field_type": "bool"},
            "code": {"show": False},
        }

    def build(
        self,
        model_id: str = "anthropic.claude-instant-v1",
        credentials_profile_name: Optional[str] = None,
        streaming: bool = False,
    ) -> BaseLLM:
        """Instantiate the Bedrock LLM; wrap any failure in ValueError.

        Fix: build_config exposes a "streaming" toggle, but the original
        build() never accepted it, so the field had no effect. It is now a
        backward-compatible keyword (default False) forwarded to Bedrock.
        """
        try:
            output = Bedrock(
                credentials_profile_name=credentials_profile_name,
                model_id=model_id,
                streaming=streaming,
            )  # type: ignore
        except Exception as e:
            raise ValueError("Could not connect to AmazonBedrock API.") from e
        return output

View file

@ -0,0 +1,48 @@
from typing import Optional
from langflow import CustomComponent
from langchain.retrievers import AmazonKendraRetriever
from langchain.schema import BaseRetriever
class AmazonKendraRetrieverComponent(CustomComponent):
    """Retriever component that queries the Amazon Kendra API."""

    display_name: str = "Amazon Kendra Retriever"
    description: str = "Retriever that uses the Amazon Kendra API."

    def build_config(self):
        # Frontend field configuration; the raw code editor is hidden.
        config = {
            "index_id": {"display_name": "Index ID"},
            "region_name": {"display_name": "Region Name"},
            "credentials_profile_name": {"display_name": "Credentials Profile Name"},
            "attribute_filter": {
                "display_name": "Attribute Filter",
                "field_type": "code",
            },
            "top_k": {"display_name": "Top K", "field_type": "int"},
            "user_context": {
                "display_name": "User Context",
                "field_type": "code",
            },
            "code": {"show": False},
        }
        return config

    def build(
        self,
        index_id: str,
        top_k: int = 3,
        region_name: Optional[str] = None,
        credentials_profile_name: Optional[str] = None,
        attribute_filter: Optional[dict] = None,
        user_context: Optional[dict] = None,
    ) -> BaseRetriever:
        """Create the Kendra retriever; surface connection problems as ValueError."""
        kendra_kwargs = {
            "index_id": index_id,
            "top_k": top_k,
            "region_name": region_name,
            "credentials_profile_name": credentials_profile_name,
            "attribute_filter": attribute_filter,
            "user_context": user_context,
        }
        try:
            retriever = AmazonKendraRetriever(**kendra_kwargs)  # type: ignore
        except Exception as e:
            raise ValueError("Could not connect to AmazonKendra API.") from e
        return retriever

View file

@ -1,17 +1,18 @@
from typing import List, Union
from langflow import CustomComponent
from metaphor_python import Metaphor # type: ignore
from langchain.tools import Tool
from langchain.agents import tool
from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.tools import Tool
from metaphor_python import Metaphor # type: ignore
from langflow import CustomComponent
class MetaphorToolkit(CustomComponent):
display_name: str = "Metaphor"
description: str = "Metaphor Toolkit"
documentation = "https://python.langchain.com/docs/integrations/tools/metaphor_search"
beta = True
beta: bool = True
# api key should be password = True
field_config = {
"metaphor_api_key": {"display_name": "Metaphor API Key", "password": True},

View file

@ -10,7 +10,7 @@ class GetRequest(CustomComponent):
description: str = "Make a GET request to the given URL."
output_types: list[str] = ["Document"]
documentation: str = "https://docs.langflow.org/components/utilities#get-request"
beta = True
beta: bool = True
field_config = {
"url": {
"display_name": "URL",

View file

@ -11,8 +11,8 @@
# - **Document:** The Document containing the JSON object.
from langflow import CustomComponent
from langchain.schema import Document
from langflow import CustomComponent
from langflow.services.database.models.base import orjson_dumps

View file

@ -10,7 +10,7 @@ class PostRequest(CustomComponent):
description: str = "Make a POST request to the given URL."
output_types: list[str] = ["Document"]
documentation: str = "https://docs.langflow.org/components/utilities#post-request"
beta = True
beta: bool = True
field_config = {
"url": {"display_name": "URL", "info": "The URL to make the request to."},
"headers": {

View file

@ -10,7 +10,7 @@ class UpdateRequest(CustomComponent):
description: str = "Make a PATCH request to the given URL."
output_types: list[str] = ["Document"]
documentation: str = "https://docs.langflow.org/components/utilities#update-request"
beta = True
beta: bool = True
field_config = {
"url": {"display_name": "URL", "info": "The URL to make the request to."},
"headers": {

View file

@ -14,10 +14,10 @@ class ChromaComponent(CustomComponent):
A custom component for implementing a Vector Store using Chroma.
"""
display_name: str = "Chroma (Custom Component)"
display_name: str = "Chroma"
description: str = "Implementation of Vector Store using Chroma"
documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma"
beta = True
beta: bool = True
def build_config(self):
"""

View file

@ -0,0 +1,64 @@
from typing import Optional
from langflow import CustomComponent
from langchain.vectorstores.redis import Redis
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.embeddings.base import Embeddings
class RedisComponent(CustomComponent):
    """
    A custom component for implementing a Vector Store using Redis.
    """

    display_name: str = "Redis"
    description: str = "Implementation of Vector Store using Redis"
    documentation = "https://python.langchain.com/docs/integrations/vectorstores/redis"
    # Annotated so pydantic v2 treats this as a model field; matches the
    # ``beta: bool = True`` style used by the sibling components.
    beta: bool = True

    def build_config(self):
        """
        Builds the configuration for the component.

        Returns:
        - dict: A dictionary containing the configuration options for the component.
        """
        # NOTE(review): the original config also declared an "index_name"
        # field, but build() only accepts ``redis_index_name``, so the extra
        # field was dead; it has been removed.
        return {
            "code": {"show": False, "display_name": "Code"},
            "documents": {"display_name": "Documents", "is_list": True},
            "embedding": {"display_name": "Embedding"},
            "redis_server_url": {
                "display_name": "Redis Server Connection String",
                "advanced": False,
            },
            "redis_index_name": {"display_name": "Redis Index", "advanced": False},
        }

    def build(
        self,
        embedding: Embeddings,
        redis_server_url: str,
        redis_index_name: str,
        # NOTE(review): config marks this field is_list=True, so callers
        # presumably pass a list of Documents despite the scalar annotation
        # (the PGVector sibling annotates Optional[List[Document]]) — confirm.
        documents: Optional[Document] = None,
    ) -> VectorStore:
        """
        Builds the Vector Store or BaseRetriever object.

        Args:
        - embedding (Embeddings): The embeddings to use for the Vector Store.
        - documents (Optional[Document]): The documents to use for the Vector Store.
        - redis_index_name (str): The name of the Redis index.
        - redis_server_url (str): The URL for the Redis server.

        Returns:
        - VectorStore: The Vector Store object.
        """
        return Redis.from_documents(
            documents=documents,  # type: ignore
            embedding=embedding,
            redis_url=redis_server_url,
            index_name=redis_index_name,
        )

View file

@ -1,10 +1,10 @@
from typing import Optional, Union
from langflow import CustomComponent
from langchain.schema import BaseRetriever, Document
from langchain.vectorstores import Vectara
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.schema import BaseRetriever
from langflow import CustomComponent
class VectaraComponent(CustomComponent):

View file

@ -0,0 +1,76 @@
from typing import Optional, List
from langflow import CustomComponent
from langchain.vectorstores.pgvector import PGVector
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.embeddings.base import Embeddings
class PostgresqlVectorComponent(CustomComponent):
    """
    A custom component for implementing a Vector Store using PostgreSQL.
    """

    display_name: str = "PGVector"
    description: str = "Implementation of Vector Store using PostgreSQL"
    documentation = (
        "https://python.langchain.com/docs/integrations/vectorstores/pgvector"
    )
    # Annotated so pydantic v2 treats this as a model field; matches the
    # ``beta: bool = True`` style used by the sibling components.
    beta: bool = True

    def build_config(self):
        """
        Builds the configuration for the component.

        Returns:
        - dict: A dictionary containing the configuration options for the component.
        """
        # NOTE(review): the original config also declared an "index_name"
        # field, but build() only accepts ``collection_name``, so the extra
        # field was dead; it has been removed. The code editor is shown here
        # ("show": True) while sibling components hide it — confirm intended.
        return {
            "code": {"show": True, "display_name": "Code"},
            "documents": {"display_name": "Documents", "is_list": True},
            "embedding": {"display_name": "Embedding"},
            "pg_server_url": {
                "display_name": "PostgreSQL Server Connection String",
                "advanced": False,
            },
            "collection_name": {"display_name": "Table", "advanced": False},
        }

    def build(
        self,
        embedding: Embeddings,
        pg_server_url: str,
        collection_name: str,
        documents: Optional[List[Document]] = None,
    ) -> VectorStore:
        """
        Builds the Vector Store or BaseRetriever object.

        Args:
        - embedding (Embeddings): The embeddings to use for the Vector Store.
        - documents (Optional[Document]): The documents to use for the Vector Store.
        - collection_name (str): The name of the PG table.
        - pg_server_url (str): The URL for the PG server.

        Returns:
        - VectorStore: The Vector Store object.
        """
        try:
            # Without documents, connect to an already-populated index.
            if documents is None:
                return PGVector.from_existing_index(
                    embedding=embedding,
                    collection_name=collection_name,
                    connection_string=pg_server_url,
                )
            return PGVector.from_documents(
                embedding=embedding,
                documents=documents,
                collection_name=collection_name,
                connection_string=pg_server_url,
            )
        except Exception as e:
            # Chain the original exception so the root cause is preserved.
            raise RuntimeError(f"Failed to build PGVector: {e}") from e

View file

@ -14,14 +14,14 @@ agents:
SQLAgent:
documentation: ""
chains:
LLMChain:
documentation: "https://python.langchain.com/docs/modules/chains/foundational/llm_chain"
# LLMChain:
# documentation: "https://python.langchain.com/docs/modules/chains/foundational/llm_chain"
LLMMathChain:
documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_math"
LLMCheckerChain:
documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_checker"
ConversationChain:
documentation: ""
# ConversationChain:
# documentation: ""
SeriesCharacterChain:
documentation: ""
MidJourneyPromptChain:
@ -106,6 +106,9 @@ embeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere"
VertexAIEmbeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/google_vertex_ai_palm"
AmazonBedrockEmbeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock"
llms:
OpenAI:
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai"
@ -265,8 +268,8 @@ retrievers:
# ZepRetriever:
# documentation: "https://python.langchain.com/docs/modules/data_connection/retrievers/integrations/zep_memorystore"
vectorstores:
Chroma:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma"
# Chroma:
# documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma"
Qdrant:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant"
Weaviate:

View file

@ -14,25 +14,6 @@
# "BaseMemory": BaseMemory,
# "BaseChatMemory": BaseChatMemory,
# }
from .constants import (
Tool,
PromptTemplate,
Chain,
BaseChatMemory,
BaseLLM,
BaseLoader,
BaseMemory,
BaseOutputParser,
BaseRetriever,
VectorStore,
Embeddings,
TextSplitter,
Document,
AgentExecutor,
NestedDict,
Data,
Object,
)
__all__ = [
"NestedDict",
@ -42,6 +23,7 @@ __all__ = [
"Chain",
"BaseChatMemory",
"BaseLLM",
"BaseLanguageModel",
"BaseLoader",
"BaseMemory",
"BaseOutputParser",
@ -52,4 +34,7 @@ __all__ = [
"Document",
"AgentExecutor",
"Object",
"Callable",
"BasePromptTemplate",
"ChatPromptTemplate",
]

View file

@ -1,11 +1,11 @@
from typing import Dict, Union
from typing import Callable, Dict, Union
from langchain.agents.agent import AgentExecutor
from langchain.chains.base import Chain
from langchain.document_loaders.base import BaseLoader
from langchain.llms.base import BaseLLM
from langchain.llms.base import BaseLanguageModel, BaseLLM
from langchain.memory.chat_memory import BaseChatMemory
from langchain.prompts import PromptTemplate
from langchain.prompts import BasePromptTemplate, ChatPromptTemplate, PromptTemplate
from langchain.schema import BaseOutputParser, BaseRetriever, Document
from langchain.schema.embeddings import Embeddings
from langchain.schema.memory import BaseMemory
@ -30,7 +30,10 @@ LANGCHAIN_BASE_TYPES = {
"AgentExecutor": AgentExecutor,
"Tool": Tool,
"BaseLLM": BaseLLM,
"BaseLanguageModel": BaseLanguageModel,
"PromptTemplate": PromptTemplate,
"ChatPromptTemplate": ChatPromptTemplate,
"BasePromptTemplate": BasePromptTemplate,
"BaseLoader": BaseLoader,
"Document": Document,
"TextSplitter": TextSplitter,
@ -53,4 +56,5 @@ CUSTOM_COMPONENT_SUPPORTED_TYPES = {
"NestedDict": NestedDict,
"Data": Data,
"Object": Object,
"Callable": Callable,
}

View file

@ -204,6 +204,10 @@ class ChainVertex(Vertex):
# Temporarily remove the code from the params
self.params.pop("code", None)
# Check if the chain requires a PromptVertex
# Temporarily remove "code" from the params
self.params.pop("code", None)
for key, value in self.params.items():
if isinstance(value, PromptVertex):
# Build the PromptVertex, passing the tools if available

View file

@ -1,4 +1,4 @@
from typing import Dict, List, Optional
from typing import ClassVar, Dict, List, Optional
from langchain.agents import types
@ -15,7 +15,7 @@ from langflow.utils.util import build_template_from_class, build_template_from_m
class AgentCreator(LangChainTypeCreator):
type_name: str = "agents"
from_method_nodes = {"ZeroShotAgent": "from_llm_and_tools"}
from_method_nodes: ClassVar[Dict] = {"ZeroShotAgent": "from_llm_and_tools"}
@property
def frontend_node_class(self) -> type[AgentFrontendNode]:

View file

@ -16,8 +16,10 @@ from langchain.agents.agent_toolkits import (
)
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
from langchain.agents.agent_toolkits.pandas.prompt import (
from langchain_experimental.agents.agent_toolkits.pandas.prompt import (
PREFIX as PANDAS_PREFIX,
)
from langchain_experimental.agents.agent_toolkits.pandas.prompt import (
SUFFIX_WITH_DF as PANDAS_SUFFIX,
)
from langchain.agents.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX
@ -31,7 +33,7 @@ from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
from langchain.base_language import BaseLanguageModel
from langchain.memory.chat_memory import BaseChatMemory
from langchain.sql_database import SQLDatabase
from langchain.tools.python.tool import PythonAstREPLTool
from langchain_experimental.tools.python.tool import PythonAstREPLTool
from langchain.tools.sql_database.prompt import QUERY_CHECKER
from langflow.interface.base import CustomAgentExecutor
@ -103,9 +105,9 @@ class CSVAgent(CustomAgentExecutor):
tools,
prefix=PANDAS_PREFIX,
suffix=PANDAS_SUFFIX,
input_variables=["df", "input", "agent_scratchpad"],
input_variables=["df_head", "input", "agent_scratchpad"],
)
partial_prompt = prompt.partial(df=str(df.head()))
partial_prompt = prompt.partial(df_head=str(df.head()))
llm_chain = LLMChain(
llm=llm,
prompt=partial_prompt,

View file

@ -1,4 +1,4 @@
from typing import Any, Dict, List, Optional, Type
from typing import Any, ClassVar, Dict, List, Optional, Type
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
@ -9,7 +9,7 @@ from langflow.template.frontend_node.chains import ChainFrontendNode
from loguru import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
from langchain import chains
from langchain_experimental.sql import SQLDatabaseChain # type: ignore
from langchain_experimental.sql import SQLDatabaseChain
# Assuming necessary imports for Field, Template, and FrontendNode classes
@ -22,7 +22,7 @@ class ChainCreator(LangChainTypeCreator):
return ChainFrontendNode
#! We need to find a better solution for this
from_method_nodes = {
from_method_nodes: ClassVar[Dict] = {
"ConversationalRetrievalChain": "from_llm",
"LLMCheckerChain": "from_llm",
"SQLDatabaseChain": "from_llm",

View file

@ -4,7 +4,7 @@ from langchain.chains import ConversationChain
from langchain.memory.buffer import ConversationBufferMemory
from langchain.schema import BaseMemory
from langflow.interface.base import CustomChain
from pydantic import Field, root_validator
from pydantic.v1 import Field, root_validator
from langchain.chains.question_answering import load_qa_chain
from langflow.interface.utils import extract_input_variables_from_prompt
from langchain.base_language import BaseLanguageModel

View file

@ -1,10 +1,10 @@
import ast
from typing import Any, Optional
from pydantic import BaseModel
from typing import Any, ClassVar, Optional
from fastapi import HTTPException
from langflow.utils import validate
from langflow.interface.custom.code_parser import CodeParser
from langflow.utils import validate
class ComponentCodeNullError(HTTPException):
@ -15,16 +15,17 @@ class ComponentFunctionEntrypointNameNullError(HTTPException):
pass
class Component(BaseModel):
ERROR_CODE_NULL = "Python code must be provided."
ERROR_FUNCTION_ENTRYPOINT_NAME_NULL = "The name of the entrypoint function must be provided."
class Component:
ERROR_CODE_NULL: ClassVar[str] = "Python code must be provided."
ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = "The name of the entrypoint function must be provided."
code: Optional[str]
function_entrypoint_name = "build"
code: Optional[str] = None
_function_entrypoint_name: str = "build"
field_config: dict = {}
def __init__(self, **data):
super().__init__(**data)
for key, value in data.items():
setattr(self, key, value)
def get_code_tree(self, code: str):
parser = CodeParser(code)
@ -37,7 +38,7 @@ class Component(BaseModel):
detail={"error": self.ERROR_CODE_NULL, "traceback": ""},
)
if not self.function_entrypoint_name:
if not self._function_entrypoint_name:
raise ComponentFunctionEntrypointNameNullError(
status_code=400,
detail={
@ -46,7 +47,7 @@ class Component(BaseModel):
},
)
return validate.create_function(self.code, self.function_entrypoint_name)
return validate.create_function(self.code, self._function_entrypoint_name)
def build_template_config(self, attributes) -> dict:
template_config = {}

View file

@ -1,27 +1,28 @@
from typing import Any, Callable, List, Optional, Union
from typing import Any, Callable, ClassVar, List, Optional, Union
from uuid import UUID
import yaml
from fastapi import HTTPException
from langflow.field_typing.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
from langflow.interface.custom.component import Component
from langflow.interface.custom.directory_reader import DirectoryReader
from langflow.services.deps import get_db_service
from langflow.interface.custom.utils import extract_inner_type
from langflow.interface.custom.utils import extract_inner_type, extract_union_types
from langflow.services.database.models.flow import Flow
from langflow.services.database.utils import session_getter
from langflow.services.getters import get_db_service
from langflow.utils import validate
from langflow.services.database.utils import session_getter
from langflow.services.database.models.flow import Flow
from pydantic import Extra
import yaml
class CustomComponent(Component, extra=Extra.allow):
code: Optional[str]
class CustomComponent(Component):
display_name: Optional[str] = "Custom Component"
description: Optional[str] = "Custom Component"
code: Optional[str] = None
field_config: dict = {}
code_class_base_inheritance = "CustomComponent"
function_entrypoint_name = "build"
code_class_base_inheritance: ClassVar[str] = "CustomComponent"
function_entrypoint_name: ClassVar[str] = "build"
function: Optional[Callable] = None
return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
return_type_valid_list: List[str] = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
repr_value: Optional[Any] = ""
user_id: Optional[Union[UUID, str]] = None
@ -138,9 +139,7 @@ class CustomComponent(Component, extra=Extra.allow):
return [return_type] if return_type in self.return_type_valid_list else []
# If the return type is a Union, then we need to parse it
return_type = return_type.replace("Union", "").replace("[", "").replace("]", "")
return_type = return_type.split(",")
return_type = [item.strip() for item in return_type]
return_type = extract_union_types(return_type)
return [item for item in return_type if item in self.return_type_valid_list]
@property
@ -179,8 +178,7 @@ class CustomComponent(Component, extra=Extra.allow):
return validate.create_function(self.code, self.function_entrypoint_name)
def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> Any:
from langflow.processing.process import build_sorted_vertices
from langflow.processing.process import process_tweaks
from langflow.processing.process import build_sorted_vertices, process_tweaks
db_service = get_db_service()
with session_getter(db_service) as session:
@ -189,7 +187,7 @@ class CustomComponent(Component, extra=Extra.allow):
raise ValueError(f"Flow {flow_id} not found")
if tweaks:
graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks)
return build_sorted_vertices(graph_data)
return build_sorted_vertices(graph_data, self.user_id)
def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]:
if not self.user_id:

View file

@ -10,7 +10,7 @@ class ClassCodeDetails(BaseModel):
"""
name: str
doc: Optional[str]
doc: Optional[str] = None
bases: list
attributes: list
methods: list
@ -23,7 +23,7 @@ class CallableCodeDetails(BaseModel):
"""
name: str
doc: Optional[str]
doc: Optional[str] = None
args: list
body: list
return_type: Optional[str]
return_type: Optional[str] = None

View file

@ -8,3 +8,14 @@ def extract_inner_type(return_type: str) -> str:
if match := re.match(r"list\[(.*)\]", return_type, re.IGNORECASE):
return match[1]
return return_type
def extract_union_types(return_type: str) -> list[str]:
    """
    Splits a ``Union[...]`` type-hint string into its member type names.

    Example: ``"Union[str, int]"`` -> ``["str", "int"]``. A plain type
    string (no Union wrapper) is returned as a single-element list.

    Args:
    - return_type (str): The type-hint string to split.

    Returns:
    - list[str]: The stripped member type names.
    """
    # Drop the Union wrapper and its brackets, then split on commas.
    # Note: nested generics (e.g. "Union[List[str], int]") lose their own
    # brackets too — same limitation as the surrounding callers expect.
    cleaned = return_type.replace("Union", "").replace("[", "").replace("]", "")
    return [part.strip() for part in cleaned.split(",")]

View file

@ -1,40 +1,39 @@
import json
from typing import TYPE_CHECKING, Any, Callable, Dict, Sequence, Type
import orjson
from typing import Any, Callable, Dict, Sequence, Type, TYPE_CHECKING
from langchain.schema import Document
from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.agents.tools import BaseTool
from langchain.chains.base import Chain
from langchain.document_loaders.base import BaseLoader
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from loguru import logger
from pydantic import ValidationError
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.custom_lists import CUSTOM_NODES
from langflow.interface.importing.utils import (
get_function,
get_function_custom,
import_by_type,
)
from langflow.interface.initialize.llm import initialize_vertexai
from langflow.interface.initialize.utils import (
handle_format_kwargs,
handle_node_type,
handle_partial_variables,
)
from langflow.interface.initialize.vector_store import vecstore_initializer
from pydantic import ValidationError
from langflow.interface.importing.utils import (
get_function,
get_function_custom,
import_by_type,
)
from langflow.interface.custom_lists import CUSTOM_NODES
from langflow.interface.agents.base import agent_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.utils import load_file_into_dict
from langflow.interface.wrappers.base import wrapper_creator
from langflow.utils import validate
from langchain.chains.base import Chain
from langchain.vectorstores.base import VectorStore
from langchain.document_loaders.base import BaseLoader
from loguru import logger
if TYPE_CHECKING:
from langflow import CustomComponent
@ -279,12 +278,14 @@ def instantiate_embedding(node_type, class_object, params: Dict):
try:
return class_object(**params)
except ValidationError:
params = {key: value for key, value in params.items() if key in class_object.__fields__}
params = {key: value for key, value in params.items() if key in class_object.model_fields}
return class_object(**params)
def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict):
search_kwargs = params.pop("search_kwargs", {})
if search_kwargs == {"yourkey": "value"}:
search_kwargs = {}
# clean up docs or texts to have only documents
if "texts" in params:
params["documents"] = params.pop("texts")

View file

@ -184,7 +184,7 @@ def initialize_chroma(class_object: Type[Chroma], params: dict):
params.pop("documents", None)
params.pop("texts", None)
params["embedding_function"] = params.pop("embedding")
chromadb = class_object(**params)
chromadb_instance = class_object(**params)
else:
if "texts" in params:
params["documents"] = params.pop("texts")
@ -199,10 +199,10 @@ def initialize_chroma(class_object: Type[Chroma], params: dict):
if value is None:
doc.metadata[key] = ""
chromadb = class_object.from_documents(**params)
chromadb_instance = class_object.from_documents(**params)
if persist:
chromadb.persist()
return chromadb
chromadb_instance.persist()
return chromadb_instance
def initialize_qdrant(class_object: Type[Qdrant], params: dict):

View file

@ -1,4 +1,4 @@
from typing import Dict, List, Optional, Type
from typing import ClassVar, Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import memory_type_to_cls_dict
@ -14,7 +14,7 @@ from langflow.custom.customs import get_custom_nodes
class MemoryCreator(LangChainTypeCreator):
type_name: str = "memories"
from_method_nodes = {
from_method_nodes: ClassVar[Dict] = {
"ZepChatMessageHistory": "__init__",
"SQLiteEntityStore": "__init__",
}

View file

@ -1,4 +1,4 @@
from typing import Dict, List, Optional, Type
from typing import ClassVar, Dict, List, Optional, Type
from langchain import output_parsers
@ -13,7 +13,7 @@ from langflow.utils.util import build_template_from_class, build_template_from_m
class OutputParserCreator(LangChainTypeCreator):
type_name: str = "output_parsers"
from_method_nodes = {
from_method_nodes: ClassVar[Dict] = {
"StructuredOutputParser": "from_response_schemas",
}

View file

@ -1,7 +1,7 @@
from typing import Dict, List, Optional, Type
from langchain.prompts import PromptTemplate
from pydantic import root_validator
from pydantic.v1 import root_validator
from langflow.interface.utils import extract_input_variables_from_prompt

View file

@ -1,4 +1,4 @@
from typing import Any, Dict, List, Optional, Type
from typing import Any, ClassVar, Dict, List, Optional, Type
from langchain import retrievers
@ -14,7 +14,10 @@ from langflow.utils.util import build_template_from_method, build_template_from_
class RetrieverCreator(LangChainTypeCreator):
type_name: str = "retrievers"
from_method_nodes = {"MultiQueryRetriever": "from_llm", "ZepRetriever": "__init__"}
from_method_nodes: ClassVar[Dict] = {
"MultiQueryRetriever": "from_llm",
"ZepRetriever": "__init__",
}
@property
def frontend_node_class(self) -> Type[RetrieverFrontendNode]:

View file

@ -1,9 +1,12 @@
from typing import Dict, Tuple
from typing import Dict, Tuple, Optional, Union
from langflow.graph import Graph
from loguru import logger
from uuid import UUID
def build_sorted_vertices(data_graph) -> Tuple[Graph, Dict]:
def build_sorted_vertices(
data_graph, user_id: Optional[Union[str, UUID]] = None
) -> Tuple[Graph, Dict]:
"""
Build langchain object from data_graph.
"""
@ -13,7 +16,7 @@ def build_sorted_vertices(data_graph) -> Tuple[Graph, Dict]:
sorted_vertices = graph.topological_sort()
artifacts = {}
for vertex in sorted_vertices:
vertex.build()
vertex.build(user_id=user_id)
if vertex.artifacts:
artifacts.update(vertex.artifacts)
return graph, artifacts

View file

@ -21,6 +21,7 @@ from langflow.template.field.base import TemplateField
from langflow.template.template.base import Template
from langflow.utils import util
from langflow.utils.util import build_template_from_class
from langflow.utils.logger import logger
TOOL_INPUTS = {
"str": TemplateField(
@ -33,7 +34,7 @@ TOOL_INPUTS = {
),
"llm": TemplateField(field_type="BaseLanguageModel", required=True, is_list=False, show=True),
"func": TemplateField(
field_type="function",
field_type="Callable",
required=True,
is_list=False,
show=True,
@ -70,7 +71,11 @@ class ToolCreator(LangChainTypeCreator):
all_tools = {}
for tool, tool_fcn in ALL_TOOLS_NAMES.items():
tool_params = get_tool_params(tool_fcn)
try:
tool_params = get_tool_params(tool_fcn)
except Exception:
logger.error(f"Error getting params for tool {tool}")
continue
tool_name = tool_params.get("name") or tool
@ -116,7 +121,7 @@ class ToolCreator(LangChainTypeCreator):
elif tool_type in CUSTOM_TOOLS:
# Get custom tool params
params = self.type_to_loader_dict[name]["params"] # type: ignore
base_classes = ["function"]
base_classes = ["Callable"]
if node := customs.get_custom_nodes("tools").get(tool_type):
return node
elif tool_type in FILE_TOOLS:
@ -126,10 +131,15 @@ class ToolCreator(LangChainTypeCreator):
tool_dict = build_template_from_class(tool_type, OTHER_TOOLS)
fields = tool_dict["template"]
# _type is the only key in fields
# return None
if len(fields) == 1 and "_type" in fields:
return None
# Pop unnecessary fields and add name
fields.pop("_type") # type: ignore
fields.pop("return_direct") # type: ignore
fields.pop("verbose") # type: ignore
fields.pop("return_direct", None) # type: ignore
fields.pop("verbose", None) # type: ignore
tool_params = {
"name": fields.pop("name")["value"], # type: ignore

View file

@ -1,7 +1,7 @@
from typing import Callable, Optional
from langflow.interface.importing.utils import get_function
from pydantic import BaseModel, validator
from pydantic.v1 import BaseModel, validator
from langflow.utils import validate
from langchain.agents.tools import Tool

View file

@ -1,6 +1,7 @@
import ast
import contextlib
from typing import Any, List
from typing import Any, List, Union, Optional
from uuid import UUID
from langflow.api.utils import get_new_key
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
@ -202,7 +203,9 @@ def update_attributes(frontend_node, template_config):
frontend_node[attribute] = template_config[attribute]
def build_field_config(custom_component: CustomComponent):
def build_field_config(
custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None
):
"""Build the field configuration for a custom component"""
try:
@ -212,7 +215,7 @@ def build_field_config(custom_component: CustomComponent):
return {}
try:
return custom_class().build_config()
return custom_class(user_id=user_id).build_config()
except Exception as exc:
logger.error(f"Error while building field config: {str(exc)}")
return {}
@ -296,7 +299,9 @@ def add_output_types(frontend_node, return_types: List[str]):
frontend_node.get("output_types").append(return_type)
def build_langchain_template_custom_component(custom_component: CustomComponent):
def build_langchain_template_custom_component(
custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None
):
"""Build a custom component template for the langchain"""
try:
logger.debug("Building custom component template")
@ -309,7 +314,7 @@ def build_langchain_template_custom_component(custom_component: CustomComponent)
update_attributes(frontend_node, template_config)
logger.debug("Updated attributes")
field_config = build_field_config(custom_component)
field_config = build_field_config(custom_component, user_id=user_id)
logger.debug("Built field config")
entrypoint_args = custom_component.get_function_entrypoint_args

View file

@ -1,14 +1,13 @@
from typing import Dict, List, Optional, Type
from langchain import utilities
from loguru import logger
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.services.deps import get_settings_service
from langflow.template.frontend_node.utilities import UtilitiesFrontendNode
from loguru import logger
from langflow.utils.util import build_template_from_class
@ -28,9 +27,14 @@ class UtilityCreator(LangChainTypeCreator):
"""
if self.type_dict is None:
settings_service = get_settings_service()
self.type_dict = {
utility_name: import_class(f"langchain.utilities.{utility_name}") for utility_name in utilities.__all__
}
self.type_dict = {}
for utility_name in utilities.__all__:
try:
imported = import_class(f"langchain.utilities.{utility_name}")
self.type_dict[utility_name] = imported
except Exception:
pass
self.type_dict["SQLDatabase"] = utilities.SQLDatabase
# Filter according to settings.utilities
self.type_dict = {

View file

@ -1,4 +1,4 @@
from typing import Dict, List, Optional
from typing import ClassVar, Dict, List, Optional
from langchain.utilities import requests, sql_database
@ -10,7 +10,7 @@ from langflow.utils.util import build_template_from_class, build_template_from_m
class WrapperCreator(LangChainTypeCreator):
type_name: str = "wrappers"
from_method_nodes = {"SQLDatabase": "from_uri"}
from_method_nodes: ClassVar[Dict] = {"SQLDatabase": "from_uri"}
@property
def type_to_loader_dict(self) -> Dict:

View file

@ -16,7 +16,7 @@ from langflow.services.deps import get_session, get_settings_service
from sqlmodel import Session
from cryptography.fernet import Fernet
oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login")
oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login", auto_error=False)
API_KEY_NAME = "x-api-key"
@ -66,6 +66,30 @@ async def api_key_security(
async def get_current_user(
    token: str = Security(oauth2_login),
    query_param: str = Security(api_key_query),
    header_param: str = Security(api_key_header),
    db: Session = Depends(get_session),
) -> User:
    """
    Resolve the current user from either a JWT bearer token or an API key.

    A bearer token (if present) takes precedence; otherwise the API key may
    arrive via query parameter or header. Raises HTTP 403 when neither
    credential is supplied or the API key is invalid.
    """
    if token:
        # JWT path: delegate validation/lookup to the JWT-specific resolver.
        return await get_current_user_by_jwt(token, db)
    else:
        if not query_param and not header_param:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="An API key must be passed as query or header",
            )
        # api_key_security presumably returns None for an unknown key — the
        # falsy check below treats that as an auth failure; confirm its contract.
        user = await api_key_security(query_param, header_param, db)
        if user:
            return user

        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Invalid or missing API key",
        )
async def get_current_user_by_jwt(
token: Annotated[str, Depends(oauth2_login)],
db: Session = Depends(get_session),
) -> User:

View file

@ -7,7 +7,7 @@ import tempfile
from collections import OrderedDict
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict
from appdirs import user_cache_dir
from platformdirs import user_cache_dir
from fastapi import UploadFile
from langflow.api.v1.schemas import BuildStatus
from langflow.services.database.models.base import orjson_dumps

View file

@ -19,7 +19,6 @@ def orjson_dumps(v, *, default=None, sort_keys=False, indent_2=True):
class SQLModelSerializable(SQLModel):
class Config:
orm_mode = True
json_loads = orjson.loads
json_dumps = orjson_dumps
# TODO[pydantic]: The following keys were removed: `json_loads`, `json_dumps`.
# Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
pass

View file

@ -1,7 +1,7 @@
# Path: src/backend/langflow/database/models/flow.py
from langflow.services.database.models.base import SQLModelSerializable
from pydantic import validator
from pydantic import field_validator
from sqlmodel import Field, JSON, Column, Relationship
from uuid import UUID, uuid4
@ -13,11 +13,11 @@ if TYPE_CHECKING:
class FlowBase(SQLModelSerializable):
name: str = Field(index=True)
description: Optional[str] = Field(index=True)
description: Optional[str] = Field(index=True, nullable=True, default=None)
data: Optional[Dict] = Field(default=None, nullable=True)
is_component: Optional[bool] = Field(default=False, nullable=True)
@validator("data")
@field_validator("data")
def validate_json(v):
if not v:
return v

View file

@ -50,6 +50,9 @@ def update_user(user_db: Optional[User], user: UserUpdate, db: Session = Depends
def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)):
user_data = UserUpdate(last_login_at=datetime.now(timezone.utc)) # type: ignore
user = get_user_by_id(db, user_id)
return update_user(user, user_data, db)
try:
user_data = UserUpdate(last_login_at=datetime.now(timezone.utc)) # type: ignore
user = get_user_by_id(db, user_id)
return update_user(user, user_data, db)
except Exception:
pass

View file

@ -20,7 +20,7 @@ class User(SQLModelSerializable, table=True):
is_superuser: bool = Field(default=False)
create_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow)
last_login_at: Optional[datetime] = Field()
last_login_at: Optional[datetime] = Field(nullable=True)
api_keys: list["ApiKey"] = Relationship(
back_populates="user",
sa_relationship_kwargs={"cascade": "delete"},
@ -42,13 +42,13 @@ class UserRead(SQLModel):
is_superuser: bool = Field()
create_at: datetime = Field()
updated_at: datetime = Field()
last_login_at: Optional[datetime] = Field()
last_login_at: Optional[datetime] = Field(nullable=True)
class UserUpdate(SQLModel):
username: Optional[str] = Field()
profile_image: Optional[str] = Field()
password: Optional[str] = Field()
is_active: Optional[bool] = Field()
is_superuser: Optional[bool] = Field()
last_login_at: Optional[datetime] = Field()
username: Optional[str] = None
profile_image: Optional[str] = None
password: Optional[str] = None
is_active: Optional[bool] = None
is_superuser: Optional[bool] = None
last_login_at: Optional[datetime] = None

View file

@ -91,7 +91,7 @@ class DatabaseService(Service):
legacy_tables = ["flowstyle"]
for table, model in model_mapping.items():
expected_columns = list(model.__fields__.keys())
expected_columns = list(model.model_fields.keys())
try:
available_columns = [col["name"] for col in inspector.get_columns(table)]

View file

@ -1,16 +1,18 @@
import secrets
from pathlib import Path
from typing import Optional
import secrets
from loguru import logger
from passlib.context import CryptContext
from pydantic import Field, validator
from pydantic_settings import BaseSettings
from langflow.services.settings.constants import (
DEFAULT_SUPERUSER,
DEFAULT_SUPERUSER_PASSWORD,
)
from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file
from pydantic import BaseSettings, Field, validator
from passlib.context import CryptContext
from loguru import logger
class AuthSettings(BaseSettings):
# Login settings
@ -18,8 +20,7 @@ class AuthSettings(BaseSettings):
SECRET_KEY: str = Field(
default="",
description="Secret key for JWT. If not provided, a random one will be generated.",
env="LANGFLOW_SECRET_KEY",
allow_mutation=False,
frozen=False,
)
ALGORITHM: str = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
@ -37,7 +38,7 @@ class AuthSettings(BaseSettings):
SUPERUSER: str = DEFAULT_SUPERUSER
SUPERUSER_PASSWORD: str = DEFAULT_SUPERUSER_PASSWORD
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
pwd_context: CryptContext = CryptContext(schemes=["bcrypt"], deprecated="auto")
class Config:
validate_assignment = True

View file

@ -7,7 +7,8 @@ from typing import Optional, List
from pathlib import Path
import yaml
from pydantic import BaseSettings, root_validator, validator
from pydantic import field_validator, validator
from pydantic_settings import BaseSettings, SettingsConfigDict
from loguru import logger
# BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
@ -60,14 +61,14 @@ class Settings(BaseSettings):
@validator("CONFIG_DIR", pre=True, allow_reuse=True)
def set_langflow_dir(cls, value):
if not value:
import appdirs
from platformdirs import user_cache_dir
# Define the app name and author
app_name = "langflow"
app_author = "logspace"
# Get the cache directory for the application
cache_dir = appdirs.user_cache_dir(app_name, app_author)
cache_dir = user_cache_dir(app_name, app_author)
# Create a .langflow directory inside the cache directory
value = Path(cache_dir)
@ -112,7 +113,7 @@ class Settings(BaseSettings):
return value
@validator("COMPONENTS_PATH", pre=True)
@field_validator("COMPONENTS_PATH", mode="before")
def set_components_path(cls, value):
if os.getenv("LANGFLOW_COMPONENTS_PATH"):
logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
@ -137,17 +138,17 @@ class Settings(BaseSettings):
logger.debug(f"Components path: {value}")
return value
class Config:
validate_assignment = True
extra = "ignore"
env_prefix = "LANGFLOW_"
model_config = SettingsConfigDict(
validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_"
)
@root_validator(allow_reuse=True)
def validate_lists(cls, values):
for key, value in values.items():
if key != "dev" and not value:
values[key] = []
return values
# @model_validator()
# @classmethod
# def validate_lists(cls, values):
# for key, value in values.items():
# if key != "dev" and not value:
# values[key] = []
# return values
def update_from_yaml(self, file_path: str, dev: bool = False):
new_settings = load_settings_from_yaml(file_path)
@ -221,7 +222,7 @@ def load_settings_from_yaml(file_path: str) -> Settings:
settings_dict = {k.upper(): v for k, v in settings_dict.items()}
for key in settings_dict:
if key not in Settings.__fields__.keys():
if key not in Settings.model_fields.keys():
raise KeyError(f"Key {key} not found in settings")
logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")

View file

@ -28,7 +28,7 @@ class SettingsService(Service):
settings_dict = {k.upper(): v for k, v in settings_dict.items()}
for key in settings_dict:
if key not in Settings.__fields__.keys():
if key not in Settings.model_fields.keys():
raise KeyError(f"Key {key} not found in settings")
logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")

View file

@ -0,0 +1,177 @@
import contextlib
import json
import os
from typing import Optional, List
from pathlib import Path
import yaml
from pydantic import validator, model_validator
from pydantic_settings import BaseSettings
from langflow.utils.logger import logger
BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
class Settings(BaseSettings):
    """Global Langflow settings.

    Values come from (in increasing priority) the class defaults, a YAML
    config file (see ``load_settings_from_yaml``), and ``LANGFLOW_*``
    environment variables (``env_prefix`` below).
    """

    # Component registries, keyed by display name; populated from config.yaml.
    CHAINS: dict = {}
    AGENTS: dict = {}
    PROMPTS: dict = {}
    LLMS: dict = {}
    TOOLS: dict = {}
    MEMORIES: dict = {}
    EMBEDDINGS: dict = {}
    VECTORSTORES: dict = {}
    DOCUMENTLOADERS: dict = {}
    WRAPPERS: dict = {}
    RETRIEVERS: dict = {}
    TOOLKITS: dict = {}
    TEXTSPLITTERS: dict = {}
    UTILITIES: dict = {}
    OUTPUT_PARSERS: dict = {}
    CUSTOM_COMPONENTS: dict = {}

    DEV: bool = False
    DATABASE_URL: Optional[str] = None
    CACHE: str = "InMemoryCache"
    REMOVE_API_KEYS: bool = False
    COMPONENTS_PATH: List[str] = []

    @validator("DATABASE_URL", pre=True)
    def set_database_url(cls, value):
        """Fall back to LANGFLOW_DATABASE_URL, then to a local sqlite file."""
        if not value:
            logger.debug(
                "No database_url provided, trying LANGFLOW_DATABASE_URL env variable"
            )
            if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
                value = langflow_database_url
                logger.debug("Using LANGFLOW_DATABASE_URL env variable.")
            else:
                logger.debug("No DATABASE_URL env variable, using sqlite database")
                value = "sqlite:///./langflow.db"

        return value

    @validator("COMPONENTS_PATH", pre=True)
    def set_components_path(cls, value):
        """Merge LANGFLOW_COMPONENTS_PATH and the bundled default into the list."""
        if os.getenv("LANGFLOW_COMPONENTS_PATH"):
            logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
            langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH")
            if (
                Path(langflow_component_path).exists()
                and langflow_component_path not in value
            ):
                # NOTE(review): os.getenv always returns a string, so the list
                # branch below looks unreachable — kept for safety; confirm.
                if isinstance(langflow_component_path, list):
                    for path in langflow_component_path:
                        if path not in value:
                            value.append(path)
                    logger.debug(
                        f"Extending {langflow_component_path} to components_path"
                    )
                elif langflow_component_path not in value:
                    value.append(langflow_component_path)
                    logger.debug(
                        f"Appending {langflow_component_path} to components_path"
                    )

        if not value:
            value = [BASE_COMPONENTS_PATH]
            logger.debug("Setting default components path to components_path")
        elif BASE_COMPONENTS_PATH not in value:
            value.append(BASE_COMPONENTS_PATH)
            logger.debug("Adding default components path to components_path")

        logger.debug(f"Components path: {value}")
        return value

    class Config:
        validate_assignment = True
        extra = "ignore"
        env_prefix = "LANGFLOW_"

    @model_validator(mode="after")
    def validate_lists(self):
        """Coerce falsy non-DEV fields to empty lists (legacy root_validator behavior).

        Pydantic v2 ``mode="after"`` model validators receive the model
        instance, not a values dict — the previous ``(cls, values)`` signature
        raised ``AttributeError`` on ``values.items()``. The old
        ``key != "dev"`` guard also never matched the upper-case field names,
        so the ``DEV`` flag itself could be clobbered.
        """
        for key in type(self).model_fields:
            if key != "DEV" and not getattr(self, key):
                # Bypass validate_assignment so validators don't re-run here.
                object.__setattr__(self, key, [])
        return self

    def update_from_yaml(self, file_path: str, dev: bool = False):
        """Replace the component registries with the contents of a YAML file."""
        new_settings = load_settings_from_yaml(file_path)
        self.CHAINS = new_settings.CHAINS or {}
        self.AGENTS = new_settings.AGENTS or {}
        self.PROMPTS = new_settings.PROMPTS or {}
        self.LLMS = new_settings.LLMS or {}
        self.TOOLS = new_settings.TOOLS or {}
        self.MEMORIES = new_settings.MEMORIES or {}
        self.WRAPPERS = new_settings.WRAPPERS or {}
        self.TOOLKITS = new_settings.TOOLKITS or {}
        self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {}
        self.UTILITIES = new_settings.UTILITIES or {}
        self.EMBEDDINGS = new_settings.EMBEDDINGS or {}
        self.VECTORSTORES = new_settings.VECTORSTORES or {}
        self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {}
        self.RETRIEVERS = new_settings.RETRIEVERS or {}
        self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {}
        self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {}
        self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or []
        self.DEV = dev

    def update_settings(self, **kwargs):
        """Update known settings in place; list-valued settings are extended,
        everything else is replaced. Unknown keys are ignored with a debug log.
        """
        logger.debug("Updating settings")
        for key, value in kwargs.items():
            # value may contain sensitive information, so we don't want to log it
            if not hasattr(self, key):
                logger.debug(f"Key {key} not found in settings")
                continue
            logger.debug(f"Updating {key}")
            if isinstance(getattr(self, key), list):
                # value might be a '[something]' string
                with contextlib.suppress(json.decoder.JSONDecodeError):
                    value = json.loads(str(value))
                if isinstance(value, list):
                    for item in value:
                        if isinstance(item, Path):
                            item = str(item)
                        if item not in getattr(self, key):
                            getattr(self, key).append(item)
                    logger.debug(f"Extended {key}")
                else:
                    if isinstance(value, Path):
                        value = str(value)
                    if value not in getattr(self, key):
                        getattr(self, key).append(value)
                    logger.debug(f"Appended {key}")
            else:
                setattr(self, key, value)
                logger.debug(f"Updated {key}")
            logger.debug(f"{key}: {getattr(self, key)}")
def save_settings_to_yaml(settings: Settings, file_path: str):
    """Serialize ``settings`` to YAML at ``file_path`` (overwrites the file)."""
    with open(file_path, "w") as f:
        # model_dump() is the pydantic v2 replacement for the deprecated .dict()
        settings_dict = settings.model_dump()
        yaml.dump(settings_dict, f)
def load_settings_from_yaml(file_path: str) -> Settings:
    """Load a :class:`Settings` from a YAML file.

    A bare file name (no directory component) is resolved relative to this
    module's directory.

    Raises:
        KeyError: if the YAML contains a key that is not a ``Settings`` field.
    """
    # os.path.dirname is empty for bare names; the previous check
    # ('"/" not in file_path') broke for Windows-style separators.
    if not os.path.dirname(file_path):
        current_path = os.path.dirname(os.path.abspath(__file__))

        file_path = os.path.join(current_path, file_path)

    with open(file_path, "r") as f:
        settings_dict = yaml.safe_load(f)
        # Field names are upper-case on the model.
        settings_dict = {k.upper(): v for k, v in settings_dict.items()}

        for key in settings_dict:
            if key not in Settings.model_fields.keys():
                raise KeyError(f"Key {key} not found in settings")
            logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")

    return Settings(**settings_dict)
settings = load_settings_from_yaml("config.yaml")

View file

@ -206,7 +206,7 @@ class InitializeAgentNode(FrontendNode):
],
)
description: str = """Construct a zero shot agent from an LLM and tools."""
base_classes: list[str] = ["AgentExecutor", "function"]
base_classes: list[str] = ["AgentExecutor", "Callable"]
def to_dict(self):
return super().to_dict()

View file

@ -1,6 +1,6 @@
from collections import defaultdict
import re
from typing import List, Optional
from typing import ClassVar, DefaultDict, Dict, List, Optional
from pydantic import BaseModel, Field
@ -15,10 +15,10 @@ from langflow.utils import constants
class FieldFormatters(BaseModel):
formatters = {
formatters: ClassVar[Dict] = {
"openai_api_key": field_formatters.OpenAIAPIKeyFormatter(),
}
base_formatters = {
base_formatters: ClassVar[Dict] = {
"kwargs": field_formatters.KwargsFormatter(),
"optional": field_formatters.RemoveOptionalFormatter(),
"list": field_formatters.ListTypeFormatter(),
@ -49,7 +49,7 @@ class FrontendNode(BaseModel):
name: str = ""
display_name: str = ""
documentation: str = ""
custom_fields: defaultdict = defaultdict(list)
custom_fields: Optional[DefaultDict[str, List[str]]] = defaultdict(list)
output_types: List[str] = []
field_formatters: FieldFormatters = Field(default_factory=FieldFormatters)
beta: bool = False

View file

@ -87,6 +87,8 @@ class ChainFrontendNode(FrontendNode):
field.required = True
field.show = True
field.advanced = False
field.field_type = "BaseLanguageModel" # temporary fix
field.is_list = False
if field.name == "return_source_documents":
field.required = False
@ -142,7 +144,7 @@ class SeriesCharacterChainNode(FrontendNode):
"Chain",
"ConversationChain",
"SeriesCharacterChain",
"function",
"Callable",
]
@ -243,7 +245,7 @@ class CombineDocsChainNode(FrontendNode):
],
)
description: str = """Load question answering chain."""
base_classes: list[str] = ["BaseCombineDocumentsChain", "function"]
base_classes: list[str] = ["BaseCombineDocumentsChain", "Callable"]
def to_dict(self):
return super().to_dict()

View file

@ -1,4 +1,4 @@
from typing import Optional
from typing import ClassVar, Dict, Optional
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
@ -21,7 +21,7 @@ class DocumentLoaderFrontNode(FrontendNode):
self.base_classes = ["Document"]
self.output_types = ["Document"]
file_path_templates = {
file_path_templates: ClassVar[Dict] = {
"AirbyteJSONLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]),
"CoNLLULoader": build_file_field(suffixes=[".csv"], fileTypes=["csv"]),
"CSVLoader": build_file_field(suffixes=[".csv"], fileTypes=["csv"]),

View file

@ -1,4 +1,4 @@
from typing import Optional
from typing import ClassVar, Dict, Optional
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
from langflow.template.frontend_node.formatter.base import FieldFormatter
@ -21,7 +21,7 @@ class OpenAIAPIKeyFormatter(FieldFormatter):
class ModelSpecificFieldFormatter(FieldFormatter):
MODEL_DICT = {
MODEL_DICT: ClassVar[Dict] = {
"OpenAI": OPENAI_MODELS,
"ChatOpenAI": CHAT_OPENAI_MODELS,
"Anthropic": ANTHROPIC_MODELS,
@ -86,7 +86,7 @@ class UnionTypeFormatter(FieldFormatter):
class SpecialFieldFormatter(FieldFormatter):
SPECIAL_FIELD_HANDLERS = {
SPECIAL_FIELD_HANDLERS: ClassVar[Dict] = {
"allowed_tools": lambda field: "Tool",
"max_value_length": lambda field: "int",
}

View file

@ -35,7 +35,7 @@ class ToolNode(FrontendNode):
),
TemplateField(
name="func",
field_type="function",
field_type="Callable",
required=True,
is_list=False,
show=True,
@ -135,7 +135,7 @@ class PythonFunctionNode(FrontendNode):
],
)
description: str = "Python function to be executed."
base_classes: list[str] = ["function"]
base_classes: list[str] = ["Callable"]
def to_dict(self):
return super().to_dict()

View file

@ -6,16 +6,14 @@ OPENAI_MODELS = [
"text-ada-001",
]
CHAT_OPENAI_MODELS = [
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-16k",
"gpt-4-0613",
"gpt-4-32k-0613",
"gpt-4-1106-preview",
"gpt-4",
"gpt-4-32k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
]
ANTHROPIC_MODELS = [
# largest model, ideal for a wide range of more complex tasks.
"claude-v1",

View file

@ -2,9 +2,9 @@ from typing import Optional
from loguru import logger
from pathlib import Path
from rich.logging import RichHandler
from platformdirs import user_cache_dir
import os
import orjson
import appdirs
VALID_LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
@ -50,7 +50,7 @@ def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
)
if not log_file:
cache_dir = Path(appdirs.user_cache_dir("langflow"))
cache_dir = Path(user_cache_dir("langflow"))
log_file = cache_dir / "langflow.log"
log_file = Path(log_file)

View file

@ -30,7 +30,7 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct
docs = parse(_class.__doc__)
variables = {"_type": _type}
for class_field_items, value in _class.__fields__.items():
for class_field_items, value in _class.model_fields.items():
if class_field_items in ["callback_manager"]:
continue
variables[class_field_items] = {}
@ -52,7 +52,7 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct
# the output to be a function
base_classes = get_base_classes(_class)
if add_function:
base_classes.append("function")
base_classes.append("Callable")
return {
"template": format_dict(variables, name),
@ -100,7 +100,7 @@ def build_template_from_class(name: str, type_to_cls_dict: Dict, add_function: b
# Adding function to base classes to allow
# the output to be a function
if add_function:
base_classes.append("function")
base_classes.append("Callable")
return {
"template": format_dict(variables, name),
"description": docs.short_description or "",
@ -158,7 +158,7 @@ def build_template_from_method(
# Adding function to base classes to allow the output to be a function
if add_function:
base_classes.append("function")
base_classes.append("Callable")
return {
"template": format_dict(variables, class_name),
@ -254,6 +254,7 @@ def format_dict(dictionary: Dict[str, Any], class_name: Optional[str] = None) ->
_type = remove_optional_wrapper(_type)
_type = check_list_type(_type, value)
_type = replace_mapping_with_dict(_type)
_type = get_type_from_union_literal(_type)
value["type"] = get_formatted_type(key, _type)
value["show"] = should_show_field(value, key)
@ -273,6 +274,15 @@ def format_dict(dictionary: Dict[str, Any], class_name: Optional[str] = None) ->
return dictionary
# "Union[Literal['f-string'], Literal['jinja2']]" -> "str"
def get_type_from_union_literal(union_literal: str) -> str:
# if types are literal strings
# the type is a string
if "Literal" in union_literal:
return "str"
return union_literal
def get_type(value: Any) -> Union[str, type]:
"""
Retrieves the type value from the dictionary.
@ -280,7 +290,8 @@ def get_type(value: Any) -> Union[str, type]:
Returns:
The type value.
"""
_type = value["type"]
# get "type" or "annotation" from the value
_type = value.get("type") or value.get("annotation")
return _type if isinstance(_type, str) else _type.__name__

View file

@ -16,8 +16,8 @@ import {
FETCH_ERROR_MESSAGE,
} from "./constants/constants";
import { alertContext } from "./contexts/alertContext";
import { FlowsContext } from "./contexts/flowsContext";
import { locationContext } from "./contexts/locationContext";
import { TabsContext } from "./contexts/tabsContext";
import { typesContext } from "./contexts/typesContext";
import Router from "./routes";
@ -30,7 +30,7 @@ export default function App() {
setShowSideBar(true);
setIsStackedOpen(true);
}, [location.pathname, setCurrent, setIsStackedOpen, setShowSideBar]);
const { hardReset } = useContext(TabsContext);
const { hardReset } = useContext(FlowsContext);
const {
errorData,

View file

@ -23,10 +23,9 @@ import TextAreaComponent from "../../../../components/textAreaComponent";
import ToggleShadComponent from "../../../../components/toggleShadComponent";
import { Button } from "../../../../components/ui/button";
import { TOOLTIP_EMPTY } from "../../../../constants/constants";
import { TabsContext } from "../../../../contexts/tabsContext";
import { FlowsContext } from "../../../../contexts/flowsContext";
import { typesContext } from "../../../../contexts/typesContext";
import { ParameterComponentType } from "../../../../types/components";
import { TabsState } from "../../../../types/tabs";
import {
convertObjToArray,
convertValuesToNumbers,
@ -63,7 +62,7 @@ export default function ParameterComponent({
const infoHtml = useRef<HTMLDivElement & ReactNode>(null);
const updateNodeInternals = useUpdateNodeInternals();
const [position, setPosition] = useState(0);
const { setTabsState, tabId, flows } = useContext(TabsContext);
const { setTabsState, tabId, flows } = useContext(FlowsContext);
const flow = flows.find((flow) => flow.id === tabId)?.data?.nodes ?? null;

View file

@ -7,7 +7,7 @@ import IconComponent from "../../components/genericIconComponent";
import InputComponent from "../../components/inputComponent";
import { Textarea } from "../../components/ui/textarea";
import { useSSE } from "../../contexts/SSEContext";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { typesContext } from "../../contexts/typesContext";
import NodeToolbarComponent from "../../pages/FlowPage/components/nodeToolbarComponent";
import { validationStatusType } from "../../types/components";
@ -18,7 +18,7 @@ import {
scapedJSONStringfy,
} from "../../utils/reactflowUtils";
import { nodeColors, nodeIconsLucide } from "../../utils/styleUtils";
import { classNames, toTitleCase } from "../../utils/utils";
import { classNames, getFieldTitle } from "../../utils/utils";
import ParameterComponent from "./components/parameterComponent";
export default function GenericNode({
@ -33,7 +33,7 @@ export default function GenericNode({
yPos: number;
}): JSX.Element {
const [data, setData] = useState(olddata);
const { updateFlow, flows, tabId } = useContext(TabsContext);
const { updateFlow, flows, tabId } = useContext(FlowsContext);
const updateNodeInternals = useUpdateNodeInternals();
const { types, deleteNode, reactFlowInstance, setFilterEdge, getFilterEdge } =
useContext(typesContext);
@ -239,15 +239,10 @@ export default function GenericNode({
] ??
nodeColors.unknown
}
title={
data.node?.template[templateField].display_name
? data.node.template[templateField].display_name
: data.node?.template[templateField].name
? toTitleCase(
data.node.template[templateField].name
)
: toTitleCase(templateField)
}
title={getFieldTitle(
data.node?.template!,
templateField
)}
info={data.node?.template[templateField].info}
name={templateField}
tooltipTitle={
@ -448,15 +443,10 @@ export default function GenericNode({
] ??
nodeColors.unknown
}
title={
data.node?.template[templateField].display_name
? data.node.template[templateField].display_name
: data.node?.template[templateField].name
? toTitleCase(
data.node.template[templateField].name
)
: toTitleCase(templateField)
}
title={getFieldTitle(
data.node?.template!,
templateField
)}
info={data.node?.template[templateField].info}
name={templateField}
tooltipTitle={

View file

@ -7,9 +7,9 @@ import { typesContext } from "../../../contexts/typesContext";
import { postBuildInit } from "../../../controllers/API";
import { FlowType } from "../../../types/flow";
import { TabsContext } from "../../../contexts/tabsContext";
import { FlowsContext } from "../../../contexts/flowsContext";
import { parsedDataType } from "../../../types/components";
import { TabsState } from "../../../types/tabs";
import { FlowsState } from "../../../types/tabs";
import { validateNodes } from "../../../utils/reactflowUtils";
import RadialProgressComponent from "../../RadialProgress";
import IconComponent from "../../genericIconComponent";
@ -26,7 +26,7 @@ export default function BuildTrigger({
}): JSX.Element {
const { updateSSEData, isBuilding, setIsBuilding, sseData } = useSSE();
const { reactFlowInstance } = useContext(typesContext);
const { setTabsState, saveFlow } = useContext(TabsContext);
const { setTabsState, saveFlow } = useContext(FlowsContext);
const { setErrorData, setSuccessData } = useContext(alertContext);
const [isIconTouched, setIsIconTouched] = useState(false);
const eventClick = isBuilding ? "pointer-events-none" : "";
@ -103,7 +103,7 @@ export default function BuildTrigger({
setSuccessData({ title: parsedData.log });
} else if (parsedData.input_keys !== undefined) {
//@ts-ignore
setTabsState((old: TabsState) => {
setTabsState((old: FlowsState) => {
return {
...old,
[flowId]: {

View file

@ -5,7 +5,7 @@ import BuildTrigger from "./buildTrigger";
import ChatTrigger from "./chatTrigger";
import * as _ from "lodash";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { getBuildStatus } from "../../controllers/API";
import FormModal from "../../modals/formModal";
import { NodeType } from "../../types/flow";
@ -13,7 +13,7 @@ import { NodeType } from "../../types/flow";
export default function Chat({ flow }: ChatType): JSX.Element {
const [open, setOpen] = useState(false);
const [canOpen, setCanOpen] = useState(false);
const { tabsState, isBuilt, setIsBuilt } = useContext(TabsContext);
const { tabsState, isBuilt, setIsBuilt } = useContext(FlowsContext);
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {

View file

@ -1,5 +1,5 @@
import { useContext, useState } from "react";
import { TabsContext } from "../../../../contexts/tabsContext";
import { FlowsContext } from "../../../../contexts/flowsContext";
import {
DropdownMenu,
DropdownMenuContent,
@ -17,7 +17,7 @@ import IconComponent from "../../../genericIconComponent";
import { Button } from "../../../ui/button";
export const MenuBar = ({ flows, tabId }: menuBarPropsType): JSX.Element => {
const { addFlow } = useContext(TabsContext);
const { addFlow } = useContext(FlowsContext);
const { setErrorData } = useContext(alertContext);
const { undo, redo } = useContext(undoRedoContext);
const [openSettings, setOpenSettings] = useState(false);

View file

@ -7,7 +7,8 @@ import { alertContext } from "../../contexts/alertContext";
import { AuthContext } from "../../contexts/authContext";
import { darkContext } from "../../contexts/darkContext";
import { StoreContext } from "../../contexts/storeContext";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { gradients } from "../../utils/styleUtils";
import IconComponent from "../genericIconComponent";
import { Button } from "../ui/button";
@ -23,7 +24,7 @@ import { Separator } from "../ui/separator";
import MenuBar from "./components/menuBar";
export default function Header(): JSX.Element {
const { flows, tabId } = useContext(TabsContext);
const { flows, tabId } = useContext(FlowsContext);
const { dark, setDark } = useContext(darkContext);
const { notificationCenter } = useContext(alertContext);
const location = useLocation();

View file

@ -1,6 +1,6 @@
import { useContext, useEffect, useState } from "react";
import { alertContext } from "../../contexts/alertContext";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { uploadFile } from "../../controllers/API";
import { FileComponentType } from "../../types/components";
import IconComponent from "../genericIconComponent";
@ -17,7 +17,7 @@ export default function InputFileComponent({
const [myValue, setMyValue] = useState(value);
const [loading, setLoading] = useState(false);
const { setErrorData } = useContext(alertContext);
const { tabId } = useContext(TabsContext);
const { tabId } = useContext(FlowsContext);
// Clear component state
useEffect(() => {

View file

@ -28,7 +28,7 @@ import {
sourceHandleType,
targetHandleType,
} from "../types/flow";
import { TabsContextType, TabsState } from "../types/tabs";
import { FlowsContextType, TabsState } from "../types/tabs";
import {
addVersionToDuplicates,
checkOldEdgesHandles,
@ -52,7 +52,7 @@ import { typesContext } from "./typesContext";
const uid = new ShortUniqueId({ length: 5 });
const TabsContextInitialValue: TabsContextType = {
const FlowsContextInitialValue: FlowsContextType = {
tabId: "",
setTabId: (index: string) => {},
isLoading: true,
@ -84,11 +84,11 @@ const TabsContextInitialValue: TabsContextType = {
deleteComponent: (id: string, key: string) => {},
};
export const TabsContext = createContext<TabsContextType>(
TabsContextInitialValue
export const FlowsContext = createContext<FlowsContextType>(
FlowsContextInitialValue
);
export function TabsProvider({ children }: { children: ReactNode }) {
export function FlowsProvider({ children }: { children: ReactNode }) {
const { setErrorData, setNoticeData, setSuccessData } =
useContext(alertContext);
const { getAuthentication, isAuthenticated } = useContext(AuthContext);
@ -713,7 +713,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
const [isBuilt, setIsBuilt] = useState(false);
return (
<TabsContext.Provider
<FlowsContext.Provider
value={{
saveFlow,
isBuilt,
@ -744,6 +744,6 @@ export function TabsProvider({ children }: { children: ReactNode }) {
}}
>
{children}
</TabsContext.Provider>
</FlowsContext.Provider>
);
}

View file

@ -7,9 +7,10 @@ import { SSEProvider } from "./SSEContext";
import { AlertProvider } from "./alertContext";
import { AuthProvider } from "./authContext";
import { DarkProvider } from "./darkContext";
import { FlowsProvider } from "./flowsContext";
import { LocationProvider } from "./locationContext";
import { StoreProvider } from "./storeContext";
import { TabsProvider } from "./tabsContext";
import { TypesProvider } from "./typesContext";
import { UndoRedoProvider } from "./undoRedoContext";
@ -27,11 +28,11 @@ export default function ContextWrapper({ children }: { children: ReactNode }) {
<LocationProvider>
<ApiInterceptor />
<SSEProvider>
<TabsProvider>
<FlowsProvider>
<UndoRedoProvider>
<StoreProvider>{children}</StoreProvider>
</UndoRedoProvider>
</TabsProvider>
</FlowsProvider>
</SSEProvider>
</LocationProvider>
</TypesProvider>

View file

@ -13,7 +13,7 @@ import {
undoRedoContextType,
} from "../types/typesContext";
import { isWrappedWithClass } from "../utils/utils";
import { TabsContext } from "./tabsContext";
import { FlowsContext } from "./flowsContext";
const initialValue = {
undo: () => {},
@ -29,7 +29,7 @@ const defaultOptions: UseUndoRedoOptions = {
export const undoRedoContext = createContext<undoRedoContextType>(initialValue);
export function UndoRedoProvider({ children }) {
const { tabId, flows } = useContext(TabsContext);
const { tabId, flows } = useContext(FlowsContext);
const [past, setPast] = useState<HistoryItem[][]>(flows.map(() => []));
const [future, setFuture] = useState<HistoryItem[][]>(flows.map(() => []));

View file

@ -0,0 +1,31 @@
const SvgAWS = (props) => (
<svg
xmlns="http://www.w3.org/2000/svg"
xmlSpace="preserve"
id="Layer_1"
x={0}
y={0}
style={{
enableBackground: "new 0 0 304 182",
}}
viewBox="0 0 304 182"
{...props}
>
<style>{".st1{fill-rule:evenodd;clip-rule:evenodd;fill:#f90}"}</style>
<path
d="M86.4 66.4c0 3.7.4 6.7 1.1 8.9.8 2.2 1.8 4.6 3.2 7.2.5.8.7 1.6.7 2.3 0 1-.6 2-1.9 3L83.2 92c-.9.6-1.8.9-2.6.9-1 0-2-.5-3-1.4-1.4-1.5-2.6-3.1-3.6-4.7-1-1.7-2-3.6-3.1-5.9-7.8 9.2-17.6 13.8-29.4 13.8-8.4 0-15.1-2.4-20-7.2-4.9-4.8-7.4-11.2-7.4-19.2 0-8.5 3-15.4 9.1-20.6 6.1-5.2 14.2-7.8 24.5-7.8 3.4 0 6.9.3 10.6.8 3.7.5 7.5 1.3 11.5 2.2v-7.3c0-7.6-1.6-12.9-4.7-16-3.2-3.1-8.6-4.6-16.3-4.6-3.5 0-7.1.4-10.8 1.3-3.7.9-7.3 2-10.8 3.4-1.6.7-2.8 1.1-3.5 1.3-.7.2-1.2.3-1.6.3-1.4 0-2.1-1-2.1-3.1v-4.9c0-1.6.2-2.8.7-3.5.5-.7 1.4-1.4 2.8-2.1 3.5-1.8 7.7-3.3 12.6-4.5C41 1.9 46.2 1.3 51.7 1.3c11.9 0 20.6 2.7 26.2 8.1 5.5 5.4 8.3 13.6 8.3 24.6v32.4zM45.8 81.6c3.3 0 6.7-.6 10.3-1.8 3.6-1.2 6.8-3.4 9.5-6.4 1.6-1.9 2.8-4 3.4-6.4.6-2.4 1-5.3 1-8.7v-4.2c-2.9-.7-6-1.3-9.2-1.7-3.2-.4-6.3-.6-9.4-.6-6.7 0-11.6 1.3-14.9 4-3.3 2.7-4.9 6.5-4.9 11.5 0 4.7 1.2 8.2 3.7 10.6 2.4 2.5 5.9 3.7 10.5 3.7zm80.3 10.8c-1.8 0-3-.3-3.8-1-.8-.6-1.5-2-2.1-3.9L96.7 10.2c-.6-2-.9-3.3-.9-4 0-1.6.8-2.5 2.4-2.5h9.8c1.9 0 3.2.3 3.9 1 .8.6 1.4 2 2 3.9l16.8 66.2 15.6-66.2c.5-2 1.1-3.3 1.9-3.9.8-.6 2.2-1 4-1h8c1.9 0 3.2.3 4 1 .8.6 1.5 2 1.9 3.9l15.8 67 17.3-67c.6-2 1.3-3.3 2-3.9.8-.6 2.1-1 3.9-1h9.3c1.6 0 2.5.8 2.5 2.5 0 .5-.1 1-.2 1.6-.1.6-.3 1.4-.7 2.5l-24.1 77.3c-.6 2-1.3 3.3-2.1 3.9-.8.6-2.1 1-3.8 1h-8.6c-1.9 0-3.2-.3-4-1-.8-.7-1.5-2-1.9-4L156 23l-15.4 64.4c-.5 2-1.1 3.3-1.9 4-.8.7-2.2 1-4 1h-8.6zm128.5 2.7c-5.2 0-10.4-.6-15.4-1.8-5-1.2-8.9-2.5-11.5-4-1.6-.9-2.7-1.9-3.1-2.8-.4-.9-.6-1.9-.6-2.8v-5.1c0-2.1.8-3.1 2.3-3.1.6 0 1.2.1 1.8.3.6.2 1.5.6 2.5 1 3.4 1.5 7.1 2.7 11 3.5 4 .8 7.9 1.2 11.9 1.2 6.3 0 11.2-1.1 14.6-3.3 3.4-2.2 5.2-5.4 5.2-9.5 0-2.8-.9-5.1-2.7-7-1.8-1.9-5.2-3.6-10.1-5.2L246 52c-7.3-2.3-12.7-5.7-16-10.2-3.3-4.4-5-9.3-5-14.5 0-4.2.9-7.9 2.7-11.1 1.8-3.2 4.2-6 7.2-8.2 3-2.3 6.4-4 10.4-5.2 4-1.2 8.2-1.7 12.6-1.7 2.2 0 4.5.1 6.7.4 2.3.3 4.4.7 6.5 1.1 2 .5 3.9 1 5.7 1.6 1.8.6 3.2 1.2 4.2 1.8 1.4.8 2.4 1.6 3 2.5.6.8.9 1.9.9 3.3v4.7c0 2.1-.8 3.2-2.3 3.2-.8 0-2.1-.4-3.8-1.2-5.7-2.6-12.1-3.9-19.2-3.9-5.7 
0-10.2.9-13.3 2.8-3.1 1.9-4.7 4.8-4.7 8.9 0 2.8 1 5.2 3 7.1 2 1.9 5.7 3.8 11 5.5l14.2 4.5c7.2 2.3 12.4 5.5 15.5 9.6 3.1 4.1 4.6 8.8 4.6 14 0 4.3-.9 8.2-2.6 11.6-1.8 3.4-4.2 6.4-7.3 8.8-3.1 2.5-6.8 4.3-11.1 5.6-4.5 1.4-9.2 2.1-14.3 2.1z"
style={{
fill: "#252f3e",
}}
/>
<path
d="M273.5 143.7c-32.9 24.3-80.7 37.2-121.8 37.2-57.6 0-109.5-21.3-148.7-56.7-3.1-2.8-.3-6.6 3.4-4.4 42.4 24.6 94.7 39.5 148.8 39.5 36.5 0 76.6-7.6 113.5-23.2 5.5-2.5 10.2 3.6 4.8 7.6z"
className="st1"
/>
<path
d="M287.2 128.1c-4.2-5.4-27.8-2.6-38.5-1.3-3.2.4-3.7-2.4-.8-4.5 18.8-13.2 49.7-9.4 53.3-5 3.6 4.5-1 35.4-18.6 50.2-2.7 2.3-5.3 1.1-4.1-1.9 4-9.9 12.9-32.2 8.7-37.5z"
className="st1"
/>
</svg>
);
export default SvgAWS;

View file

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 304 182" style="enable-background:new 0 0 304 182;" xml:space="preserve">
<style type="text/css">
.st0{fill:#252F3E;}
.st1{fill-rule:evenodd;clip-rule:evenodd;fill:#FF9900;}
</style>
<g>
<path class="st0" d="M86.4,66.4c0,3.7,0.4,6.7,1.1,8.9c0.8,2.2,1.8,4.6,3.2,7.2c0.5,0.8,0.7,1.6,0.7,2.3c0,1-0.6,2-1.9,3l-6.3,4.2
c-0.9,0.6-1.8,0.9-2.6,0.9c-1,0-2-0.5-3-1.4C76.2,90,75,88.4,74,86.8c-1-1.7-2-3.6-3.1-5.9c-7.8,9.2-17.6,13.8-29.4,13.8
c-8.4,0-15.1-2.4-20-7.2c-4.9-4.8-7.4-11.2-7.4-19.2c0-8.5,3-15.4,9.1-20.6c6.1-5.2,14.2-7.8,24.5-7.8c3.4,0,6.9,0.3,10.6,0.8
c3.7,0.5,7.5,1.3,11.5,2.2v-7.3c0-7.6-1.6-12.9-4.7-16c-3.2-3.1-8.6-4.6-16.3-4.6c-3.5,0-7.1,0.4-10.8,1.3c-3.7,0.9-7.3,2-10.8,3.4
c-1.6,0.7-2.8,1.1-3.5,1.3c-0.7,0.2-1.2,0.3-1.6,0.3c-1.4,0-2.1-1-2.1-3.1v-4.9c0-1.6,0.2-2.8,0.7-3.5c0.5-0.7,1.4-1.4,2.8-2.1
c3.5-1.8,7.7-3.3,12.6-4.5c4.9-1.3,10.1-1.9,15.6-1.9c11.9,0,20.6,2.7,26.2,8.1c5.5,5.4,8.3,13.6,8.3,24.6V66.4z M45.8,81.6
c3.3,0,6.7-0.6,10.3-1.8c3.6-1.2,6.8-3.4,9.5-6.4c1.6-1.9,2.8-4,3.4-6.4c0.6-2.4,1-5.3,1-8.7v-4.2c-2.9-0.7-6-1.3-9.2-1.7
c-3.2-0.4-6.3-0.6-9.4-0.6c-6.7,0-11.6,1.3-14.9,4c-3.3,2.7-4.9,6.5-4.9,11.5c0,4.7,1.2,8.2,3.7,10.6
C37.7,80.4,41.2,81.6,45.8,81.6z M126.1,92.4c-1.8,0-3-0.3-3.8-1c-0.8-0.6-1.5-2-2.1-3.9L96.7,10.2c-0.6-2-0.9-3.3-0.9-4
c0-1.6,0.8-2.5,2.4-2.5h9.8c1.9,0,3.2,0.3,3.9,1c0.8,0.6,1.4,2,2,3.9l16.8,66.2l15.6-66.2c0.5-2,1.1-3.3,1.9-3.9c0.8-0.6,2.2-1,4-1
h8c1.9,0,3.2,0.3,4,1c0.8,0.6,1.5,2,1.9,3.9l15.8,67l17.3-67c0.6-2,1.3-3.3,2-3.9c0.8-0.6,2.1-1,3.9-1h9.3c1.6,0,2.5,0.8,2.5,2.5
c0,0.5-0.1,1-0.2,1.6c-0.1,0.6-0.3,1.4-0.7,2.5l-24.1,77.3c-0.6,2-1.3,3.3-2.1,3.9c-0.8,0.6-2.1,1-3.8,1h-8.6c-1.9,0-3.2-0.3-4-1
c-0.8-0.7-1.5-2-1.9-4L156,23l-15.4,64.4c-0.5,2-1.1,3.3-1.9,4c-0.8,0.7-2.2,1-4,1H126.1z M254.6,95.1c-5.2,0-10.4-0.6-15.4-1.8
c-5-1.2-8.9-2.5-11.5-4c-1.6-0.9-2.7-1.9-3.1-2.8c-0.4-0.9-0.6-1.9-0.6-2.8v-5.1c0-2.1,0.8-3.1,2.3-3.1c0.6,0,1.2,0.1,1.8,0.3
c0.6,0.2,1.5,0.6,2.5,1c3.4,1.5,7.1,2.7,11,3.5c4,0.8,7.9,1.2,11.9,1.2c6.3,0,11.2-1.1,14.6-3.3c3.4-2.2,5.2-5.4,5.2-9.5
c0-2.8-0.9-5.1-2.7-7c-1.8-1.9-5.2-3.6-10.1-5.2L246,52c-7.3-2.3-12.7-5.7-16-10.2c-3.3-4.4-5-9.3-5-14.5c0-4.2,0.9-7.9,2.7-11.1
c1.8-3.2,4.2-6,7.2-8.2c3-2.3,6.4-4,10.4-5.2c4-1.2,8.2-1.7,12.6-1.7c2.2,0,4.5,0.1,6.7,0.4c2.3,0.3,4.4,0.7,6.5,1.1
c2,0.5,3.9,1,5.7,1.6c1.8,0.6,3.2,1.2,4.2,1.8c1.4,0.8,2.4,1.6,3,2.5c0.6,0.8,0.9,1.9,0.9,3.3v4.7c0,2.1-0.8,3.2-2.3,3.2
c-0.8,0-2.1-0.4-3.8-1.2c-5.7-2.6-12.1-3.9-19.2-3.9c-5.7,0-10.2,0.9-13.3,2.8c-3.1,1.9-4.7,4.8-4.7,8.9c0,2.8,1,5.2,3,7.1
c2,1.9,5.7,3.8,11,5.5l14.2,4.5c7.2,2.3,12.4,5.5,15.5,9.6c3.1,4.1,4.6,8.8,4.6,14c0,4.3-0.9,8.2-2.6,11.6
c-1.8,3.4-4.2,6.4-7.3,8.8c-3.1,2.5-6.8,4.3-11.1,5.6C264.4,94.4,259.7,95.1,254.6,95.1z"/>
<g>
<path class="st1" d="M273.5,143.7c-32.9,24.3-80.7,37.2-121.8,37.2c-57.6,0-109.5-21.3-148.7-56.7c-3.1-2.8-0.3-6.6,3.4-4.4
c42.4,24.6,94.7,39.5,148.8,39.5c36.5,0,76.6-7.6,113.5-23.2C274.2,133.6,278.9,139.7,273.5,143.7z"/>
<path class="st1" d="M287.2,128.1c-4.2-5.4-27.8-2.6-38.5-1.3c-3.2,0.4-3.7-2.4-0.8-4.5c18.8-13.2,49.7-9.4,53.3-5
c3.6,4.5-1,35.4-18.6,50.2c-2.7,2.3-5.3,1.1-4.1-1.9C282.5,155.7,291.4,133.4,287.2,128.1z"/>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 3.4 KiB

View file

@ -0,0 +1,8 @@
import React, { forwardRef } from "react";
import SvgAWS from "./AWS";
export const AWSIcon = forwardRef<SVGSVGElement, React.PropsWithChildren<{}>>(
(props, ref) => {
return <SvgAWS ref={ref} {...props} />;
}
);

View file

@ -15,7 +15,7 @@ import CodeTabsComponent from "../../components/codeTabsComponent";
import IconComponent from "../../components/genericIconComponent";
import { EXPORT_CODE_DIALOG } from "../../constants/constants";
import { AuthContext } from "../../contexts/authContext";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { TemplateVariableType } from "../../types/api";
import { tweakType, uniqueTweakType } from "../../types/components";
import { FlowType, NodeType } from "../../types/flow/index";
@ -45,7 +45,7 @@ const ApiModal = forwardRef(
const [activeTab, setActiveTab] = useState("0");
const tweak = useRef<tweakType>([]);
const tweaksList = useRef<string[]>([]);
const { setTweak, getTweak, tabsState } = useContext(TabsContext);
const { setTweak, getTweak, tabsState } = useContext(FlowsContext);
const pythonApiCode = getPythonApiCode(
flow,
autoLogin,

View file

@ -33,10 +33,10 @@ import {
TableRow,
} from "../../components/ui/table";
import { limitScrollFieldsModal } from "../../constants/constants";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { typesContext } from "../../contexts/typesContext";
import { NodeDataType } from "../../types/flow";
import { TabsState } from "../../types/tabs";
import { FlowsState } from "../../types/tabs";
import {
convertObjToArray,
convertValuesToNumbers,
@ -69,7 +69,7 @@ const EditNodeModal = forwardRef(
const myData = useRef(data);
const { setTabsState, tabId } = useContext(TabsContext);
const { setTabsState, tabId } = useContext(FlowsContext);
const { reactFlowInstance } = useContext(typesContext);
let disabled =
reactFlowInstance
@ -542,7 +542,7 @@ const EditNodeModal = forwardRef(
const newData = cloneDeep(myData.current);
myData.current = newData;
//@ts-ignore
setTabsState((prev: TabsState) => {
setTabsState((prev: FlowsState) => {
return {
...prev,
[tabId]: {

View file

@ -5,13 +5,15 @@ import { Button } from "../../components/ui/button";
import { Checkbox } from "../../components/ui/checkbox";
import { EXPORT_DIALOG_SUBTITLE } from "../../constants/constants";
import { alertContext } from "../../contexts/alertContext";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { typesContext } from "../../contexts/typesContext";
import { removeApiKeys } from "../../utils/reactflowUtils";
import BaseModal from "../baseModal";
const ExportModal = forwardRef(
(props: { children: ReactNode }, ref): JSX.Element => {
const { flows, tabId, downloadFlow } = useContext(TabsContext);
const { flows, tabId, downloadFlow } = useContext(FlowsContext);
const { reactFlowInstance } = useContext(typesContext);
const { setNoticeData } = useContext(alertContext);
const [checked, setChecked] = useState(true);
const flow = flows.find((f) => f.id === tabId);
@ -66,7 +68,12 @@ const ExportModal = forwardRef(
onClick={() => {
if (checked) {
downloadFlow(
flows.find((flow) => flow.id === tabId)!,
{
id: tabId,
data: reactFlowInstance?.toObject()!,
description,
name,
},
name!,
description
);
@ -76,7 +83,12 @@ const ExportModal = forwardRef(
});
} else
downloadFlow(
removeApiKeys(flows.find((flow) => flow.id === tabId)!),
removeApiKeys({
id: tabId,
data: reactFlowInstance?.toObject()!,
description,
name,
}),
name!,
description
);

View file

@ -3,7 +3,7 @@ import EditFlowSettings from "../../components/EditFlowSettingsComponent";
import IconComponent from "../../components/genericIconComponent";
import { Button } from "../../components/ui/button";
import { SETTINGS_DIALOG_SUBTITLE } from "../../constants/constants";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { FlowSettingsPropsType } from "../../types/components";
import BaseModal from "../baseModal";
@ -11,7 +11,7 @@ export default function FlowSettingsModal({
open,
setOpen,
}: FlowSettingsPropsType): JSX.Element {
const { flows, tabId, updateFlow, saveFlow } = useContext(TabsContext);
const { flows, tabId, updateFlow, saveFlow } = useContext(FlowsContext);
const flow = flows.find((f) => f.id === tabId);
useEffect(() => {
setName(flow!.name);

View file

@ -24,9 +24,9 @@ import {
import { Textarea } from "../../components/ui/textarea";
import { CHAT_FORM_DIALOG_SUBTITLE } from "../../constants/constants";
import { AuthContext } from "../../contexts/authContext";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { getBuildStatus } from "../../controllers/API";
import { TabsState } from "../../types/tabs";
import { FlowsState } from "../../types/tabs";
import { validateNodes } from "../../utils/reactflowUtils";
export default function FormModal({
@ -38,7 +38,7 @@ export default function FormModal({
setOpen: (open: boolean) => void;
flow: FlowType;
}): JSX.Element {
const { tabsState, setTabsState } = useContext(TabsContext);
const { tabsState, setTabsState } = useContext(FlowsContext);
const [chatValue, setChatValue] = useState(() => {
try {
const { formKeysData } = tabsState[flow.id];
@ -401,7 +401,7 @@ export default function FormModal({
chatKey: chatKey!,
});
//@ts-ignore
setTabsState((old: TabsState) => {
setTabsState((old: FlowsState) => {
if (!chatKey) return old;
let newTabsState = _.cloneDeep(old);
newTabsState[id.current].formKeysData.input_keys![chatKey] = "";
@ -522,7 +522,7 @@ export default function FormModal({
}
onChange={(e) => {
//@ts-ignore
setTabsState((old: TabsState) => {
setTabsState((old: FlowsState) => {
let newTabsState = _.cloneDeep(old);
newTabsState[
id.current
@ -634,7 +634,7 @@ export default function FormModal({
setChatValue={(value) => {
setChatValue(value);
//@ts-ignore
setTabsState((old: TabsState) => {
setTabsState((old: FlowsState) => {
let newTabsState = _.cloneDeep(old);
newTabsState[id.current].formKeysData.input_keys![
chatKey!

View file

@ -22,7 +22,7 @@ import {
} from "../../constants/constants";
import { alertContext } from "../../contexts/alertContext";
import { AuthContext } from "../../contexts/authContext";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowsContext } from "../../contexts/flowsContext";
import {
addUser,
deleteUser,
@ -43,7 +43,8 @@ export default function AdminPage() {
const { setErrorData, setSuccessData } = useContext(alertContext);
const { userData } = useContext(AuthContext);
const [totalRowsCount, setTotalRowsCount] = useState(0);
const { setTabId } = useContext(TabsContext);
const { setTabId } = useContext(FlowsContext);
// set null id
useEffect(() => {

Some files were not shown because too many files have changed in this diff Show more