Merge branch 'authentication' into login

This commit is contained in:
anovazzi1 2023-08-10 10:44:48 -03:00
commit d6d5aae1ef
138 changed files with 2945 additions and 1449 deletions

10
.vscode/launch.json vendored
View file

@ -1,4 +1,5 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Debug Backend",
@ -38,6 +39,15 @@
"request": "launch",
"url": "http://localhost:3000/",
"webRoot": "${workspaceRoot}/src/frontend"
},
{
"name": "Python: Debug Tests",
"type": "python",
"request": "launch",
"program": "${file}",
"purpose": ["debug-test"],
"console": "integratedTerminal",
"justMyCode": false
}
]
}

View file

@ -7,6 +7,11 @@ to contributions, whether it be in the form of a new feature, improved infra, or
To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow.
Please do not try to push directly to this repo unless you are a maintainer.
The branch structure is as follows:
- `main`: The stable version of Langflow
- `dev`: The development version of Langflow. This branch is used to test new features before they are merged into `main` and, as such, may be unstable.
## 🗺Contributing Guidelines
## 🚩GitHub Issues

View file

@ -275,6 +275,8 @@ flow("Hey, have you heard of Langflow?")
We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible.
---
Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions and showcase your projects! 🦾
<p>

View file

@ -43,7 +43,7 @@ This guide takes you through the process of augmenting the "Basic Chat with Prom
8. Connect this loader to the `{context}` variable that we just added.
9. In the "Web Page" field, enter "https://langflow.org/how-upload-examples".
9. In the "Web Page" field, enter "https://docs.langflow.org/how-upload-examples".
10. Now, click on "ConversationBufferMemory".

2
docs/static/CNAME vendored
View file

@ -1 +1 @@
langflow.org
docs.langflow.org

679
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.4.1"
version = "0.4.3"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -19,7 +19,7 @@ readme = "README.md"
keywords = ["nlp", "langchain", "openai", "gpt", "gui"]
packages = [{ include = "langflow", from = "src/backend" }]
include = ["src/backend/langflow/*", "src/backend/langflow/**/*"]
documentation = "https://docs.langflow.org"
[tool.poetry.scripts]
langflow = "langflow.__main__:main"
@ -33,7 +33,7 @@ google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^21.1.0"
langchain = "^0.0.250"
langchain = "^0.0.256"
openai = "^0.27.8"
pandas = "^2.0.0"
chromadb = "^0.3.21"
@ -63,7 +63,7 @@ python-multipart = "^0.0.6"
sqlmodel = "^0.0.8"
faiss-cpu = "^1.7.4"
anthropic = "^0.3.0"
orjson = "^3.9.1"
orjson = "3.9.3"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
types-cachetools = "^5.3.0.5"
@ -77,6 +77,10 @@ psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"
fastavro = "^1.8.0"
langchain-experimental = "^0.0.8"
alembic = "^1.11.2"
passlib = "^1.7.4"
bcrypt = "^4.0.1"
python-jose = "^3.3.0"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"

View file

@ -1,5 +1,7 @@
from importlib import metadata
from langflow.cache import cache_manager
# Deactivate cache manager for now
# from langflow.services.cache import cache_manager
from langflow.processing.process import load_flow_from_json
from langflow.interface.custom.custom_component import CustomComponent

View file

@ -1,6 +1,7 @@
import sys
import time
import httpx
from langflow.services.utils import get_settings_manager
from langflow.utils.util import get_number_of_workers
from multiprocess import Process # type: ignore
import platform
@ -12,7 +13,6 @@ from rich import box
from rich import print as rprint
import typer
from langflow.main import setup_app
from langflow.settings import settings
from langflow.utils.logger import configure, logger
import webbrowser
from dotenv import load_dotenv
@ -30,19 +30,19 @@ def update_settings(
"""Update the settings from a config file."""
# Check for database_url in the environment variables
settings_manager = get_settings_manager()
if config:
logger.debug(f"Loading settings from {config}")
settings.update_from_yaml(config, dev=dev)
settings_manager.settings.update_from_yaml(config, dev=dev)
if remove_api_keys:
logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
settings_manager.settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
if cache:
logger.debug(f"Setting cache to {cache}")
settings.update_settings(CACHE=cache)
settings_manager.settings.update_settings(CACHE=cache)
if components_path:
logger.debug(f"Adding component path {components_path}")
settings.update_settings(COMPONENTS_PATH=components_path)
settings_manager.settings.update_settings(COMPONENTS_PATH=components_path)
def serve_on_jcloud():
@ -106,7 +106,9 @@ def serve(
help="Path to the directory containing custom components.",
envvar="LANGFLOW_COMPONENTS_PATH",
),
config: str = typer.Option("config.yaml", help="Path to the configuration file."),
config: str = typer.Option(
Path(__file__).parent / "config.yaml", help="Path to the configuration file."
),
# .env file param
env_file: Path = typer.Option(
None, help="Path to the .env file containing environment variables."
@ -146,6 +148,11 @@ def serve(
help="Remove API keys from the projects saved in the database.",
envvar="LANGFLOW_REMOVE_API_KEYS",
),
backend_only: bool = typer.Option(
False,
help="Run only the backend server without the frontend.",
envvar="LANGFLOW_BACKEND_ONLY",
),
):
"""
Run the Langflow server.
@ -167,7 +174,7 @@ def serve(
)
# create path object if path is provided
static_files_dir: Optional[Path] = Path(path) if path else None
app = setup_app(static_files_dir=static_files_dir)
app = setup_app(static_files_dir=static_files_dir, backend_only=backend_only)
# check if port is being used
if is_port_in_use(port, host):
port = get_free_port(port)
@ -179,6 +186,10 @@ def serve(
"timeout": timeout,
}
# Define an env variable to know if we are just testing the server
if "pytest" in sys.modules:
return
if platform.system() in ["Windows"]:
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn

View file

@ -0,0 +1,113 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# This is the path to the db in the root of the project.
# When the user runs Langflow, the database URL will
# be set dynamically.
sqlalchemy.url = sqlite:///../../../langflow.db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View file

@ -0,0 +1 @@
Generic single-database configuration.

View file

@ -0,0 +1,78 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# SQLModel is imported through the app's database manager -- presumably so
# that the model modules are imported and their tables land on
# SQLModel.metadata; confirm against langflow.services.database.manager.
from langflow.services.database.manager import SQLModel

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Autogenerate diffs the database against the SQLModel metadata.
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


# Module runs at import time when Alembic invokes env.py.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View file

@ -0,0 +1,27 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View file

@ -0,0 +1,42 @@
"""Remove FlowStyles table
Revision ID: 0a534bdfd84b
Revises: 4814b6f4abfd
Create Date: 2023-08-07 14:09:06.844104
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "0a534bdfd84b"
down_revision: Union[str, None] = "4814b6f4abfd"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("flowstyle")
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"flowstyle",
sa.Column("color", sa.VARCHAR(), nullable=False),
sa.Column("emoji", sa.VARCHAR(), nullable=False),
sa.Column("flow_id", sa.CHAR(length=32), nullable=True),
sa.Column("id", sa.CHAR(length=32), nullable=False),
sa.ForeignKeyConstraint(
["flow_id"],
["flow.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
# ### end Alembic commands ###

View file

@ -0,0 +1,65 @@
"""Add Flow table
Revision ID: 4814b6f4abfd
Revises:
Create Date: 2023-08-05 17:47:42.879824
"""
import contextlib
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision: str = "4814b6f4abfd"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# This suppress is used to not break the migration if the table already exists.
with contextlib.suppress(sa.exc.OperationalError):
op.create_table(
"flow",
sa.Column("data", sa.JSON(), nullable=True),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
op.create_index(
op.f("ix_flow_description"), "flow", ["description"], unique=False
)
op.create_index(op.f("ix_flow_name"), "flow", ["name"], unique=False)
with contextlib.suppress(sa.exc.OperationalError):
op.create_table(
"flowstyle",
sa.Column("color", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("emoji", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["flow_id"],
["flow.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("flowstyle")
op.drop_index(op.f("ix_flow_name"), table_name="flow")
op.drop_index(op.f("ix_flow_description"), table_name="flow")
op.drop_table("flow")
# ### end Alembic commands ###

View file

@ -5,7 +5,6 @@ from langflow.api.v1 import (
endpoints_router,
validate_router,
flows_router,
flow_styles_router,
component_router,
)
@ -17,4 +16,3 @@ router.include_router(endpoints_router)
router.include_router(validate_router)
router.include_router(component_router)
router.include_router(flows_router)
router.include_router(flow_styles_router)

View file

@ -66,3 +66,30 @@ def merge_nested_dicts(dict1, dict2):
else:
dict1[key] = value
return dict1
def merge_nested_dicts_with_renaming(dict1, dict2):
    """Merge ``dict2`` into ``dict1`` in place, renaming clashing sub-keys.

    When both dicts hold a dict under the same top-level key, the inner items
    of ``dict2`` are copied over one by one; an inner key that already exists
    in ``dict1`` is stored under a numbered alias (e.g. ``"name (1)"``)
    instead of overwriting.  Any other value from ``dict2`` simply replaces
    the corresponding entry in ``dict1``.  Returns the mutated ``dict1``.
    """
    for key, value in dict2.items():
        existing = dict1.get(key)
        if key in dict1 and isinstance(value, dict) and isinstance(existing, dict):
            for inner_key, inner_value in value.items():
                if inner_key in existing:
                    target_key = get_new_key(existing, inner_key)
                else:
                    target_key = inner_key
                existing[target_key] = inner_value
        else:
            dict1[key] = value
    return dict1


def get_new_key(dictionary, original_key):
    """Return ``"<original_key> (N)"`` for the smallest N >= 1 not already used."""
    suffix = 1
    while f"{original_key} ({suffix})" in dictionary:
        suffix += 1
    return f"{original_key} ({suffix})"

View file

@ -2,7 +2,6 @@ from langflow.api.v1.endpoints import router as endpoints_router
from langflow.api.v1.validate import router as validate_router
from langflow.api.v1.chat import router as chat_router
from langflow.api.v1.flows import router as flows_router
from langflow.api.v1.flow_styles import router as flow_styles_router
from langflow.api.v1.components import router as component_router
__all__ = [
@ -11,5 +10,4 @@ __all__ = [
"component_router",
"validate_router",
"flows_router",
"flow_styles_router",
]

View file

@ -3,13 +3,13 @@ from fastapi.responses import StreamingResponse
from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
from langflow.chat.manager import ChatManager
from langflow.services import service_manager, ServiceType
from langflow.graph.graph.base import Graph
from langflow.utils.logger import logger
from cachetools import LRUCache
router = APIRouter(tags=["Chat"])
chat_manager = ChatManager()
flow_data_store: LRUCache = LRUCache(maxsize=10)
@ -17,6 +17,7 @@ flow_data_store: LRUCache = LRUCache(maxsize=10)
async def chat(client_id: str, websocket: WebSocket):
"""Websocket endpoint for chat."""
try:
chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
if client_id in chat_manager.in_memory_cache:
await chat_manager.handle_websocket(client_id, websocket)
else:
@ -45,6 +46,7 @@ async def init_build(graph_data: dict, flow_id: str):
return InitResponse(flowId=flow_id)
# Delete from cache if already exists
chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
if flow_id in chat_manager.in_memory_cache:
with chat_manager.in_memory_cache._lock:
chat_manager.in_memory_cache.delete(flow_id)
@ -155,12 +157,12 @@ async def stream_build(flow_id: str):
)
else:
input_keys_response = {
"input_keys": {},
"input_keys": None,
"memory_keys": [],
"handle_keys": [],
}
yield str(StreamData(event="message", data=input_keys_response))
chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
chat_manager.set_cache(flow_id, langchain_object)
# We need to reset the chat history
chat_manager.chat_history.empty_history(flow_id)

View file

@ -1,8 +1,8 @@
from datetime import timezone
from typing import List
from uuid import UUID
from langflow.database.models.component import Component, ComponentModel
from langflow.database.base import get_session
from langflow.services.database.models.component import Component, ComponentModel
from langflow.services.utils import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.exc import IntegrityError

View file

@ -1,12 +1,11 @@
from http import HTTPStatus
from typing import Annotated, Optional
from langflow.cache.utils import save_uploaded_file
from langflow.database.models.flow import Flow
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.services.utils import get_settings_manager
from langflow.utils.logger import logger
from langflow.settings import settings
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body
from langflow.interface.custom.custom_component import CustomComponent
@ -18,7 +17,7 @@ from langflow.api.v1.schemas import (
CustomComponentCode,
)
from langflow.api.utils import merge_nested_dicts
from langflow.api.utils import merge_nested_dicts_with_renaming
from langflow.interface.types import (
build_langchain_types_dict,
@ -26,7 +25,7 @@ from langflow.interface.types import (
build_langchain_custom_component_list_from_path,
)
from langflow.database.base import get_session
from langflow.services.utils import get_session
from sqlmodel import Session
# build router
@ -40,20 +39,32 @@ def get_all():
# custom_components is a list of dicts
# need to merge all the keys into one dict
custom_components_from_file = {}
if settings.COMPONENTS_PATH:
logger.info(f"Building custom components from {settings.COMPONENTS_PATH}")
settings_manager = get_settings_manager()
if settings_manager.settings.COMPONENTS_PATH:
logger.info(
f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}"
)
custom_component_dicts = [
build_langchain_custom_component_list_from_path(str(path))
for path in settings.COMPONENTS_PATH
for path in settings_manager.settings.COMPONENTS_PATH
]
logger.info(f"Loading {len(custom_component_dicts)} custom components")
logger.info(f"Loading {len(custom_component_dicts)} category(ies)")
for custom_component_dict in custom_component_dicts:
custom_components_from_file = merge_nested_dicts(
# custom_component_dict is a dict of dicts
if not custom_component_dict:
continue
category = list(custom_component_dict.keys())[0]
logger.info(
f"Loading {len(custom_component_dict[category])} component(s) from category {category}"
)
logger.debug(custom_component_dict)
custom_components_from_file = merge_nested_dicts_with_renaming(
custom_components_from_file, custom_component_dict
)
logger.info(f"Loaded {custom_component_dict}")
return merge_nested_dicts(native_components, custom_components_from_file)
return merge_nested_dicts_with_renaming(
native_components, custom_components_from_file
)
# For backwards compatibility we will keep the old endpoint

View file

@ -1,83 +0,0 @@
from uuid import UUID
from langflow.database.models.flow_style import (
FlowStyle,
FlowStyleCreate,
FlowStyleRead,
FlowStyleUpdate,
)
from langflow.database.base import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
# build router
router = APIRouter(prefix="/flow_styles", tags=["FlowStyles"])
# FlowStyleCreate:
# class FlowStyleBase(SQLModel):
# color: str = Field(index=True)
# emoji: str = Field(index=False)
# flow_id: UUID = Field(default=None, foreign_key="flow.id")
@router.post("/", response_model=FlowStyleRead)
def create_flow_style(
*, session: Session = Depends(get_session), flow_style: FlowStyleCreate
):
"""Create a new flow_style."""
db_flow_style = FlowStyle.from_orm(flow_style)
session.add(db_flow_style)
session.commit()
session.refresh(db_flow_style)
return db_flow_style
@router.get("/", response_model=list[FlowStyleRead])
def read_flow_styles(*, session: Session = Depends(get_session)):
"""Read all flows."""
try:
flows = session.exec(select(FlowStyle)).all()
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e
return flows
@router.get("/{flow_styles_id}", response_model=FlowStyleRead)
def read_flow_style(*, session: Session = Depends(get_session), flow_styles_id: UUID):
"""Read a flow_style."""
if flow_style := session.get(FlowStyle, flow_styles_id):
return flow_style
else:
raise HTTPException(status_code=404, detail="FlowStyle not found")
@router.patch("/{flow_style_id}", response_model=FlowStyleRead)
def update_flow_style(
*,
session: Session = Depends(get_session),
flow_style_id: UUID,
flow_style: FlowStyleUpdate,
):
"""Update a flow_style."""
db_flow_style = session.get(FlowStyle, flow_style_id)
if not db_flow_style:
raise HTTPException(status_code=404, detail="FlowStyle not found")
flow_data = flow_style.dict(exclude_unset=True)
for key, value in flow_data.items():
if hasattr(db_flow_style, key) and value is not None:
setattr(db_flow_style, key, value)
session.add(db_flow_style)
session.commit()
session.refresh(db_flow_style)
return db_flow_style
@router.delete("/{flow_id}")
def delete_flow_style(*, session: Session = Depends(get_session), flow_id: UUID):
"""Delete a flow_style."""
flow_style = session.get(FlowStyle, flow_id)
if not flow_style:
raise HTTPException(status_code=404, detail="FlowStyle not found")
session.delete(flow_style)
session.commit()
return {"message": "FlowStyle deleted successfully"}

View file

@ -1,16 +1,15 @@
from typing import List
from uuid import UUID
from langflow.settings import settings
from langflow.api.utils import remove_api_keys
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.database.models.flow import (
from langflow.services.database.models.flow import (
Flow,
FlowCreate,
FlowRead,
FlowReadWithStyle,
FlowUpdate,
)
from langflow.database.base import get_session
from langflow.services.utils import get_session
from langflow.services.utils import get_settings_manager
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from fastapi.encoders import jsonable_encoder
@ -32,7 +31,7 @@ def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate):
return db_flow
@router.get("/", response_model=list[FlowReadWithStyle], status_code=200)
@router.get("/", response_model=list[FlowRead], status_code=200)
def read_flows(*, session: Session = Depends(get_session)):
"""Read all flows."""
try:
@ -42,7 +41,7 @@ def read_flows(*, session: Session = Depends(get_session)):
return [jsonable_encoder(flow) for flow in flows]
@router.get("/{flow_id}", response_model=FlowReadWithStyle, status_code=200)
@router.get("/{flow_id}", response_model=FlowRead, status_code=200)
def read_flow(*, session: Session = Depends(get_session), flow_id: UUID):
"""Read a flow."""
if flow := session.get(Flow, flow_id):
@ -61,7 +60,8 @@ def update_flow(
if not db_flow:
raise HTTPException(status_code=404, detail="Flow not found")
flow_data = flow.dict(exclude_unset=True)
if settings.REMOVE_API_KEYS:
settings_manager = get_settings_manager()
if settings_manager.settings.REMOVE_API_KEYS:
flow_data = remove_api_keys(flow_data)
for key, value in flow_data.items():
setattr(db_flow, key, value)

View file

@ -1,7 +1,7 @@
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from langflow.database.models.flow import FlowCreate, FlowRead
from langflow.services.database.models.flow import FlowCreate, FlowRead
from pydantic import BaseModel, Field, validator
import json

View file

@ -0,0 +1,76 @@
"""Authentication helpers: password hashing plus JWT creation and validation.

Exposed to the API layer through FastAPI dependency injection (``Depends``).
"""
from typing import Annotated
from jose import JWTError, jwt
from sqlalchemy.orm import Session
from passlib.context import CryptContext
from fastapi.security import OAuth2PasswordBearer
from fastapi import Depends, HTTPException, status
from datetime import datetime, timedelta, timezone
from langflow.database.models.token import TokenData
from langflow.database.models.user import get_user, User
from langflow.services.utils import get_session

# TODO(security): hard-coded signing key is for testing only -- move the key
# (and ideally the algorithm/expiry below) to environment/config before any
# real deployment.
SECRET_KEY = "698619adad2d916f1f32d264540976964b3c0d3828e0870a65add5800a8cc6b9"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30

# bcrypt context used for hashing and verifying user passwords.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# Extracts the bearer token from incoming requests; clients obtain one from
# the "token" endpoint.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")


def verify_password(plain_password, hashed_password):
    """Return True when ``plain_password`` matches the stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password):
    """Hash ``password`` with the configured bcrypt context."""
    return pwd_context.hash(password)


def create_access_token(data: dict, expires_delta: timedelta | None = None):
    """Encode ``data`` as a signed JWT expiring after ``expires_delta``.

    Falls back to a 15-minute lifetime when no delta is supplied.
    """
    to_encode = data.copy()
    if expires_delta:
        expire = datetime.now(timezone.utc) + expires_delta
    else:
        expire = datetime.now(timezone.utc) + timedelta(minutes=15)
    to_encode["exp"] = expire
    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)


def authenticate_user(db: Session, username: str, password: str):
    """Return the matching user when credentials are valid, else ``False``."""
    if user := get_user(db, username):
        return user if verify_password(password, user.password) else False
    else:
        return False


async def get_current_user(
    token: Annotated[str, Depends(oauth2_scheme)], db: Session = Depends(get_session)
):
    """Resolve the bearer ``token`` to a ``User`` or raise HTTP 401."""
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        # The "sub" claim carries the username set at token-creation time.
        username: str = payload.get("sub")  # type: ignore
        if username is None:
            raise credentials_exception
        token_data = TokenData(username=username)
    except JWTError as e:
        raise credentials_exception from e
    user = get_user(db, token_data.username)  # type: ignore
    if user is None:
        raise credentials_exception
    return user


async def get_current_active_user(
    current_user: Annotated[User, Depends(get_current_user)]
):
    """Dependency that additionally rejects disabled accounts with HTTP 400."""
    if current_user.is_disabled:
        raise HTTPException(status_code=400, detail="Inactive user")
    return current_user

View file

@ -1,7 +0,0 @@
from langflow.cache.manager import cache_manager
from langflow.cache.flow import InMemoryCache
__all__ = [
"cache_manager",
"InMemoryCache",
]

View file

@ -1,78 +0,0 @@
from contextlib import contextmanager
import os
from sqlmodel import SQLModel, Session, create_engine
from langflow.utils.logger import logger
class Engine:
_instance = None
@classmethod
def get(cls):
logger.debug("Getting database engine")
if cls._instance is None:
cls.create()
return cls._instance
@classmethod
def create(cls):
logger.debug("Creating database engine")
from langflow.settings import settings
if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
settings.DATABASE_URL = langflow_database_url
logger.debug("Using LANGFLOW_DATABASE_URL")
if settings.DATABASE_URL and settings.DATABASE_URL.startswith("sqlite"):
connect_args = {"check_same_thread": False}
else:
connect_args = {}
if not settings.DATABASE_URL:
raise RuntimeError("No database_url provided")
cls._instance = create_engine(settings.DATABASE_URL, connect_args=connect_args)
@classmethod
def update(cls):
logger.debug("Updating database engine")
cls._instance = None
cls.create()
def create_db_and_tables():
logger.debug("Creating database and tables")
try:
SQLModel.metadata.create_all(Engine.get())
except Exception as exc:
logger.error(f"Error creating database and tables: {exc}")
raise RuntimeError("Error creating database and tables") from exc
# Now check if the table Flow exists, if not, something went wrong
# and we need to create the tables again.
from sqlalchemy import inspect
inspector = inspect(Engine.get())
if "flow" not in inspector.get_table_names():
logger.error("Something went wrong creating the database and tables.")
logger.error("Please check your database settings.")
raise RuntimeError("Something went wrong creating the database and tables.")
else:
logger.debug("Database and tables created successfully")
@contextmanager
def session_getter():
try:
session = Session(Engine.get())
yield session
except Exception as e:
print("Session rollback because of exception:", e)
session.rollback()
raise
finally:
session.close()
def get_session():
with session_getter() as session:
yield session

View file

@ -1,33 +0,0 @@
# Path: src/backend/langflow/database/models/flowstyle.py
from langflow.database.models.base import SQLModelSerializable
from sqlmodel import Field, Relationship
from uuid import UUID, uuid4
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from langflow.database.models.flow import Flow
class FlowStyleBase(SQLModelSerializable):
color: str
emoji: str
flow_id: UUID = Field(default=None, foreign_key="flow.id")
class FlowStyle(FlowStyleBase, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
flow: "Flow" = Relationship(back_populates="style")
class FlowStyleUpdate(SQLModelSerializable):
color: Optional[str] = None
emoji: Optional[str] = None
class FlowStyleCreate(FlowStyleBase):
pass
class FlowStyleRead(FlowStyleBase):
id: UUID

View file

@ -0,0 +1,10 @@
from pydantic import BaseModel
class Token(BaseModel):
    """OAuth2 bearer-token response body returned by the ``/token`` endpoint."""

    access_token: str
    token_type: str
class TokenData(BaseModel):
    """Decoded token payload; ``username`` comes from the JWT ``sub`` claim."""

    username: str | None = None

View file

@ -0,0 +1,37 @@
from datetime import datetime
from sqlalchemy.orm import Session
from langflow.services.database.models.base import SQLModelSerializable, SQLModel
from sqlmodel import Field
from uuid import UUID, uuid4
class User(SQLModelSerializable, table=True):
    """Database model for an application user account."""

    id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
    username: str = Field(index=True, unique=True)
    # Expected to hold the *hashed* password, not plaintext — callers are
    # responsible for hashing before persisting (see the users router).
    password: str = Field()
    is_disabled: bool = Field(default=False)
    is_superuser: bool = Field(default=False)
    # NOTE(review): "create_at" looks like a typo for "created_at"; renaming
    # would change the DB column name, so confirm before fixing.
    create_at: datetime = Field(default_factory=datetime.utcnow)
    # NOTE(review): only set at creation here — nothing visible refreshes
    # updated_at on modification; confirm that is intended.
    updated_at: datetime = Field(default_factory=datetime.utcnow)
class UserAddModel(SQLModel):
    """Request body for creating a user; ``password`` arrives in plaintext
    and must be hashed before storage."""

    username: str = Field()
    password: str = Field()
    is_disabled: bool = Field(default=False)
    is_superuser: bool = Field(default=False)
class UserListModel(SQLModel):
    """Read/response model for listing users — deliberately omits ``password``."""

    id: UUID = Field(default_factory=uuid4)
    username: str = Field()
    is_disabled: bool = Field()
    is_superuser: bool = Field()
    create_at: datetime = Field()
    updated_at: datetime = Field()
def get_user(db: Session, username: str) -> "User | None":
    """Look up a user by username.

    Returns a detached ``User`` copy (via ``from_orm``) or ``None`` when no
    matching row exists. Return annotation widened to ``User | None`` to
    reflect the ``None`` branch.
    """
    db_user = db.query(User).filter(User.username == username).first()
    return User.from_orm(db_user) if db_user else None  # type: ignore

View file

@ -1,7 +1,7 @@
from typing import Dict, Generator, List, Type, Union
from langflow.graph.edge.base import Edge
from langflow.graph.graph.constants import VERTEX_TYPE_MAP
from langflow.graph.graph.constants import lazy_load_vertex_dict
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import (
FileToolVertex,
@ -187,10 +187,12 @@ class Graph:
"""Returns the node class based on the node type."""
if node_type in FILE_TOOLS:
return FileToolVertex
if node_type in VERTEX_TYPE_MAP:
return VERTEX_TYPE_MAP[node_type]
if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type]
return (
VERTEX_TYPE_MAP[node_lc_type] if node_lc_type in VERTEX_TYPE_MAP else Vertex
lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_lc_type]
if node_lc_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP
else Vertex
)
def _build_vertices(self) -> List[Vertex]:

View file

@ -1,4 +1,3 @@
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex import types
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
@ -15,23 +14,45 @@ from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.base import custom_component_creator
from typing import Dict, Type
from langflow.utils.lazy_load import LazyLoadDictBase
VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = {
**{t: types.PromptVertex for t in prompt_creator.to_list()},
**{t: types.AgentVertex for t in agent_creator.to_list()},
**{t: types.ChainVertex for t in chain_creator.to_list()},
**{t: types.ToolVertex for t in tool_creator.to_list()},
**{t: types.ToolkitVertex for t in toolkits_creator.to_list()},
**{t: types.WrapperVertex for t in wrapper_creator.to_list()},
**{t: types.LLMVertex for t in llm_creator.to_list()},
**{t: types.MemoryVertex for t in memory_creator.to_list()},
**{t: types.EmbeddingVertex for t in embedding_creator.to_list()},
**{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()},
**{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
**{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
**{t: types.OutputParserVertex for t in output_parser_creator.to_list()},
**{t: types.CustomComponentVertex for t in custom_component_creator.to_list()},
**{t: types.RetrieverVertex for t in retriever_creator.to_list()},
}
class VertexTypesDict(LazyLoadDictBase):
    """Maps component type names to their Vertex subclasses.

    The mapping is built on first access (via ``LazyLoadDictBase``) rather
    than at import time, since it pulls names from every creator.
    """

    def __init__(self):
        # Cache slot for the lazily built dict; populated on first access.
        self._all_types_dict = None

    @property
    def VERTEX_TYPE_MAP(self):
        # Lazy accessor provided by the base class.
        return self.all_types_dict

    def _build_dict(self):
        langchain_types_dict = self.get_type_dict()
        return {
            **langchain_types_dict,
            # NOTE(review): unlike the other entries (name -> Vertex class),
            # "Custom" maps to a list of names — confirm consumers of
            # VERTEX_TYPE_MAP expect this mixed value shape.
            "Custom": ["Custom Tool", "Python Function"],
        }

    def get_type_dict(self):
        # One entry per component name exposed by each creator, mapped to
        # the Vertex subclass used to build that component in the graph.
        return {
            **{t: types.PromptVertex for t in prompt_creator.to_list()},
            **{t: types.AgentVertex for t in agent_creator.to_list()},
            **{t: types.ChainVertex for t in chain_creator.to_list()},
            **{t: types.ToolVertex for t in tool_creator.to_list()},
            **{t: types.ToolkitVertex for t in toolkits_creator.to_list()},
            **{t: types.WrapperVertex for t in wrapper_creator.to_list()},
            **{t: types.LLMVertex for t in llm_creator.to_list()},
            **{t: types.MemoryVertex for t in memory_creator.to_list()},
            **{t: types.EmbeddingVertex for t in embedding_creator.to_list()},
            **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()},
            **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
            **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
            **{t: types.OutputParserVertex for t in output_parser_creator.to_list()},
            **{
                t: types.CustomComponentVertex
                for t in custom_component_creator.to_list()
            },
            **{t: types.RetrieverVertex for t in retriever_creator.to_list()},
        }


# Module-level singleton consumed by graph construction code.
lazy_load_vertex_dict = VertexTypesDict()

View file

@ -1,6 +1,6 @@
import ast
from langflow.interface.initialize import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.interface.listing import lazy_load_dict
from langflow.utils.constants import DIRECT_TYPES
from langflow.utils.logger import logger
from langflow.utils.util import sync_to_async
@ -62,7 +62,7 @@ class Vertex:
)
if self.base_type is None:
for base_type, value in ALL_TYPES_DICT.items():
for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items():
if self.vertex_type in value:
self.base_type = base_type
break

View file

@ -5,7 +5,8 @@ from langchain.agents import types
from langflow.custom.customs import get_custom_nodes
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.agents import AgentFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
@ -53,13 +54,17 @@ class AgentCreator(LangChainTypeCreator):
# Now this is a generator
def to_list(self) -> List[str]:
names = []
settings_manager = get_settings_manager()
for _, agent in self.type_to_loader_dict.items():
agent_name = (
agent.function_name()
if hasattr(agent, "function_name")
else agent.__name__
)
if agent_name in settings.AGENTS or settings.DEV:
if (
agent_name in settings_manager.settings.AGENTS
or settings_manager.settings.DEV
):
names.append(agent_name)
return names

View file

@ -2,13 +2,14 @@ from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type, Union
from langchain.chains.base import Chain
from langchain.agents import AgentExecutor
from langflow.services.utils import get_settings_manager
from pydantic import BaseModel
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.template.base import Template
from langflow.utils.logger import logger
from langflow.settings import settings
# Assuming necessary imports for Field, Template, and FrontendNode classes
@ -26,9 +27,12 @@ class LangChainTypeCreator(BaseModel, ABC):
@property
def docs_map(self) -> Dict[str, str]:
"""A dict with the name of the component as key and the documentation link as value."""
settings_manager = get_settings_manager()
if self.name_docs_dict is None:
try:
type_settings = getattr(settings, self.type_name.upper())
type_settings = getattr(
settings_manager.settings, self.type_name.upper()
)
self.name_docs_dict = {
name: value_dict["documentation"]
for name, value_dict in type_settings.items()

View file

@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional, Type
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.chains import ChainFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
@ -30,6 +31,7 @@ class ChainCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict: dict[str, Any] = {
chain_name: import_class(f"langchain.chains.{chain_name}")
for chain_name in chains.__all__
@ -43,7 +45,8 @@ class ChainCreator(LangChainTypeCreator):
self.type_dict = {
name: chain
for name, chain in self.type_dict.items()
if name in settings.CHAINS or settings.DEV
if name in settings_manager.settings.CHAINS
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -3,11 +3,12 @@ from fastapi import HTTPException
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
from langflow.interface.custom.component import Component
from langflow.interface.custom.directory_reader import DirectoryReader
from langflow.services.utils import get_db_manager
from langflow.utils import validate
from langflow.database.base import session_getter
from langflow.database.models.flow import Flow
from langflow.services.database.utils import session_getter
from langflow.services.database.models.flow import Flow
from pydantic import Extra
import yaml
@ -159,7 +160,8 @@ class CustomComponent(Component, extra=Extra.allow):
from langflow.processing.process import build_sorted_vertices_with_caching
from langflow.processing.process import process_tweaks
with session_getter() as session:
db_manager = get_db_manager()
with session_getter(db_manager) as session:
graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None
if not graph_data:
raise ValueError(f"Flow {flow_id} not found")
@ -169,7 +171,8 @@ class CustomComponent(Component, extra=Extra.allow):
def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]:
get_session = get_session or session_getter
with get_session() as session:
db_manager = get_db_manager()
with get_session(db_manager) as session:
flows = session.query(Flow).all()
return flows
@ -182,8 +185,8 @@ class CustomComponent(Component, extra=Extra.allow):
get_session: Optional[Callable] = None,
) -> Flow:
get_session = get_session or session_getter
with get_session() as session:
db_manager = get_db_manager()
with get_session(db_manager) as session:
if flow_id:
flow = session.query(Flow).get(flow_id)
elif flow_name:

View file

@ -1,9 +1,10 @@
from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -30,10 +31,12 @@ class DocumentLoaderCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
documentloader.__name__
for documentloader in self.type_to_loader_dict.values()
if documentloader.__name__ in settings.DOCUMENTLOADERS or settings.DEV
if documentloader.__name__ in settings_manager.settings.DOCUMENTLOADERS
or settings_manager.settings.DEV
]

View file

@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import embedding_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode
from langflow.utils.logger import logger
@ -32,10 +33,12 @@ class EmbeddingCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
embedding.__name__
for embedding in self.type_to_loader_dict.values()
if embedding.__name__ in settings.EMBEDDINGS or settings.DEV
if embedding.__name__ in settings_manager.settings.EMBEDDINGS
or settings_manager.settings.DEV
]

View file

@ -170,6 +170,26 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict):
def initialize_chroma(class_object: Type[Chroma], params: dict):
"""Initialize a ChromaDB object from the params"""
if ( # type: ignore
"chroma_server_host" in params or "chroma_server_http_port" in params
):
import chromadb # type: ignore
settings_params = {
key: params[key]
for key, value_ in params.items()
if key.startswith("chroma_server_") and value_
}
chroma_settings = chromadb.config.Settings(**settings_params)
params["client_settings"] = chroma_settings
else:
# remove all chroma_server_ keys from params
params = {
key: value
for key, value in params.items()
if not key.startswith("chroma_server_")
}
persist = params.pop("persist", False)
if not docs_in_params(params):
params.pop("documents", None)

View file

@ -14,34 +14,43 @@ from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.base import custom_component_creator
from langflow.utils.lazy_load import LazyLoadDictBase
def get_type_dict():
return {
"agents": agent_creator.to_list(),
"prompts": prompt_creator.to_list(),
"llms": llm_creator.to_list(),
"tools": tool_creator.to_list(),
"chains": chain_creator.to_list(),
"memory": memory_creator.to_list(),
"toolkits": toolkits_creator.to_list(),
"wrappers": wrapper_creator.to_list(),
"documentLoaders": documentloader_creator.to_list(),
"vectorStore": vectorstore_creator.to_list(),
"embeddings": embedding_creator.to_list(),
"textSplitters": textsplitter_creator.to_list(),
"utilities": utility_creator.to_list(),
"outputParsers": output_parser_creator.to_list(),
"retrievers": retriever_creator.to_list(),
"custom_components": custom_component_creator.to_list(),
}
class AllTypesDict(LazyLoadDictBase):
    """Lazily-built listing of every component category and its type names.

    Built on first access (via ``LazyLoadDictBase``) instead of at import
    time, since it calls ``to_list()`` on every creator.
    """

    def __init__(self):
        # Cache slot for the lazily built dict; populated on first access.
        self._all_types_dict = None

    @property
    def ALL_TYPES_DICT(self):
        # Lazy accessor provided by the base class.
        return self.all_types_dict

    def _build_dict(self):
        langchain_types_dict = self.get_type_dict()
        return {
            **langchain_types_dict,
            # Langflow's own component names, appended to the langchain ones.
            "Custom": ["Custom Tool", "Python Function"],
        }

    def get_type_dict(self):
        # Category name -> list of available type names for that category.
        return {
            "agents": agent_creator.to_list(),
            "prompts": prompt_creator.to_list(),
            "llms": llm_creator.to_list(),
            "tools": tool_creator.to_list(),
            "chains": chain_creator.to_list(),
            "memory": memory_creator.to_list(),
            "toolkits": toolkits_creator.to_list(),
            "wrappers": wrapper_creator.to_list(),
            "documentLoaders": documentloader_creator.to_list(),
            "vectorStore": vectorstore_creator.to_list(),
            "embeddings": embedding_creator.to_list(),
            "textSplitters": textsplitter_creator.to_list(),
            "utilities": utility_creator.to_list(),
            "outputParsers": output_parser_creator.to_list(),
            "retrievers": retriever_creator.to_list(),
            "custom_components": custom_component_creator.to_list(),
        }
LANGCHAIN_TYPES_DICT = get_type_dict()
# Now we'll build a dict with Langchain types and ours
ALL_TYPES_DICT = {
**LANGCHAIN_TYPES_DICT,
"Custom": ["Custom Tool", "Python Function"],
}
lazy_load_dict = AllTypesDict()

View file

@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.llms import LLMFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -33,10 +34,12 @@ class LLMCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
llm.__name__
for llm in self.type_to_loader_dict.values()
if llm.__name__ in settings.LLMS or settings.DEV
if llm.__name__ in settings_manager.settings.LLMS
or settings_manager.settings.DEV
]

View file

@ -2,7 +2,8 @@ from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.custom_lists import memory_type_to_cls_dict
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.memories import MemoryFrontendNode
from langflow.utils.logger import logger
@ -48,10 +49,12 @@ class MemoryCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
memory.__name__
for memory in self.type_to_loader_dict.values()
if memory.__name__ in settings.MEMORIES or settings.DEV
if memory.__name__ in settings_manager.settings.MEMORIES
or settings_manager.settings.DEV
]

View file

@ -4,7 +4,8 @@ from langchain import output_parsers
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
@ -23,6 +24,7 @@ class OutputParserCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict = {
output_parser_name: import_class(
f"langchain.output_parsers.{output_parser_name}"
@ -33,7 +35,8 @@ class OutputParserCreator(LangChainTypeCreator):
self.type_dict = {
name: output_parser
for name, output_parser in self.type_dict.items()
if name in settings.OUTPUT_PARSERS or settings.DEV
if name in settings_manager.settings.OUTPUT_PARSERS
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -5,7 +5,8 @@ from langchain import prompts
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.prompts import PromptFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -20,6 +21,7 @@ class PromptCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
settings_manager = get_settings_manager()
if self.type_dict is None:
self.type_dict = {
prompt_name: import_class(f"langchain.prompts.{prompt_name}")
@ -34,7 +36,8 @@ class PromptCreator(LangChainTypeCreator):
self.type_dict = {
name: prompt
for name, prompt in self.type_dict.items()
if name in settings.PROMPTS or settings.DEV
if name in settings_manager.settings.PROMPTS
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -4,7 +4,8 @@ from langchain import retrievers
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.retrievers import RetrieverFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_method, build_template_from_class
@ -48,10 +49,12 @@ class RetrieverCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
retriever
for retriever in self.type_to_loader_dict.keys()
if retriever in settings.RETRIEVERS or settings.DEV
if retriever in settings_manager.settings.RETRIEVERS
or settings_manager.settings.DEV
]

View file

@ -1,4 +1,4 @@
from langflow.cache.utils import memoize_dict
from langflow.services.cache.utils import memoize_dict
from langflow.graph import Graph
from langflow.utils.logger import logger

View file

@ -1,9 +1,10 @@
from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode
from langflow.interface.custom_lists import textsplitter_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -30,10 +31,12 @@ class TextSplitterCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
textsplitter.__name__
for textsplitter in self.type_to_loader_dict.values()
if textsplitter.__name__ in settings.TEXTSPLITTERS or settings.DEV
if textsplitter.__name__ in settings_manager.settings.TEXTSPLITTERS
or settings_manager.settings.DEV
]

View file

@ -4,7 +4,8 @@ from langchain.agents import agent_toolkits
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class, import_module
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -29,13 +30,15 @@ class ToolkitCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict = {
toolkit_name: import_class(
f"langchain.agents.agent_toolkits.{toolkit_name}"
)
# if toolkit_name is not lower case it is a class
for toolkit_name in agent_toolkits.__all__
if not toolkit_name.islower() and toolkit_name in settings.TOOLKITS
if not toolkit_name.islower()
and toolkit_name in settings_manager.settings.TOOLKITS
}
return self.type_dict

View file

@ -15,7 +15,8 @@ from langflow.interface.tools.constants import (
OTHER_TOOLS,
)
from langflow.interface.tools.util import get_tool_params
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.field.base import TemplateField
from langflow.template.template.base import Template
from langflow.utils import util
@ -66,6 +67,7 @@ class ToolCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
settings_manager = get_settings_manager()
if self.tools_dict is None:
all_tools = {}
@ -74,7 +76,10 @@ class ToolCreator(LangChainTypeCreator):
tool_name = tool_params.get("name") or tool
if tool_name in settings.TOOLS or settings.DEV:
if (
tool_name in settings_manager.settings.TOOLS
or settings_manager.settings.DEV
):
if tool_name == "JsonSpec":
tool_params["path"] = tool_params.pop("dict_") # type: ignore
all_tools[tool_name] = {

View file

@ -1,6 +1,7 @@
import ast
import contextlib
from typing import Any
from langflow.api.utils import merge_nested_dicts_with_renaming
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
@ -30,7 +31,6 @@ from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.directory_reader import DirectoryReader
from langflow.utils.logger import logger
from langflow.utils.util import get_base_classes
from langflow.api.utils import merge_nested_dicts
import re
import warnings
@ -423,4 +423,4 @@ def build_langchain_custom_component_list_from_path(path: str):
valid_menu = build_valid_menu(valid_components)
invalid_menu = build_invalid_menu(invalid_components)
return merge_nested_dicts(valid_menu, invalid_menu)
return merge_nested_dicts_with_renaming(valid_menu, invalid_menu)

View file

@ -5,7 +5,8 @@ from langchain import SQLDatabase, utilities
from langflow.custom.customs import get_custom_nodes
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.utilities import UtilitiesFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
@ -26,6 +27,7 @@ class UtilityCreator(LangChainTypeCreator):
from the langchain.chains module and filtering them according to the settings.utilities list.
"""
if self.type_dict is None:
settings_manager = get_settings_manager()
self.type_dict = {
utility_name: import_class(f"langchain.utilities.{utility_name}")
for utility_name in utilities.__all__
@ -35,7 +37,8 @@ class UtilityCreator(LangChainTypeCreator):
self.type_dict = {
name: utility
for name, utility in self.type_dict.items()
if name in settings.UTILITIES or settings.DEV
if name in settings_manager.settings.UTILITIES
or settings_manager.settings.DEV
}
return self.type_dict

View file

@ -9,7 +9,8 @@ import yaml
from langchain.base_language import BaseLanguageModel
from PIL.Image import Image
from langflow.utils.logger import logger
from langflow.chat.config import ChatConfig
from langflow.services.chat.config import ChatConfig
from langflow.services.utils import get_settings_manager
def load_file_into_dict(file_path: str) -> dict:
@ -63,13 +64,11 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]:
def setup_llm_caching():
"""Setup LLM caching."""
from langflow.settings import settings
settings_manager = get_settings_manager()
try:
set_langchain_cache(settings)
set_langchain_cache(settings_manager.settings)
except ImportError:
logger.warning(f"Could not import {settings.CACHE}. ")
logger.warning(f"Could not import {settings_manager.settings.CACHE}. ")
except Exception as exc:
logger.warning(f"Could not setup LLM caching. Error: {exc}")

View file

@ -4,7 +4,8 @@ from langchain import vectorstores
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.settings import settings
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_method
@ -43,10 +44,12 @@ class VectorstoreCreator(LangChainTypeCreator):
return None
def to_list(self) -> List[str]:
settings_manager = get_settings_manager()
return [
vectorstore
for vectorstore in self.type_to_loader_dict.keys()
if vectorstore in settings.VECTORSTORES or settings.DEV
if vectorstore in settings_manager.settings.VECTORSTORES
or settings_manager.settings.DEV
]

View file

@ -6,24 +6,22 @@ from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from langflow.api import router
from langflow.database.base import create_db_and_tables, Engine
from langflow.routers import login, users, health
from langflow.interface.utils import setup_llm_caching
from langflow.services.database.utils import initialize_database
from langflow.services.manager import initialize_services
from langflow.utils.logger import configure
def create_app():
"""Create the FastAPI app and include the router."""
configure()
app = FastAPI()
origins = [
"*",
]
@app.get("/health")
def get_health():
return {"status": "OK"}
origins = ["*"]
app.add_middleware(
CORSMiddleware,
@ -33,9 +31,14 @@ def create_app():
allow_headers=["*"],
)
app.include_router(login.router)
app.include_router(users.router)
app.include_router(health.router)
app.include_router(router)
app.on_event("startup")(Engine.update)
app.on_event("startup")(create_db_and_tables)
app.on_event("startup")(initialize_services)
app.on_event("startup")(initialize_database)
app.on_event("startup")(setup_llm_caching)
return app
@ -68,16 +71,19 @@ def get_static_files_dir():
return frontend_path / "frontend"
def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI:
def setup_app(
static_files_dir: Optional[Path] = None, backend_only: bool = False
) -> FastAPI:
"""Setup the FastAPI app."""
# get the directory of the current file
if not static_files_dir:
static_files_dir = get_static_files_dir()
if not static_files_dir or not static_files_dir.exists():
if not backend_only and (not static_files_dir or not static_files_dir.exists()):
raise RuntimeError(f"Static files directory {static_files_dir} does not exist.")
app = create_app()
setup_static_files(app, static_files_dir)
if not backend_only and static_files_dir is not None:
setup_static_files(app, static_files_dir)
return app

View file

@ -0,0 +1,8 @@
from fastapi import APIRouter
router = APIRouter()
@router.get("/health")
def get_health():
    """Liveness probe: always responds with ``{"status": "OK"}``."""
    return {"status": "OK"}

View file

@ -0,0 +1,41 @@
from datetime import timedelta
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from langflow.database.models.token import Token
from langflow.auth.auth import (
ACCESS_TOKEN_EXPIRE_MINUTES,
authenticate_user,
create_access_token,
)
from sqlalchemy.orm import Session
from langflow.services.utils import get_session
from langflow.database.models.user import User
router = APIRouter()
def create_user_token(user: User) -> dict:
    """Build the OAuth2 bearer-token response for ``user``.

    The JWT ``sub`` claim is the username; expiry comes from
    ``ACCESS_TOKEN_EXPIRE_MINUTES``.
    """
    token = create_access_token(
        data={"sub": user.username},
        expires_delta=timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES),
    )
    return {"access_token": token, "token_type": "bearer"}
@router.post("/token", response_model=Token)
async def login_to_get_access_token(
    form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_session)
):
    """Exchange username/password form credentials for a JWT access token.

    Responds 401 (with a ``WWW-Authenticate: Bearer`` header) when the
    credentials do not match an existing user.
    """
    user = authenticate_user(db, form_data.username, form_data.password)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return create_user_token(user)

View file

@ -0,0 +1,79 @@
from typing import List
from sqlmodel import Session, select
from sqlalchemy.exc import IntegrityError
from fastapi import APIRouter, Depends, HTTPException
from langflow.services.utils import get_session
from langflow.auth.auth import get_current_active_user
from langflow.database.models.user import UserAddModel, UserListModel, User
from passlib.context import CryptContext
router = APIRouter(tags=["Login"])
def get_password_hash(password):
    """Hash a plaintext password with bcrypt via passlib.

    The ``CryptContext`` is created once and cached on the function itself:
    the original code rebuilt it (re-parsing the scheme configuration) on
    every single call.
    """
    ctx = getattr(get_password_hash, "_pwd_context", None)
    if ctx is None:
        ctx = CryptContext(schemes=["bcrypt"], deprecated="auto")
        get_password_hash._pwd_context = ctx
    return ctx.hash(password)
@router.get("/user", response_model=UserListModel)
def read_current_user(current_user: User = Depends(get_current_active_user)):
    """Return the authenticated user's profile (password excluded by the
    ``UserListModel`` response model)."""
    return current_user
@router.get("/users", response_model=List[UserListModel])
def read_all_users(
    skip: int = 0,
    limit: int = 10,
    _: User = Depends(get_current_active_user),
    db: Session = Depends(get_session),
):
    """Paginated list of all users; requires an authenticated active user.

    Args:
        skip: number of rows to skip (offset).
        limit: maximum number of rows to return.
    """
    query = select(User).offset(skip).limit(limit)
    # .scalars() unwraps the Row tuples produced by execute() into plain
    # User instances; the original fetchall() returned Rows, which do not
    # serialize cleanly through the UserListModel response model.
    return db.execute(query).scalars().all()
@router.post("/user", response_model=User)
def add_user(
    user: UserAddModel,
    _: User = Depends(get_current_active_user),
    db: Session = Depends(get_session),
):
    """Create a new user from the submitted payload (auth required).

    Raises:
        HTTPException 400: when the username violates the unique constraint.

    NOTE(review): ``response_model=User`` serializes the stored (hashed)
    password back to the client — consider ``UserListModel`` instead; confirm
    before changing, since it alters the API response shape.
    """
    new_user = User(**user.dict())
    try:
        # Never persist the plaintext password; store its bcrypt hash.
        new_user.password = get_password_hash(user.password)
        db.add(new_user)
        db.commit()
        db.refresh(new_user)
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=400,
            detail="User exists",
        ) from e
    return new_user
# TODO: Remove - Just for testing purposes
@router.post("/super_user", response_model=User)
def add_super_user_to_testing_purposes(db: Session = Depends(get_session)):
    """Create a hard-coded superuser ("superuser" / "12345") for manual testing.

    WARNING: unauthenticated endpoint with a fixed weak password — must not
    ship to production (see the TODO above).

    Raises:
        HTTPException 400: when the superuser already exists.
    """
    new_user = User(username="superuser", password="12345", is_superuser=True)
    try:
        # Replace the plaintext password with its hash before persisting.
        new_user.password = get_password_hash(new_user.password)
        db.add(new_user)
        db.commit()
        db.refresh(new_user)
    except IntegrityError as e:
        db.rollback()
        raise HTTPException(
            status_code=400,
            detail="User exists",
        ) from e
    return new_user

View file

@ -0,0 +1,4 @@
from .manager import service_manager
from .schema import ServiceType
__all__ = ["service_manager", "ServiceType"]

View file

@ -0,0 +1,2 @@
class Service:
name: str

View file

@ -0,0 +1,11 @@
from . import factory, manager
from langflow.services.cache.manager import cache_manager
from langflow.services.cache.flow import InMemoryCache
__all__ = [
"cache_manager",
"factory",
"manager",
"InMemoryCache",
]

View file

@ -0,0 +1,11 @@
from langflow.services.cache.manager import CacheManager
from langflow.services.factory import ServiceFactory
class CacheManagerFactory(ServiceFactory):
    """Factory producing the CacheManager service."""

    def __init__(self):
        super().__init__(CacheManager)

    def create(self, settings_service):
        # Here you would have logic to create and configure a CacheManager
        # settings_service is accepted for interface uniformity with the
        # other factories; it is not currently used.
        return CacheManager()

View file

@ -2,7 +2,7 @@ import threading
import time
from collections import OrderedDict
from langflow.cache.base import BaseCache
from langflow.services.cache.base import BaseCache
class InMemoryCache(BaseCache):

View file

@ -1,5 +1,6 @@
from contextlib import contextmanager
from typing import Any, Awaitable, Callable, List, Optional
from langflow.services.base import Service
import pandas as pd
from PIL import Image
@ -49,9 +50,11 @@ class AsyncSubject:
await observer()
class CacheManager(Subject):
class CacheManager(Subject, Service):
"""Manages cache for different clients and notifies observers on changes."""
name = "cache_manager"
def __init__(self):
super().__init__()
self._cache = {}

View file

@ -0,0 +1,11 @@
from langflow.services.chat.manager import ChatManager
from langflow.services.factory import ServiceFactory
class ChatManagerFactory(ServiceFactory):
    """Factory producing the ChatManager service."""

    def __init__(self):
        super().__init__(ChatManager)

    def create(self, settings_service):
        # Here you would have logic to create and configure a ChatManager
        # settings_service is accepted for interface uniformity with the
        # other factories; it is not currently used.
        return ChatManager()

View file

@ -1,10 +1,12 @@
from collections import defaultdict
from fastapi import WebSocket, status
from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse
from langflow.cache import cache_manager
from langflow.cache.manager import Subject
from langflow.chat.utils import process_graph
from langflow.services.base import Service
from langflow.services import service_manager
from langflow.services.cache.manager import Subject
from langflow.services.chat.utils import process_graph
from langflow.interface.utils import pil_to_base64
from langflow.services.schema import ServiceType
from langflow.utils.logger import logger
@ -12,7 +14,7 @@ import asyncio
import json
from typing import Any, Dict, List
from langflow.cache.flow import InMemoryCache
from langflow.services.cache.flow import InMemoryCache
class ChatHistory(Subject):
@ -42,11 +44,13 @@ class ChatHistory(Subject):
self.history[client_id] = []
class ChatManager:
class ChatManager(Service):
name = "chat_manager"
def __init__(self):
self.active_connections: Dict[str, WebSocket] = {}
self.chat_history = ChatHistory()
self.cache_manager = cache_manager
self.cache_manager = service_manager.get(ServiceType.CACHE_MANAGER)
self.cache_manager.attach(self.update)
self.in_memory_cache = InMemoryCache()
@ -117,7 +121,7 @@ class ChatManager:
self, client_id: str, payload: Dict, langchain_object: Any
):
# Process the graph data and chat message
chat_inputs = payload.pop("inputs", "")
chat_inputs = payload.pop("inputs", {})
chat_inputs = ChatMessage(message=chat_inputs)
self.chat_history.add_message(client_id, chat_inputs)

View file

@ -21,9 +21,9 @@ async def process_graph(
# Generate result and thought
try:
if not chat_inputs.message:
if chat_inputs.message is None:
logger.debug("No message provided")
raise ValueError("No message provided")
chat_inputs.message = {}
logger.debug("Generating result and thought")
result, intermediate_steps = await get_result_and_steps(

View file

@ -0,0 +1,17 @@
from typing import TYPE_CHECKING
from langflow.services.database.manager import DatabaseManager
from langflow.services.factory import ServiceFactory
if TYPE_CHECKING:
from langflow.services.settings.manager import SettingsManager
class DatabaseManagerFactory(ServiceFactory):
    """Factory producing the DatabaseManager service."""

    def __init__(self):
        super().__init__(DatabaseManager)

    def create(self, settings_service: "SettingsManager"):
        # Here you would have logic to create and configure a DatabaseManager
        # Fail fast: a DatabaseManager without a connection URL is unusable.
        if not settings_service.settings.DATABASE_URL:
            raise ValueError("No database URL provided")
        return DatabaseManager(settings_service.settings.DATABASE_URL)

View file

@ -0,0 +1,67 @@
from pathlib import Path
from langflow.services.base import Service
from sqlmodel import SQLModel, Session, create_engine
from langflow.utils.logger import logger
from alembic.config import Config
from alembic import command
from langflow.services.database import models # noqa
class DatabaseManager(Service):
    """Service owning the SQLModel engine, sessions and Alembic migrations."""

    name = "database_manager"

    def __init__(self, database_url: str):
        self.database_url = database_url
        # This file is in langflow.services.database.manager.py
        # the ini is in langflow
        langflow_dir = Path(__file__).parent.parent.parent
        self.script_location = langflow_dir / "alembic"
        self.alembic_cfg_path = langflow_dir / "alembic.ini"
        self.engine = create_engine(database_url)

    def __enter__(self):
        # Using the manager as a context manager opens a session that is
        # committed on clean exit and rolled back on exception (__exit__).
        self._session = Session(self.engine)
        return self._session

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is not None:  # If an exception has been raised
            logger.error(
                f"Session rollback because of exception: {exc_type.__name__} {exc_value}"
            )
            self._session.rollback()
        else:
            self._session.commit()
        self._session.close()

    def get_session(self):
        # Generator form for dependency injection; the session is closed
        # when the generator is exhausted.
        with Session(self.engine) as session:
            yield session

    def run_migrations(self):
        """Apply all pending Alembic migrations up to head."""
        logger.info(
            f"Running DB migrations in {self.script_location} on {self.database_url}"
        )
        # Build the Alembic config programmatically instead of reading
        # alembic.ini, so paths resolve relative to the installed package.
        alembic_cfg = Config()
        alembic_cfg.set_main_option("script_location", str(self.script_location))
        alembic_cfg.set_main_option("sqlalchemy.url", self.database_url)
        command.upgrade(alembic_cfg, "head")

    def create_db_and_tables(self):
        """Create all SQLModel tables and verify the "flow" table exists."""
        logger.debug("Creating database and tables")
        try:
            SQLModel.metadata.create_all(self.engine)
        except Exception as exc:
            logger.error(f"Error creating database and tables: {exc}")
            raise RuntimeError("Error creating database and tables") from exc
        # Now check if the table "flow" exists, if not, something went wrong
        # and we need to create the tables again.
        from sqlalchemy import inspect

        inspector = inspect(self.engine)
        if "flow" not in inspector.get_table_names():
            logger.error("Something went wrong creating the database and tables.")
            logger.error("Please check your database settings.")
            raise RuntimeError("Something went wrong creating the database and tables.")
        else:
            logger.debug("Database and tables created successfully")

View file

@ -0,0 +1,4 @@
from .flow import Flow
__all__ = ["Flow"]

View file

@ -1,4 +1,4 @@
from langflow.database.models.base import SQLModelSerializable, SQLModel
from langflow.services.database.models.base import SQLModelSerializable, SQLModel
from sqlmodel import Field
from typing import Optional
from datetime import datetime

View file

@ -1,13 +1,12 @@
# Path: src/backend/langflow/database/models/flow.py
from langflow.database.models.base import SQLModelSerializable
from langflow.services.database.models.base import SQLModelSerializable
from pydantic import validator
from sqlmodel import Field, Relationship, JSON, Column
from sqlmodel import Field, JSON, Column
from uuid import UUID, uuid4
from typing import Dict, Optional
# if TYPE_CHECKING:
from langflow.database.models.flow_style import FlowStyle, FlowStyleRead
class FlowBase(SQLModelSerializable):
@ -35,11 +34,6 @@ class FlowBase(SQLModelSerializable):
class Flow(FlowBase, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
data: Optional[Dict] = Field(default=None, sa_column=Column(JSON))
style: Optional["FlowStyle"] = Relationship(
back_populates="flow",
# use "uselist=False" to make it a one-to-one relationship
sa_relationship_kwargs={"uselist": False},
)
class FlowCreate(FlowBase):
@ -50,10 +44,6 @@ class FlowRead(FlowBase):
id: UUID
class FlowReadWithStyle(FlowRead):
style: Optional["FlowStyleRead"] = None
class FlowUpdate(SQLModelSerializable):
name: Optional[str] = None
description: Optional[str] = None

View file

@ -0,0 +1,47 @@
from typing import TYPE_CHECKING
from langflow.utils.logger import logger
from contextlib import contextmanager
from alembic.util.exc import CommandError
from sqlmodel import Session
if TYPE_CHECKING:
from langflow.services.database.manager import DatabaseManager
def initialize_database():
    """Run DB migrations (recovering from a bad revision) and create tables."""
    logger.debug("Initializing database")
    # Imported here to avoid a circular import with langflow.services.
    from langflow.services import service_manager, ServiceType

    database_manager = service_manager.get(ServiceType.DATABASE_MANAGER)
    try:
        database_manager.run_migrations()
    except CommandError as exc:
        # Only the "unknown revision" error is recoverable; re-raise
        # every other Alembic CommandError.
        if "Can't locate revision identified by" not in str(exc):
            raise exc
        # This means there's wrong revision in the DB
        # We need to delete the alembic_version table
        # and run the migrations again
        logger.warning(
            "Wrong revision in DB, deleting alembic_version table and running migrations again"
        )
        # NOTE(review): session_getter does not commit on success - confirm
        # the DROP TABLE takes effect (DDL may autocommit per backend).
        with session_getter(database_manager) as session:
            session.execute("DROP TABLE alembic_version")
        database_manager.run_migrations()
    except Exception as exc:
        logger.error(f"Error running migrations: {exc}")
        raise RuntimeError("Error running migrations") from exc
    database_manager.create_db_and_tables()
    logger.debug("Database initialized")
@contextmanager
def session_getter(db_manager: "DatabaseManager"):
    """Yield a Session on ``db_manager``'s engine; rollback on error.

    The session is always closed on exit. Exceptions are re-raised
    after rollback.
    """
    # Create the session before entering the try block: if Session()
    # itself fails, the old code hit a NameError in `finally` because
    # `session` was never bound.
    session = Session(db_manager.engine)
    try:
        yield session
    except Exception as e:
        # Use the module logger (as the rest of the file does) rather
        # than a bare print.
        logger.error(f"Session rollback because of exception: {e}")
        session.rollback()
        raise
    finally:
        session.close()

View file

@ -0,0 +1,6 @@
class ServiceFactory:
    """Base class for factories that build Service instances."""

    def __init__(self, service_class):
        # The concrete Service subclass this factory produces; its `name`
        # attribute is used as the registration key by ServiceManager.
        self.service_class = service_class

    def create(self, *args, **kwargs):
        # Subclasses must implement the actual construction logic.
        raise NotImplementedError

View file

@ -0,0 +1,87 @@
from langflow.services.schema import ServiceType
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from langflow.services.factory import ServiceFactory
class ServiceManager:
    """
    Manages the creation of different services.
    """

    def __init__(self):
        # Instantiated services, keyed by service name.
        self.services = {}
        # Registered factories, keyed by the service class' `name` string.
        self.factories = {}

    def register_factory(self, service_factory: "ServiceFactory"):
        """
        Registers a new factory.
        """
        # Keys are plain strings; lookups with ServiceType members still
        # match because ServiceType is a str-based Enum.
        self.factories[service_factory.service_class.name] = service_factory

    def get(self, service_name: ServiceType):
        """
        Get (or create) a service by its name.
        """
        # Lazily create the service on first request, then cache it.
        if service_name not in self.services:
            self._create_service(service_name)
        return self.services[service_name]

    def _create_service(self, service_name: ServiceType):
        """
        Create a new service given its name.
        """
        self._validate_service_creation(service_name)
        # The settings service is the bootstrap dependency: it is created
        # without arguments, and every other service receives it.
        if service_name == ServiceType.SETTINGS_MANAGER:
            self.services[service_name] = self.factories[service_name].create()
        else:
            settings_service = self.get(ServiceType.SETTINGS_MANAGER)
            self.services[service_name] = self.factories[service_name].create(
                settings_service
            )

    def _validate_service_creation(self, service_name: ServiceType):
        """
        Validate whether the service can be created.
        """
        if service_name not in self.factories:
            raise ValueError(
                f"No factory registered for the service class '{service_name.name}'"
            )
        if (
            ServiceType.SETTINGS_MANAGER not in self.factories
            and service_name != ServiceType.SETTINGS_MANAGER
        ):
            raise ValueError(
                f"Cannot create service '{service_name.name}' before the settings service"
            )

    def update(self, service_name: ServiceType):
        """
        Update a service by its name.
        """
        # Drop the cached instance (if any) and eagerly rebuild it.
        if service_name in self.services:
            self.services.pop(service_name, None)
            self.get(service_name)
service_manager = ServiceManager()
def initialize_services():
    """
    Initialize all the services needed.
    """
    # Imported lazily to avoid circular imports between the individual
    # service packages and this manager module.
    from langflow.services.database import factory as database_factory
    from langflow.services.cache import factory as cache_factory
    from langflow.services.chat import factory as chat_factory
    from langflow.services.settings import factory as settings_factory

    # The settings factory must be registered so that
    # _validate_service_creation finds it before any other service is built.
    service_manager.register_factory(settings_factory.SettingsManagerFactory())
    service_manager.register_factory(database_factory.DatabaseManagerFactory())
    service_manager.register_factory(cache_factory.CacheManagerFactory())
    service_manager.register_factory(chat_factory.ChatManagerFactory())

View file

@ -0,0 +1,13 @@
from enum import Enum
class ServiceType(str, Enum):
    """
    Enum for the different types of services that can be
    registered with the service manager.
    """

    # Values match each service class' `name` attribute, so members
    # compare equal to the plain strings used as factory keys.
    CACHE_MANAGER = "cache_manager"
    SETTINGS_MANAGER = "settings_manager"
    DATABASE_MANAGER = "database_manager"
    CHAT_MANAGER = "chat_manager"

View file

@ -0,0 +1,3 @@
from . import factory, manager
__all__ = ["factory", "manager"]

View file

@ -0,0 +1,168 @@
import contextlib
import json
import os
from typing import Optional, List
from pathlib import Path
import yaml
from pydantic import BaseSettings, root_validator, validator
from langflow.utils.logger import logger
BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
class Settings(BaseSettings):
    """Application settings, read from LANGFLOW_-prefixed env vars and YAML.

    The upper-case dict fields are component registries
    (component name -> config) for each langchain component family.
    """

    CHAINS: dict = {}
    AGENTS: dict = {}
    PROMPTS: dict = {}
    LLMS: dict = {}
    TOOLS: dict = {}
    MEMORIES: dict = {}
    EMBEDDINGS: dict = {}
    VECTORSTORES: dict = {}
    DOCUMENTLOADERS: dict = {}
    WRAPPERS: dict = {}
    RETRIEVERS: dict = {}
    TOOLKITS: dict = {}
    TEXTSPLITTERS: dict = {}
    UTILITIES: dict = {}
    OUTPUT_PARSERS: dict = {}
    CUSTOM_COMPONENTS: dict = {}

    DEV: bool = False  # development-mode flag
    DATABASE_URL: Optional[str] = None  # filled by set_database_url if unset
    CACHE: str = "InMemoryCache"
    REMOVE_API_KEYS: bool = False
    COMPONENTS_PATH: List[str] = []  # directories scanned for custom components

    @validator("DATABASE_URL", pre=True)
    def set_database_url(cls, value):
        """Fall back to LANGFLOW_DATABASE_URL, then to a local sqlite file."""
        if not value:
            logger.debug(
                "No database_url provided, trying LANGFLOW_DATABASE_URL env variable"
            )
            if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
                value = langflow_database_url
                logger.debug("Using LANGFLOW_DATABASE_URL env variable.")
            else:
                logger.debug("No DATABASE_URL env variable, using sqlite database")
                value = "sqlite:///./langflow.db"
        return value

    @validator("COMPONENTS_PATH", pre=True)
    def set_components_path(cls, value):
        """Merge LANGFLOW_COMPONENTS_PATH and the bundled default path in."""
        if os.getenv("LANGFLOW_COMPONENTS_PATH"):
            logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
            langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH")
            if (
                Path(langflow_component_path).exists()
                and langflow_component_path not in value
            ):
                # NOTE(review): os.getenv always returns a string, so this
                # isinstance-list branch looks unreachable - confirm intent.
                if isinstance(langflow_component_path, list):
                    for path in langflow_component_path:
                        if path not in value:
                            value.append(path)
                    logger.debug(
                        f"Extending {langflow_component_path} to components_path"
                    )
                elif langflow_component_path not in value:
                    value.append(langflow_component_path)
                    logger.debug(
                        f"Appending {langflow_component_path} to components_path"
                    )

        # Always keep the bundled default components reachable.
        if not value:
            value = [BASE_COMPONENTS_PATH]
            logger.debug("Setting default components path to components_path")
        elif BASE_COMPONENTS_PATH not in value:
            value.append(BASE_COMPONENTS_PATH)
            logger.debug("Adding default components path to components_path")

        logger.debug(f"Components path: {value}")
        return value

    class Config:
        validate_assignment = True
        extra = "ignore"
        env_prefix = "LANGFLOW_"

    @root_validator(allow_reuse=True)
    def validate_lists(cls, values):
        """Replace falsy field values with empty lists after validation.

        NOTE(review): field names are upper-case ("DEV"), so the
        `key != "dev"` guard never excludes anything; falsy booleans
        such as DEV=False and empty dicts are also replaced with [] -
        confirm this is intended.
        """
        for key, value in values.items():
            if key != "dev" and not value:
                values[key] = []
        return values

    def update_from_yaml(self, file_path: str, dev: bool = False):
        """Reload the component registries from a YAML settings file."""
        new_settings = load_settings_from_yaml(file_path)
        self.CHAINS = new_settings.CHAINS or {}
        self.AGENTS = new_settings.AGENTS or {}
        self.PROMPTS = new_settings.PROMPTS or {}
        self.LLMS = new_settings.LLMS or {}
        self.TOOLS = new_settings.TOOLS or {}
        self.MEMORIES = new_settings.MEMORIES or {}
        self.WRAPPERS = new_settings.WRAPPERS or {}
        self.TOOLKITS = new_settings.TOOLKITS or {}
        self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {}
        self.UTILITIES = new_settings.UTILITIES or {}
        self.EMBEDDINGS = new_settings.EMBEDDINGS or {}
        self.VECTORSTORES = new_settings.VECTORSTORES or {}
        self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {}
        self.RETRIEVERS = new_settings.RETRIEVERS or {}
        self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {}
        self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {}
        self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or []
        self.DEV = dev

    def update_settings(self, **kwargs):
        """Update individual settings; list-valued fields are extended."""
        logger.debug("Updating settings")
        for key, value in kwargs.items():
            # value may contain sensitive information, so we don't want to log it
            if not hasattr(self, key):
                logger.debug(f"Key {key} not found in settings")
                continue
            logger.debug(f"Updating {key}")
            if isinstance(getattr(self, key), list):
                # value might be a '[something]' string
                with contextlib.suppress(json.decoder.JSONDecodeError):
                    value = json.loads(str(value))
                if isinstance(value, list):
                    for item in value:
                        if item not in getattr(self, key):
                            getattr(self, key).append(item)
                    logger.debug(f"Extended {key}")
                else:
                    getattr(self, key).append(value)
                    logger.debug(f"Appended {key}")
            else:
                setattr(self, key, value)
                logger.debug(f"Updated {key}")
            logger.debug(f"{key}: {getattr(self, key)}")
def save_settings_to_yaml(settings: Settings, file_path: str):
    """Serialize a Settings object to the given YAML file."""
    settings_dict = settings.dict()
    with open(file_path, "w") as f:
        yaml.dump(settings_dict, f)
def load_settings_from_yaml(file_path: str) -> Settings:
    """Build a Settings object from a YAML file.

    Bare file names (no "/") are resolved relative to this module's
    directory; YAML keys are upper-cased to match the Settings fields.

    Raises:
        KeyError: if the YAML contains a key that is not a Settings field.
    """
    # Check if a string is a valid path or a file name
    # NOTE(review): the "/" heuristic does not recognize Windows-style
    # "\\" separators - confirm whether that matters for deployment.
    if "/" not in file_path:
        # Get current path
        current_path = os.path.dirname(os.path.abspath(__file__))
        file_path = os.path.join(current_path, file_path)

    with open(file_path, "r") as f:
        settings_dict = yaml.safe_load(f)
        settings_dict = {k.upper(): v for k, v in settings_dict.items()}

        for key in settings_dict:
            if key not in Settings.__fields__.keys():
                raise KeyError(f"Key {key} not found in settings")
            # NOTE(review): len() assumes each section is a sized
            # collection; an empty YAML section loads as None and would
            # raise TypeError here - confirm inputs.
            logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")

    return Settings(**settings_dict)

View file

@ -0,0 +1,15 @@
from pathlib import Path
from langflow.services.settings.manager import SettingsManager
from langflow.services.factory import ServiceFactory
class SettingsManagerFactory(ServiceFactory):
    """Factory that loads the SettingsManager from the bundled config.yaml."""

    def __init__(self):
        super().__init__(SettingsManager)

    def create(self):
        # Here you would have logic to create and configure a SettingsManager
        # config.yaml lives at the langflow package root, three levels up
        # from this module (services/settings/factory.py).
        langflow_dir = Path(__file__).parent.parent.parent
        return SettingsManager.load_settings_from_yaml(
            str(langflow_dir / "config.yaml")
        )

View file

@ -0,0 +1,36 @@
from langflow.services.base import Service
from langflow.services.settings.base import Settings
from langflow.utils.logger import logger
import os
import yaml
class SettingsManager(Service):
    """Service exposing the application's Settings object."""

    name = "settings_manager"

    def __init__(self, settings: Settings):
        super().__init__()
        self.settings = settings

    @classmethod
    def load_settings_from_yaml(cls, file_path: str) -> "SettingsManager":
        """Load Settings from a YAML file and wrap them in a manager.

        Raises KeyError for YAML keys that are not Settings fields.

        NOTE(review): this duplicates the module-level
        load_settings_from_yaml in the settings base module - consider
        delegating to it so the two cannot drift apart.
        """
        # Check if a string is a valid path or a file name
        if "/" not in file_path:
            # Get current path
            current_path = os.path.dirname(os.path.abspath(__file__))
            file_path = os.path.join(current_path, file_path)

        with open(file_path, "r") as f:
            settings_dict = yaml.safe_load(f)
            settings_dict = {k.upper(): v for k, v in settings_dict.items()}

            for key in settings_dict:
                if key not in Settings.__fields__.keys():
                    raise KeyError(f"Key {key} not found in settings")
                logger.debug(
                    f"Loading {len(settings_dict[key])} {key} from {file_path}"
                )

        settings = Settings(**settings_dict)
        return cls(settings)

View file

@ -0,0 +1,18 @@
from langflow.services import ServiceType, service_manager
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from langflow.services.settings.manager import SettingsManager
def get_settings_manager() -> "SettingsManager":
    """Return the singleton SettingsManager from the service manager."""
    return service_manager.get(ServiceType.SETTINGS_MANAGER)


def get_db_manager():
    """Return the singleton DatabaseManager from the service manager."""
    return service_manager.get(ServiceType.DATABASE_MANAGER)


def get_session():
    """Dependency generator that yields a database session (FastAPI Depends)."""
    db_manager = service_manager.get(ServiceType.DATABASE_MANAGER)
    yield from db_manager.get_session()

View file

@ -30,7 +30,6 @@ class DocumentLoaderFrontNode(FrontendNode):
"UnstructuredEmailLoader": build_file_field(
suffixes=[".eml"], fileTypes=["eml"]
),
"SlackDirectoryLoader": build_file_field(suffixes=[".zip"], fileTypes=["zip"]),
"EverNoteLoader": build_file_field(suffixes=[".xml"], fileTypes=["xml"]),
"FacebookChatLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]),
"BSHTMLLoader": build_file_field(suffixes=[".html"], fileTypes=["html"]),
@ -105,7 +104,30 @@ class DocumentLoaderFrontNode(FrontendNode):
advanced=False,
)
)
elif self.template.type_name in {"SlackDirectoryLoader"}:
self.template.add_field(
TemplateField(
field_type="file",
required=True,
show=True,
name="zip_path",
value="",
display_name="Path to zip file",
suffixes=[".zip"],
file_types=["zip"],
)
)
self.template.add_field(
TemplateField(
field_type="str",
required=False,
show=True,
name="workspace_url",
value="",
display_name="Workspace URL",
advanced=False,
)
)
elif self.template.type_name in self.file_path_templates:
self.template.add_field(self.file_path_templates[self.template.type_name])
elif self.template.type_name in {

View file

@ -12,8 +12,11 @@ class UtilitiesFrontendNode(FrontendNode):
FrontendNode.format_field(field, name)
# field.field_type could be "Literal['news', 'search', 'places', 'images']
# we need to convert it to a list
# It seems it could also be like "typing_extensions.['news', 'search', 'places', 'images']"
if "Literal" in field.field_type:
field.options = ast.literal_eval(field.field_type.replace("Literal", ""))
field_type = field.field_type.replace("typing_extensions.", "")
field_type = field_type.replace("Literal", "")
field.options = ast.literal_eval(field_type)
field.is_list = True
field.field_type = "str"

View file

@ -4,6 +4,52 @@ from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
# Fields shown by default (non-advanced) on vector store nodes.
# Fix: the original listed "collection_name" twice; membership checks are
# unaffected by removing the duplicate.
BASIC_FIELDS = [
    "work_dir",
    "collection_name",
    "api_key",
    "location",
    "persist_directory",
    "persist",
    "weaviate_url",
    "index_name",
    "namespace",
    "folder_path",
    "table_name",
    "query_name",
    "supabase_url",
    "supabase_service_key",
    "mongodb_atlas_cluster_uri",
    "db_name",
]
# Fields hidden behind the "advanced" toggle on vector store nodes.
ADVANCED_FIELDS = [
    "n_dim",
    "key",
    "prefix",
    "distance_func",
    "content_payload_key",
    "metadata_payload_key",
    "timeout",
    "host",
    "path",
    "url",
    "port",
    "https",
    "prefer_grpc",
    "grpc_port",
    "pinecone_api_key",
    "pinecone_env",
    "client_kwargs",
    "search_kwargs",
    "chroma_server_host",
    "chroma_server_http_port",
    "chroma_server_ssl_enabled",
    "chroma_server_grpc_port",
    "chroma_server_cors_allow_origins",
]
class VectorStoreFrontendNode(FrontendNode):
def add_extra_fields(self) -> None:
extra_fields: List[TemplateField] = []
@ -45,16 +91,62 @@ class VectorStoreFrontendNode(FrontendNode):
elif self.template.type_name == "Chroma":
# New bool field for persist parameter
extra_field = TemplateField(
name="persist",
field_type="bool",
required=False,
show=True,
advanced=False,
value=False,
display_name="Persist",
)
extra_fields.append(extra_field)
chroma_fields = [
TemplateField(
name="persist",
field_type="bool",
required=False,
show=True,
advanced=False,
value=False,
display_name="Persist",
),
# chroma_server_grpc_port: str | None = None,
TemplateField(
name="chroma_server_host",
field_type="str",
required=False,
show=True,
advanced=True,
display_name="Chroma Server Host",
),
TemplateField(
name="chroma_server_http_port",
field_type="str",
required=False,
show=True,
advanced=True,
display_name="Chroma Server HTTP Port",
),
TemplateField(
name="chroma_server_ssl_enabled",
field_type="bool",
required=False,
show=True,
advanced=True,
value=False,
display_name="Chroma Server SSL Enabled",
),
TemplateField(
name="chroma_server_grpc_port",
field_type="str",
required=False,
show=True,
advanced=True,
display_name="Chroma Server GRPC Port",
),
TemplateField(
name="chroma_server_cors_allow_origins",
field_type="str",
required=False,
is_list=True,
show=True,
advanced=True,
display_name="Chroma Server CORS Allow Origins",
),
]
extra_fields.extend(chroma_fields)
elif self.template.type_name == "Pinecone":
# add pinecone_api_key and pinecone_env
extra_field = TemplateField(
@ -208,45 +300,6 @@ class VectorStoreFrontendNode(FrontendNode):
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
FrontendNode.format_field(field, name)
# Define common field attributes
basic_fields = [
"work_dir",
"collection_name",
"api_key",
"location",
"persist_directory",
"persist",
"weaviate_url",
"index_name",
"namespace",
"folder_path",
"table_name",
"query_name",
"supabase_url",
"supabase_service_key",
"mongodb_atlas_cluster_uri",
"collection_name",
"db_name",
]
advanced_fields = [
"n_dim",
"key",
"prefix",
"distance_func",
"content_payload_key",
"metadata_payload_key",
"timeout",
"host",
"path",
"url",
"port",
"https",
"prefer_grpc",
"grpc_port",
"pinecone_api_key",
"pinecone_env",
"client_kwargs",
"search_kwargs",
]
# Check and set field attributes
if field.name == "texts":
@ -269,7 +322,7 @@ class VectorStoreFrontendNode(FrontendNode):
field.display_name = "Embedding"
field.field_type = "Embeddings"
elif field.name in basic_fields:
elif field.name in BASIC_FIELDS:
field.show = True
field.advanced = False
if field.name == "api_key":
@ -279,7 +332,7 @@ class VectorStoreFrontendNode(FrontendNode):
field.value = ":memory:"
field.placeholder = ":memory:"
elif field.name in advanced_fields:
elif field.name in ADVANCED_FIELDS:
field.show = True
field.advanced = True
if "key" in field.name:

View file

@ -0,0 +1,15 @@
class LazyLoadDictBase:
    """Base class that builds and caches its type dictionary on demand."""

    def __init__(self):
        # Populated lazily by the all_types_dict property.
        self._all_types_dict = None

    @property
    def all_types_dict(self):
        """Return the type dictionary, building it on first access."""
        cached = self._all_types_dict
        if cached is None:
            cached = self._build_dict()
            self._all_types_dict = cached
        return cached

    def _build_dict(self):
        # Subclasses supply the actual dictionary construction.
        raise NotImplementedError

    def get_type_dict(self):
        # Subclasses supply a (possibly filtered) view of the types.
        raise NotImplementedError

View file

@ -1 +1,2 @@
**/node_modules
**/node_modules
**/build

View file

@ -1,5 +1,11 @@
import { cloneDeep } from "lodash";
import React, { useContext, useEffect, useRef, useState } from "react";
import React, {
ReactNode,
useContext,
useEffect,
useRef,
useState,
} from "react";
import { Handle, Position, useUpdateNodeInternals } from "reactflow";
import ShadTooltip from "../../../../components/ShadTooltipComponent";
import CodeAreaComponent from "../../../../components/codeAreaComponent";
@ -17,6 +23,7 @@ import { TOOLTIP_EMPTY } from "../../../../constants/constants";
import { TabsContext } from "../../../../contexts/tabsContext";
import { typesContext } from "../../../../contexts/typesContext";
import { ParameterComponentType } from "../../../../types/components";
import { TabsState } from "../../../../types/tabs";
import { isValidConnection } from "../../../../utils/reactflowUtils";
import {
nodeColors,
@ -38,15 +45,15 @@ export default function ParameterComponent({
required = false,
optionalHandle = null,
info = "",
}: ParameterComponentType) {
const ref = useRef(null);
const refHtml = useRef(null);
const infoHtml = useRef(null);
}: ParameterComponentType): JSX.Element {
const ref = useRef<HTMLDivElement>(null);
const refHtml = useRef<HTMLDivElement & ReactNode>(null);
const infoHtml = useRef<HTMLDivElement & ReactNode>(null);
const updateNodeInternals = useUpdateNodeInternals();
const [position, setPosition] = useState(0);
const { setTabsState, tabId, save, flows } = useContext(TabsContext);
const flow = flows.find((f) => f.id === tabId).data?.nodes ?? null;
const flow = flows.find((flow) => flow.id === tabId)?.data?.nodes ?? null;
// Update component position
useEffect(() => {
@ -62,16 +69,18 @@ export default function ParameterComponent({
const { reactFlowInstance } = useContext(typesContext);
let disabled =
reactFlowInstance?.getEdges().some((e) => e.targetHandle === id) ?? false;
reactFlowInstance?.getEdges().some((edge) => edge.targetHandle === id) ??
false;
const { data: myData } = useContext(typesContext);
const handleOnNewValue = (newValue: any) => {
const handleOnNewValue = (newValue: string | string[] | boolean): void => {
let newData = cloneDeep(data);
newData.node.template[name].value = newValue;
newData.node!.template[name].value = newValue;
setData(newData);
// Set state to pending
setTabsState((prev) => {
//@ts-ignore
setTabsState((prev: TabsState) => {
return {
...prev,
[tabId]: {
@ -86,10 +95,11 @@ export default function ParameterComponent({
useEffect(() => {
if (name === "openai_api_base") console.log(info);
// @ts-ignore
infoHtml.current = (
<div className="h-full w-full word-break-break-word">
{info.split("\n").map((line, i) => (
<p key={i} className="block">
<div className="h-full w-full break-words">
{info.split("\n").map((line, index) => (
<p key={index} className="block">
{line}
</p>
))}
@ -98,18 +108,19 @@ export default function ParameterComponent({
}, [info]);
function renderTooltips() {
let groupedObj = groupByFamily(myData, tooltipTitle, left, flow);
let groupedObj = groupByFamily(myData, tooltipTitle!, left, flow!);
if (groupedObj && groupedObj.length > 0) {
refHtml.current = groupedObj.map((item, i) => {
//@ts-ignore
refHtml.current = groupedObj.map((item, index) => {
const Icon: any =
nodeIconsLucide[item.family] ?? nodeIconsLucide["unknown"];
return (
<span
key={i}
key={index}
className={classNames(
i > 0 ? "mt-2 flex items-center" : "flex items-center"
index > 0 ? "mt-2 flex items-center" : "flex items-center"
)}
>
<div
@ -132,10 +143,10 @@ export default function ParameterComponent({
{" "}
{item.type === "" ? "" : " - "}
{item.type.split(", ").length > 2
? item.type.split(", ").map((el, i) => (
<React.Fragment key={el + i}>
? item.type.split(", ").map((el, index) => (
<React.Fragment key={el + index}>
<span>
{i === item.type.split(", ").length - 1
{index === item.type.split(", ").length - 1
? el
: (el += `, `)}
</span>
@ -148,6 +159,7 @@ export default function ParameterComponent({
);
});
} else {
//@ts-ignore
refHtml.current = <span>{TOOLTIP_EMPTY}</span>;
}
}
@ -207,7 +219,7 @@ export default function ParameterComponent({
position={left ? Position.Left : Position.Right}
id={id}
isValidConnection={(connection) =>
isValidConnection(connection, reactFlowInstance)
isValidConnection(connection, reactFlowInstance!)
}
className={classNames(
left ? "-ml-0.5 " : "-mr-0.5 ",
@ -223,9 +235,9 @@ export default function ParameterComponent({
{left === true &&
type === "str" &&
!data.node.template[name].options ? (
!data.node?.template[name].options ? (
<div className="mt-2 w-full">
{data.node.template[name].list ? (
{data.node?.template[name].list ? (
<InputListComponent
disabled={disabled}
value={
@ -236,7 +248,7 @@ export default function ParameterComponent({
}
onChange={handleOnNewValue}
/>
) : data.node.template[name].multiline ? (
) : data.node?.template[name].multiline ? (
<TextAreaComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
@ -245,8 +257,8 @@ export default function ParameterComponent({
) : (
<InputComponent
disabled={disabled}
password={data.node.template[name].password ?? false}
value={data.node.template[name].value ?? ""}
password={data.node?.template[name].password ?? false}
value={data.node?.template[name].value ?? ""}
onChange={handleOnNewValue}
/>
)}
@ -255,9 +267,9 @@ export default function ParameterComponent({
<div className="mt-2 w-full">
<ToggleShadComponent
disabled={disabled}
enabled={data.node.template[name].value ?? false}
setEnabled={(t) => {
handleOnNewValue(t);
enabled={data.node?.template[name].value ?? false}
setEnabled={(isEnabled) => {
handleOnNewValue(isEnabled);
}}
size="large"
/>
@ -266,13 +278,13 @@ export default function ParameterComponent({
<div className="mt-2 w-full">
<FloatComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
value={data.node?.template[name].value ?? ""}
onChange={handleOnNewValue}
/>
</div>
) : left === true &&
type === "str" &&
data.node.template[name].options ? (
data.node?.template[name].options ? (
<div className="mt-2 w-full">
<Dropdown
options={data.node.template[name].options}
@ -283,13 +295,13 @@ export default function ParameterComponent({
) : left === true && type === "code" ? (
<div className="mt-2 w-full">
<CodeAreaComponent
dynamic={data.node.template[name].dynamic ?? false}
dynamic={data.node?.template[name].dynamic ?? false}
setNodeClass={(nodeClass) => {
data.node = nodeClass;
}}
nodeClass={data.node}
disabled={disabled}
value={data.node.template[name].value ?? ""}
value={data.node?.template[name].value ?? ""}
onChange={handleOnNewValue}
/>
</div>
@ -297,12 +309,12 @@ export default function ParameterComponent({
<div className="mt-2 w-full">
<InputFileComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
value={data.node?.template[name].value ?? ""}
onChange={handleOnNewValue}
fileTypes={data.node.template[name].fileTypes}
suffixes={data.node.template[name].suffixes}
onFileChange={(t: string) => {
data.node.template[name].file_path = t;
fileTypes={data.node?.template[name].fileTypes}
suffixes={data.node?.template[name].suffixes}
onFileChange={(filePath: string) => {
data.node!.template[name].file_path = filePath;
save();
}}
></InputFileComponent>
@ -311,7 +323,7 @@ export default function ParameterComponent({
<div className="mt-2 w-full">
<IntComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
value={data.node?.template[name].value ?? ""}
onChange={handleOnNewValue}
/>
</div>
@ -324,7 +336,7 @@ export default function ParameterComponent({
}}
nodeClass={data.node}
disabled={disabled}
value={data.node.template[name].value ?? ""}
value={data.node?.template[name].value ?? ""}
onChange={handleOnNewValue}
/>
</div>

View file

@ -159,52 +159,59 @@ export default function GenericNode({
<>
{Object.keys(data.node.template)
.filter((t) => t.charAt(0) !== "_")
.map((t: string, idx) => (
.filter((templateField) => templateField.charAt(0) !== "_")
.map((templateField: string, idx) => (
<div key={idx}>
{data.node.template[t].show &&
!data.node.template[t].advanced ? (
{data.node.template[templateField].show &&
!data.node.template[templateField].advanced ? (
<ParameterComponent
key={
(data.node.template[t].input_types?.join(";") ??
data.node.template[t].type) +
(data.node.template[templateField].input_types?.join(
";"
) ?? data.node.template[templateField].type) +
"|" +
t +
templateField +
"|" +
data.id
}
data={data}
setData={setData}
color={
nodeColors[types[data.node.template[t].type]] ??
nodeColors[data.node.template[t].type] ??
nodeColors[
types[data.node.template[templateField].type]
] ??
nodeColors[data.node.template[templateField].type] ??
nodeColors.unknown
}
title={
data.node.template[t].display_name
? data.node.template[t].display_name
: data.node.template[t].name
? toTitleCase(data.node.template[t].name)
: toTitleCase(t)
data.node.template[templateField].display_name
? data.node.template[templateField].display_name
: data.node.template[templateField].name
? toTitleCase(data.node.template[templateField].name)
: toTitleCase(templateField)
}
info={data.node.template[t].info}
name={t}
info={data.node.template[templateField].info}
name={templateField}
tooltipTitle={
data.node.template[t].input_types?.join("\n") ??
data.node.template[t].type
data.node.template[templateField].input_types?.join(
"\n"
) ?? data.node.template[templateField].type
}
required={data.node.template[t].required}
required={data.node.template[templateField].required}
id={
(data.node.template[t].input_types?.join(";") ??
data.node.template[t].type) +
(data.node.template[templateField].input_types?.join(
";"
) ?? data.node.template[templateField].type) +
"|" +
t +
templateField +
"|" +
data.id
}
left={true}
type={data.node.template[t].type}
optionalHandle={data.node.template[t].input_types}
type={data.node.template[templateField].type}
optionalHandle={
data.node.template[templateField].input_types
}
/>
) : (
<></>

View file

@ -22,9 +22,9 @@ export default function AlertDropdown({ children }: AlertDropdownType) {
return (
<Popover
open={open}
onOpenChange={(k) => {
setOpen(k);
if (k) setNotificationCenter(false);
onOpenChange={(target) => {
setOpen(target);
if (target) setNotificationCenter(false);
}}
>
<PopoverTrigger>{children}</PopoverTrigger>

View file

@ -55,12 +55,12 @@ export const EditFlowSettings: React.FC<InputProps> = ({
};
const [desc, setDesc] = useState(
flows.find((f) => f.id === tabId).description
flows.find((flow) => flow.id === tabId).description
);
const handleDescriptionChange = (event: ChangeEvent<HTMLTextAreaElement>) => {
flows.find((f) => f.id === tabId).description = event.target.value;
setDesc(flows.find((f) => f.id === tabId).description);
flows.find((flow) => flow.id === tabId).description = event.target.value;
setDesc(flows.find((flow) => flow.id === tabId).description);
setDescription(event.target.value);
};

View file

@ -82,13 +82,15 @@ export default function BuildTrigger({
const parsedData = JSON.parse(event.data);
// if the event is the end of the stream, close the connection
if (parsedData.end_of_stream) {
// Close the connection and finish
finished = true;
eventSource.close();
return;
} else if (parsedData.log) {
// If the event is a log, log it
setSuccessData({ title: parsedData.log });
} else if (parsedData.input_keys) {
} else if (parsedData.input_keys !== undefined) {
setTabsState((old) => {
return {
...old,

Some files were not shown because too many files have changed in this diff Show more