Refactor error page layout and fix migration scripts (#1485)

This pull request introduces a comprehensive refactor of our
application's error page layout. The primary goal of these changes is to
enhance user experience during error states by providing clearer, more
helpful information and improving the overall aesthetics of the error
page.

Fixes #1274
This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-02-29 15:31:09 -03:00 committed by GitHub
commit 4bec4dee4b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 492 additions and 395 deletions

View file

@ -1,6 +0,0 @@
#!/bin/sh
added_files=$(git diff --name-only --cached --diff-filter=d)
make format
git add ${added_files}

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.6.8"
version = "0.6.9"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [

View file

@ -109,7 +109,11 @@ def version_callback(value: bool):
@app.callback()
def main_entry_point(
version: bool = typer.Option(
None, "--version", callback=version_callback, is_eager=True, help="Show the version and exit."
None,
"--version",
callback=version_callback,
is_eager=True,
help="Show the version and exit.",
),
):
"""

View file

@ -63,7 +63,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
# This is the path to the db in the root of the project.
# When the user runs Langflow, the database URL will
# be set dynamically.
sqlalchemy.url = sqlite:///../../../langflow.db
sqlalchemy.url = sqlite:///./langflow.db
[post_write_hooks]
@ -98,7 +98,7 @@ handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
level = DEBUG
handlers =
qualname = alembic

View file

@ -1,10 +1,11 @@
import os
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from loguru import logger
from sqlalchemy import engine_from_config, pool
from langflow.services.database.models import * # noqa
from langflow.services.database.service import SQLModel
# this is the Alembic Config object, which provides
@ -40,7 +41,8 @@ def run_migrations_offline() -> None:
script output.
"""
url = config.get_main_option("sqlalchemy.url")
url = os.getenv("LANGFLOW_DATABASE_URL")
url = url or config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
@ -60,12 +62,17 @@ def run_migrations_online() -> None:
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
from langflow.services.deps import get_db_service
try:
connectable = get_db_service().engine
except Exception as e:
logger.error(f"Error getting database engine: {e}")
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata, render_as_batch=True

View file

@ -10,6 +10,7 @@ from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.engine.reflection import Inspector
${imports if imports else ""}
# revision identifiers, used by Alembic.
@ -20,8 +21,12 @@ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
${upgrades if upgrades else "pass"}
def downgrade() -> None:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
${downgrades if downgrades else "pass"}

View file

@ -5,28 +5,43 @@ Revises: 1ef9c4f3765d
Create Date: 2023-12-13 18:55:52.587360
"""
from typing import Sequence, Union
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = '006b3990db50'
down_revision: Union[str, None] = '1ef9c4f3765d'
revision: str = "006b3990db50"
down_revision: Union[str, None] = "1ef9c4f3765d"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
api_key_constraints = inspector.get_unique_constraints("apikey")
flow_constraints = inspector.get_unique_constraints("flow")
user_constraints = inspector.get_unique_constraints("user")
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.create_unique_constraint('uq_apikey_id', ['id'])
if not any(
constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints
):
with op.batch_alter_table("apikey", schema=None) as batch_op:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.create_unique_constraint('uq_flow_id', ['id'])
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.create_unique_constraint('uq_user_id', ['id'])
batch_op.create_unique_constraint("uq_apikey_id", ["id"])
if not any(
constraint["name"] == "uq_flow_id" for constraint in flow_constraints
):
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.create_unique_constraint("uq_flow_id", ["id"])
if not any(
constraint["name"] == "uq_user_id" for constraint in user_constraints
):
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.create_unique_constraint("uq_user_id", ["id"])
except Exception as e:
print(e)
pass
@ -36,15 +51,24 @@ def upgrade() -> None:
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
api_key_constraints = inspector.get_unique_constraints("apikey")
flow_constraints = inspector.get_unique_constraints("flow")
user_constraints = inspector.get_unique_constraints("user")
try:
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_constraint('uq_user_id', type_='unique')
if any(
constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints
):
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.drop_constraint("uq_user_id", type_="unique")
if any(constraint["name"] == "uq_flow_id" for constraint in flow_constraints):
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.drop_constraint("uq_flow_id", type_="unique")
if any(constraint["name"] == "uq_user_id" for constraint in user_constraints):
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.drop_constraint('uq_flow_id', type_='unique')
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.drop_constraint('uq_apikey_id', type_='unique')
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.drop_constraint("uq_apikey_id", type_="unique")
except Exception as e:
print(e)
pass

View file

@ -5,67 +5,25 @@ Revises: 006b3990db50
Create Date: 2024-01-17 10:32:56.686287
"""
from typing import Sequence, Union
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '0b8757876a7c'
down_revision: Union[str, None] = '006b3990db50'
revision: str = "0b8757876a7c"
down_revision: Union[str, None] = "006b3990db50"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_apikey_api_key'), ['api_key'], unique=True)
batch_op.create_index(batch_op.f('ix_apikey_name'), ['name'], unique=False)
batch_op.create_index(batch_op.f('ix_apikey_user_id'), ['user_id'], unique=False)
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_flow_description'), ['description'], unique=False)
batch_op.create_index(batch_op.f('ix_flow_name'), ['name'], unique=False)
batch_op.create_index(batch_op.f('ix_flow_user_id'), ['user_id'], unique=False)
except Exception as e:
print(e)
pass
pass
try:
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_user_username'), ['username'], unique=True)
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_user_username'))
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_flow_user_id'))
batch_op.drop_index(batch_op.f('ix_flow_name'))
batch_op.drop_index(batch_op.f('ix_flow_description'))
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_apikey_user_id'))
batch_op.drop_index(batch_op.f('ix_apikey_name'))
batch_op.drop_index(batch_op.f('ix_apikey_api_key'))
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
pass
# ### end Alembic commands ###

View file

@ -6,6 +6,7 @@ Revises: fd531f8868b1
Create Date: 2023-12-04 15:00:27.968998
"""
from typing import Sequence, Union
import sqlalchemy as sa
@ -13,8 +14,8 @@ import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '1ef9c4f3765d'
down_revision: Union[str, None] = 'fd531f8868b1'
revision: str = "1ef9c4f3765d"
down_revision: Union[str, None] = "fd531f8868b1"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@ -22,10 +23,10 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sqlmodel.sql.sqltypes.AutoString(),
nullable=True)
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column(
"name", existing_type=sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
except Exception as e:
pass
# ### end Alembic commands ###
@ -34,10 +35,8 @@ def upgrade() -> None:
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sa.VARCHAR(),
nullable=False)
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=False)
except Exception as e:
pass
# ### end Alembic commands ###

View file

@ -5,6 +5,7 @@ Revises:
Create Date: 2023-08-27 19:49:02.681355
"""
from typing import Sequence, Union
import sqlalchemy as sa
@ -33,7 +34,9 @@ def upgrade() -> None:
if "ix_flowstyle_flow_id" in [
index["name"] for index in inspector.get_indexes("flowstyle")
]:
op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle")
op.drop_index(
"ix_flowstyle_flow_id", table_name="flowstyle", if_exists=True
)
existing_indices_flow = []
existing_fks_flow = []
@ -80,8 +83,7 @@ def upgrade() -> None:
sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["user.id"],
["user_id"], ["user.id"], name="fk_apikey_user_id_user"
),
sa.PrimaryKeyConstraint("id", name="pk_apikey"),
sa.UniqueConstraint("id", name="uq_apikey_id"),
@ -103,8 +105,7 @@ def upgrade() -> None:
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["user.id"],
["user_id"], ["user.id"], name="fk_flow_user_id_user"
),
sa.PrimaryKeyConstraint("id", name="pk_flow"),
sa.UniqueConstraint("id", name="uq_flow_id"),
@ -151,21 +152,21 @@ def downgrade() -> None:
existing_tables = inspector.get_table_names()
if "flow" in existing_tables:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_flow_user_id"))
batch_op.drop_index(batch_op.f("ix_flow_name"))
batch_op.drop_index(batch_op.f("ix_flow_description"))
batch_op.drop_index(batch_op.f("ix_flow_user_id"), if_exists=True)
batch_op.drop_index(batch_op.f("ix_flow_name"), if_exists=True)
batch_op.drop_index(batch_op.f("ix_flow_description"), if_exists=True)
op.drop_table("flow")
if "apikey" in existing_tables:
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_apikey_user_id"))
batch_op.drop_index(batch_op.f("ix_apikey_name"))
batch_op.drop_index(batch_op.f("ix_apikey_api_key"))
batch_op.drop_index(batch_op.f("ix_apikey_user_id"), if_exists=True)
batch_op.drop_index(batch_op.f("ix_apikey_name"), if_exists=True)
batch_op.drop_index(batch_op.f("ix_apikey_api_key"), if_exists=True)
op.drop_table("apikey")
if "user" in existing_tables:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_user_username"))
batch_op.drop_index(batch_op.f("ix_user_username"), if_exists=True)
op.drop_table("user")

View file

@ -5,34 +5,44 @@ Revises: 7d2162acc8b2
Create Date: 2023-11-24 10:45:38.465302
"""
from typing import Sequence, Union
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = '2ac71eb9c3ae'
down_revision: Union[str, None] = '7d2162acc8b2'
revision: str = "2ac71eb9c3ae"
down_revision: Union[str, None] = "7d2162acc8b2"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
tables = inspector.get_table_names()
try:
op.create_table('credential',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('value', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('provider', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('user_id', sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column('id', sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
)
if "credential" not in tables:
op.create_table(
"credential",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("value", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column(
"provider", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
@ -40,7 +50,7 @@ def upgrade() -> None:
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.drop_table('credential')
op.drop_table("credential")
except Exception as e:
print(e)
pass

View file

@ -5,6 +5,7 @@ Revises: 260dbcc8b680
Create Date: 2023-09-08 07:36:13.387318
"""
from typing import Sequence, Union
import sqlalchemy as sa
@ -21,29 +22,36 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
if "user" in inspector.get_table_names() and "profile_image" not in [
column["name"] for column in inspector.get_columns("user")
]:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"profile_image", sqlmodel.sql.sqltypes.AutoString(), nullable=True
try:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
if "user" in inspector.get_table_names() and "profile_image" not in [
column["name"] for column in inspector.get_columns("user")
]:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"profile_image",
sqlmodel.sql.sqltypes.AutoString(),
nullable=True,
)
)
)
except Exception as e:
print(e)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
if "user" in inspector.get_table_names() and "profile_image" in [
column["name"] for column in inspector.get_columns("user")
]:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.drop_column("profile_image")
try:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
if "user" in inspector.get_table_names() and "profile_image" in [
column["name"] for column in inspector.get_columns("user")
]:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.drop_column("profile_image")
except Exception as e:
print(e)
# ### end Alembic commands ###

View file

@ -5,12 +5,13 @@ Revises: eb5866d51fd2
Create Date: 2023-10-18 23:08:57.744906
"""
from typing import Sequence, Union
import sqlalchemy as sa
import sqlmodel
from alembic import op
from loguru import logger
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = "7843803a87b5"
@ -21,19 +22,26 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
flow_columns = [column["name"] for column in inspector.get_columns("flow")]
user_columns = [column["name"] for column in inspector.get_columns("user")]
try:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.add_column(sa.Column("is_component", sa.Boolean(), nullable=True))
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"store_api_key", sqlmodel.AutoString(), nullable=True
if "is_component" not in flow_columns:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.add_column(
sa.Column("is_component", sa.Boolean(), nullable=True)
)
)
except Exception as e:
logger.exception(e)
pass
try:
if "store_api_key" not in user_columns:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.add_column(
sa.Column("store_api_key", sqlmodel.AutoString(), nullable=True)
)
except Exception as e:
pass
# ### end Alembic commands ###

View file

@ -5,88 +5,74 @@ Revises: f5ee9749d1a6
Create Date: 2023-11-21 20:56:53.998781
"""
from typing import Sequence, Union
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = '7d2162acc8b2'
down_revision: Union[str, None] = 'f5ee9749d1a6'
revision: str = "7d2162acc8b2"
down_revision: Union[str, None] = "f5ee9749d1a6"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
api_key_columns = [column["name"] for column in inspector.get_columns("apikey")]
flow_columns = [column["name"] for column in inspector.get_columns("flow")]
try:
with op.batch_alter_table('component', schema=None) as batch_op:
batch_op.drop_index('ix_component_frontend_node_id')
batch_op.drop_index('ix_component_name')
op.drop_table('component')
op.drop_table('flowstyle')
if "name" in api_key_columns:
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column(
"name", existing_type=sa.VARCHAR(), nullable=False
)
except Exception as e:
print(e)
pass
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sa.VARCHAR(),
nullable=False)
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.add_column(sa.Column('updated_at', sa.DateTime(), nullable=True))
batch_op.add_column(sa.Column('folder', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
pass
try:
with op.batch_alter_table("flow", schema=None) as batch_op:
if "updated_at" not in flow_columns:
batch_op.add_column(
sa.Column("updated_at", sa.DateTime(), nullable=True)
)
if "folder" not in flow_columns:
batch_op.add_column(
sa.Column(
"folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
)
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.drop_column('folder')
batch_op.drop_column('updated_at')
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.drop_column("folder")
batch_op.drop_column("updated_at")
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sa.VARCHAR(),
nullable=True)
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=True)
except Exception as e:
print(e)
pass
try:
op.create_table('flowstyle',
sa.Column('color', sa.VARCHAR(), nullable=False),
sa.Column('emoji', sa.VARCHAR(), nullable=False),
sa.Column('flow_id', sa.CHAR(length=32), nullable=True),
sa.Column('id', sa.CHAR(length=32), nullable=False),
sa.ForeignKeyConstraint(['flow_id'], ['flow.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_table('component',
sa.Column('id', sa.CHAR(length=32), nullable=False),
sa.Column('frontend_node_id', sa.CHAR(length=32), nullable=False),
sa.Column('name', sa.VARCHAR(), nullable=False),
sa.Column('description', sa.VARCHAR(), nullable=True),
sa.Column('python_code', sa.VARCHAR(), nullable=True),
sa.Column('return_type', sa.VARCHAR(), nullable=True),
sa.Column('is_disabled', sa.BOOLEAN(), nullable=False),
sa.Column('is_read_only', sa.BOOLEAN(), nullable=False),
sa.Column('create_at', sa.DATETIME(), nullable=False),
sa.Column('update_at', sa.DATETIME(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
with op.batch_alter_table('component', schema=None) as batch_op:
batch_op.create_index('ix_component_name', ['name'], unique=False)
batch_op.create_index('ix_component_frontend_node_id', ['frontend_node_id'], unique=False)
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -5,55 +5,105 @@ Revises: 0b8757876a7c
Create Date: 2024-01-26 13:31:14.797548
"""
from typing import Sequence, Union
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = 'b2fa308044b5'
down_revision: Union[str, None] = '0b8757876a7c'
revision: str = "b2fa308044b5"
down_revision: Union[str, None] = "0b8757876a7c"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
tables = inspector.get_table_names()
# ### commands auto generated by Alembic - please adjust! ###
try:
op.drop_table('flowstyle')
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.add_column(sa.Column('is_component', sa.Boolean(), nullable=True))
batch_op.add_column(sa.Column('updated_at', sa.DateTime(), nullable=True))
batch_op.add_column(sa.Column('folder', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
batch_op.add_column(sa.Column('user_id', sqlmodel.sql.sqltypes.GUID(), nullable=True))
batch_op.create_index(batch_op.f('ix_flow_user_id'), ['user_id'], unique=False)
batch_op.create_foreign_key('fk_flow_user_id_user', 'user', ['user_id'], ['id'])
if "flowstyle" in tables:
op.drop_table("flowstyle")
with op.batch_alter_table("flow", schema=None) as batch_op:
flow_columns = [column["name"] for column in inspector.get_columns("flow")]
if "is_component" not in flow_columns:
batch_op.add_column(
sa.Column("is_component", sa.Boolean(), nullable=True)
)
if "updated_at" not in flow_columns:
batch_op.add_column(
sa.Column("updated_at", sa.DateTime(), nullable=True)
)
if "folder" not in flow_columns:
batch_op.add_column(
sa.Column(
"folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
)
if "user_id" not in flow_columns:
batch_op.add_column(
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)
)
indices = inspector.get_indexes("flow")
indices_names = [index["name"] for index in indices]
if "ix_flow_user_id" not in indices_names:
batch_op.create_index(
batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
)
if "fk_flow_user_id_user" not in indices_names:
batch_op.create_foreign_key(
"fk_flow_user_id_user", "user", ["user_id"], ["id"]
)
except Exception:
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.drop_constraint('fk_flow_user_id_user', type_='foreignkey')
batch_op.drop_index(batch_op.f('ix_flow_user_id'))
batch_op.drop_column('user_id')
batch_op.drop_column('folder')
batch_op.drop_column('updated_at')
batch_op.drop_column('is_component')
# Re-create the dropped table 'flowstyle' if it was previously dropped in upgrade
if "flowstyle" not in inspector.get_table_names():
op.create_table(
"flowstyle",
sa.Column("color", sa.String(), nullable=False),
sa.Column("emoji", sa.String(), nullable=False),
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(["flow_id"], ["flow.id"]),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
op.create_table('flowstyle',
sa.Column('color', sa.VARCHAR(), nullable=False),
sa.Column('emoji', sa.VARCHAR(), nullable=False),
sa.Column('flow_id', sa.CHAR(length=32), nullable=True),
sa.Column('id', sa.CHAR(length=32), nullable=False),
sa.ForeignKeyConstraint(['flow_id'], ['flow.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
except Exception:
pass
# ### end Alembic commands ###
with op.batch_alter_table("flow", schema=None) as batch_op:
# Check and remove newly added columns and constraints in upgrade
flow_columns = [column["name"] for column in inspector.get_columns("flow")]
if "user_id" in flow_columns:
batch_op.drop_column("user_id")
if "folder" in flow_columns:
batch_op.drop_column("folder")
if "updated_at" in flow_columns:
batch_op.drop_column("updated_at")
if "is_component" in flow_columns:
batch_op.drop_column("is_component")
indices = inspector.get_indexes("flow")
indices_names = [index["name"] for index in indices]
if "ix_flow_user_id" in indices_names:
batch_op.drop_index("ix_flow_user_id")
# Assuming fk_flow_user_id_user is a foreign key constraint's name, not an index
constraints = inspector.get_foreign_keys("flow")
constraint_names = [constraint["name"] for constraint in constraints]
if "fk_flow_user_id_user" in constraint_names:
batch_op.drop_constraint("fk_flow_user_id_user", type_="foreignkey")
except Exception as e:
# It's generally a good idea to log the exception or handle it in a way other than a bare pass
print(f"Error during downgrade: {e}")

View file

@ -5,46 +5,68 @@ Revises: b2fa308044b5
Create Date: 2024-01-26 13:34:14.496769
"""
from typing import Sequence, Union
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = 'bc2f01c40e4a'
down_revision: Union[str, None] = 'b2fa308044b5'
revision: str = "bc2f01c40e4a"
down_revision: Union[str, None] = "b2fa308044b5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.add_column(sa.Column('is_component', sa.Boolean(), nullable=True))
batch_op.add_column(sa.Column('updated_at', sa.DateTime(), nullable=True))
batch_op.add_column(sa.Column('folder', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
batch_op.add_column(sa.Column('user_id', sqlmodel.sql.sqltypes.GUID(), nullable=True))
batch_op.create_index(batch_op.f('ix_flow_user_id'), ['user_id'], unique=False)
batch_op.create_foreign_key('flow_user_id_fkey'
, 'user', ['user_id'], ['id'])
except Exception:
pass
# ### end Alembic commands ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
flow_columns = {column["name"] for column in inspector.get_columns("flow")}
flow_indexes = {index["name"] for index in inspector.get_indexes("flow")}
flow_fks = {fk["name"] for fk in inspector.get_foreign_keys("flow")}
with op.batch_alter_table("flow", schema=None) as batch_op:
if "is_component" not in flow_columns:
batch_op.add_column(sa.Column("is_component", sa.Boolean(), nullable=True))
if "updated_at" not in flow_columns:
batch_op.add_column(sa.Column("updated_at", sa.DateTime(), nullable=True))
if "folder" not in flow_columns:
batch_op.add_column(
sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True)
)
if "user_id" not in flow_columns:
batch_op.add_column(
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)
)
if "ix_flow_user_id" not in flow_indexes:
batch_op.create_index(
batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
)
if "flow_user_id_fkey" not in flow_fks:
batch_op.create_foreign_key(
"flow_user_id_fkey", "user", ["user_id"], ["id"]
)
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.drop_constraint('flow_user_id_fkey', type_='foreignkey')
batch_op.drop_index(batch_op.f('ix_flow_user_id'))
batch_op.drop_column('user_id')
batch_op.drop_column('folder')
batch_op.drop_column('updated_at')
batch_op.drop_column('is_component')
except Exception:
pass
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
flow_columns = {column["name"] for column in inspector.get_columns("flow")}
flow_indexes = {index["name"] for index in inspector.get_indexes("flow")}
flow_fks = {fk["name"] for fk in inspector.get_foreign_keys("flow")}
# ### end Alembic commands ###
with op.batch_alter_table("flow", schema=None) as batch_op:
if "flow_user_id_fkey" in flow_fks:
batch_op.drop_constraint("flow_user_id_fkey", type_="foreignkey")
if "ix_flow_user_id" in flow_indexes:
batch_op.drop_index(batch_op.f("ix_flow_user_id"))
if "user_id" in flow_columns:
batch_op.drop_column("user_id")
if "folder" in flow_columns:
batch_op.drop_column("folder")
if "updated_at" in flow_columns:
batch_op.drop_column("updated_at")
if "is_component" in flow_columns:
batch_op.drop_column("is_component")

View file

@ -5,11 +5,10 @@ Revises: 67cc006d50bf
Create Date: 2023-10-04 10:18:25.640458
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
from sqlalchemy import exc
# revision identifiers, used by Alembic.
revision: str = "eb5866d51fd2"
@ -21,70 +20,12 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
connection = op.get_bind()
try:
op.drop_table("flowstyle")
with op.batch_alter_table("component", schema=None) as batch_op:
batch_op.drop_index("ix_component_frontend_node_id")
batch_op.drop_index("ix_component_name")
except exc.SQLAlchemyError:
# connection.execute(text("ROLLBACK"))
pass
except Exception as e:
print(e)
pass
try:
op.drop_table("component")
except exc.SQLAlchemyError:
# connection.execute(text("ROLLBACK"))
pass
except Exception as e:
print(e)
pass
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.create_table(
"component",
sa.Column("id", sa.CHAR(length=32), nullable=False),
sa.Column("frontend_node_id", sa.CHAR(length=32), nullable=False),
sa.Column("name", sa.VARCHAR(), nullable=False),
sa.Column("description", sa.VARCHAR(), nullable=True),
sa.Column("python_code", sa.VARCHAR(), nullable=True),
sa.Column("return_type", sa.VARCHAR(), nullable=True),
sa.Column("is_disabled", sa.BOOLEAN(), nullable=False),
sa.Column("is_read_only", sa.BOOLEAN(), nullable=False),
sa.Column("create_at", sa.DATETIME(), nullable=False),
sa.Column("update_at", sa.DATETIME(), nullable=False),
sa.PrimaryKeyConstraint("id", name="pk_component"),
)
with op.batch_alter_table("component", schema=None) as batch_op:
batch_op.create_index("ix_component_name", ["name"], unique=False)
batch_op.create_index(
"ix_component_frontend_node_id", ["frontend_node_id"], unique=False
)
except Exception as e:
print(e)
pass
try:
op.create_table(
"flowstyle",
sa.Column("color", sa.VARCHAR(), nullable=False),
sa.Column("emoji", sa.VARCHAR(), nullable=False),
sa.Column("flow_id", sa.CHAR(length=32), nullable=True),
sa.Column("id", sa.CHAR(length=32), nullable=False),
sa.ForeignKeyConstraint(
["flow_id"],
["flow.id"],
),
sa.PrimaryKeyConstraint("id", name="pk_flowstyle"),
sa.UniqueConstraint("id", name="uq_flowstyle_id"),
)
except Exception as e:
print(e)
pass
pass
# ### end Alembic commands ###

View file

@ -5,6 +5,7 @@ Revises: 7843803a87b5
Create Date: 2023-10-18 23:12:27.297016
"""
from typing import Sequence, Union
import sqlalchemy as sa

View file

@ -5,22 +5,35 @@ Revises: 2ac71eb9c3ae
Create Date: 2023-11-24 15:07:37.566516
"""
from typing import Sequence, Union
from typing import Optional, Sequence, Union
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = 'fd531f8868b1'
down_revision: Union[str, None] = '2ac71eb9c3ae'
revision: str = "fd531f8868b1"
down_revision: Union[str, None] = "2ac71eb9c3ae"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
tables = inspector.get_table_names()
foreign_keys_names = []
if "credential" in tables:
foreign_keys = inspector.get_foreign_keys("credential")
foreign_keys_names = [fk["name"] for fk in foreign_keys]
try:
with op.batch_alter_table('credential', schema=None) as batch_op:
batch_op.create_foreign_key("fk_credential_user_id", 'user', ['user_id'], ['id'])
if "credential" in tables and "fk_credential_user_id" not in foreign_keys_names:
with op.batch_alter_table("credential", schema=None) as batch_op:
batch_op.create_foreign_key(
"fk_credential_user_id", "user", ["user_id"], ["id"]
)
except Exception as e:
print(e)
pass
@ -30,9 +43,17 @@ def upgrade() -> None:
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
tables = inspector.get_table_names()
foreign_keys_names: list[Optional[str]] = []
if "credential" in tables:
foreign_keys = inspector.get_foreign_keys("credential")
foreign_keys_names = [fk["name"] for fk in foreign_keys]
try:
with op.batch_alter_table('credential', schema=None) as batch_op:
batch_op.drop_constraint("fk_credential_user_id", type_='foreignkey')
if "credential" in tables and "fk_credential_user_id" in foreign_keys_names:
with op.batch_alter_table("credential", schema=None) as batch_op:
batch_op.drop_constraint("fk_credential_user_id", type_="foreignkey")
except Exception as e:
print(e)
pass

View file

@ -1,17 +1,28 @@
import time
from fastapi import APIRouter, Depends, HTTPException, WebSocket, WebSocketException, status
from fastapi import (
APIRouter,
Depends,
HTTPException,
WebSocket,
WebSocketException,
status,
)
from fastapi.responses import StreamingResponse
from loguru import logger
from sqlmodel import Session
from langflow.api.utils import build_input_keys_response, format_elapsed_time
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
from langflow.graph.graph.base import Graph
from langflow.services.auth.utils import get_current_active_user, get_current_user_for_websocket
from langflow.services.auth.utils import (
get_current_active_user,
get_current_user_for_websocket,
)
from langflow.services.cache.service import BaseCacheService
from langflow.services.cache.utils import update_build_status
from langflow.services.chat.service import ChatService
from langflow.services.deps import get_cache_service, get_chat_service, get_session
from loguru import logger
from sqlmodel import Session
router = APIRouter(tags=["Chat"])

View file

@ -5,16 +5,17 @@ from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import command, util
from alembic.config import Config
from loguru import logger
from sqlalchemy import inspect
from sqlalchemy.exc import OperationalError
from sqlmodel import Session, SQLModel, create_engine, select, text
from langflow.services.base import Service
from langflow.services.database import models # noqa
from langflow.services.database.models.user.crud import get_user_by_username
from langflow.services.database.utils import Result, TableResults
from langflow.services.deps import get_settings_service
from langflow.services.utils import teardown_superuser
from loguru import logger
from sqlalchemy import inspect
from sqlalchemy.exc import OperationalError
from sqlmodel import Session, SQLModel, create_engine, select, text
if TYPE_CHECKING:
from sqlalchemy.engine import Engine
@ -110,13 +111,11 @@ class DatabaseService(Service):
return True
def init_alembic(self):
def init_alembic(self, alembic_cfg):
logger.info("Initializing alembic")
alembic_cfg = Config()
alembic_cfg.set_main_option("script_location", str(self.script_location))
alembic_cfg.set_main_option("sqlalchemy.url", self.database_url)
command.stamp(alembic_cfg, "head")
# command.upgrade(alembic_cfg, "head")
command.ensure_version(alembic_cfg)
# alembic_cfg.attributes["connection"].commit()
command.upgrade(alembic_cfg, "head")
logger.info("Alembic initialized")
def run_migrations(self, fix=False):
@ -125,6 +124,11 @@ class DatabaseService(Service):
# if not self.script_location.exists(): # this is not the correct way to check if alembic has been initialized
# We need to check if the alembic_version table exists
# if not, we need to initialize alembic
alembic_cfg = Config()
# alembic_cfg.attributes["connection"] = session
alembic_cfg.set_main_option("script_location", str(self.script_location))
alembic_cfg.set_main_option("sqlalchemy.url", self.database_url)
should_initialize_alembic = False
with Session(self.engine) as session:
# If the table does not exist it throws an error
# so we need to catch it
@ -132,18 +136,19 @@ class DatabaseService(Service):
session.exec(text("SELECT * FROM alembic_version"))
except Exception:
logger.info("Alembic not initialized")
try:
self.init_alembic()
except Exception as exc:
logger.error(f"Error initializing alembic: {exc}")
raise RuntimeError("Error initializing alembic") from exc
should_initialize_alembic = True
else:
logger.info("Alembic already initialized")
if should_initialize_alembic:
try:
self.init_alembic(alembic_cfg)
except Exception as exc:
logger.error(f"Error initializing alembic: {exc}")
raise RuntimeError("Error initializing alembic") from exc
logger.info(f"Running DB migrations in {self.script_location}")
alembic_cfg = Config()
alembic_cfg.set_main_option("script_location", str(self.script_location))
alembic_cfg.set_main_option("sqlalchemy.url", self.database_url)
try:
command.check(alembic_cfg)
except Exception as exc:
@ -154,7 +159,7 @@ class DatabaseService(Service):
try:
command.check(alembic_cfg)
except util.exc.AutogenerateDiffsDetected as e:
logger.exception("AutogenerateDiffsDetected: {exc}")
logger.error("AutogenerateDiffsDetected: {exc}")
if not fix:
raise RuntimeError(
"Something went wrong running migrations. Please, run `langflow migration --fix`"

View file

@ -1,10 +1,14 @@
from loguru import logger
from sqlmodel import Session, select
from langflow.services.auth.utils import create_super_user, verify_password
from langflow.services.database.utils import initialize_database
from langflow.services.manager import service_manager
from langflow.services.schema import ServiceType
from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD
from loguru import logger
from sqlmodel import Session, select
from langflow.services.settings.constants import (
DEFAULT_SUPERUSER,
DEFAULT_SUPERUSER_PASSWORD,
)
from .deps import get_db_service, get_session, get_settings_service
@ -16,7 +20,9 @@ def get_factories_and_deps():
from langflow.services.credentials import factory as credentials_factory
from langflow.services.database import factory as database_factory
from langflow.services.plugins import factory as plugins_factory
from langflow.services.session import factory as session_service_factory # type: ignore
from langflow.services.session import (
factory as session_service_factory,
) # type: ignore
from langflow.services.settings import factory as settings_factory
from langflow.services.store import factory as store_factory
from langflow.services.task import factory as task_factory
@ -43,7 +49,10 @@ def get_factories_and_deps():
),
(plugins_factory.PluginServiceFactory(), [ServiceType.SETTINGS_SERVICE]),
(store_factory.StoreServiceFactory(), [ServiceType.SETTINGS_SERVICE]),
(credentials_factory.CredentialServiceFactory(), [ServiceType.SETTINGS_SERVICE]),
(
credentials_factory.CredentialServiceFactory(),
[ServiceType.SETTINGS_SERVICE],
),
]
@ -173,7 +182,9 @@ def initialize_session_service():
Initialize the session manager.
"""
from langflow.services.cache import factory as cache_factory
from langflow.services.session import factory as session_service_factory # type: ignore
from langflow.services.session import (
factory as session_service_factory,
) # type: ignore
initialize_settings_service()
@ -202,7 +213,7 @@ def initialize_services(fix_migration: bool = False):
try:
initialize_database(fix_migration=fix_migration)
except Exception as exc:
logger.exception(exc)
logger.error(exc)
raise exc
setup_superuser(service_manager.get(ServiceType.SETTINGS_SERVICE), next(get_session()))
try:

View file

@ -20,8 +20,8 @@ import Router from "./routes";
import useAlertStore from "./stores/alertStore";
import { useDarkStore } from "./stores/darkStore";
import useFlowsManagerStore from "./stores/flowsManagerStore";
import { useTypesStore } from "./stores/typesStore";
import { useStoreStore } from "./stores/storeStore";
import { useTypesStore } from "./stores/typesStore";
export default function App() {
const errorData = useAlertStore((state) => state.errorData);

View file

@ -1,36 +1,63 @@
import { XCircle } from "lucide-react";
import { crashComponentPropsType } from "../../types/components";
import { Button } from "../ui/button";
import { Card, CardContent, CardFooter, CardHeader } from "../ui/card";
export default function CrashErrorComponent({
error,
resetErrorBoundary,
}: crashComponentPropsType): JSX.Element {
return (
<div className="fixed left-0 top-0 z-50 flex h-full w-full items-center justify-center bg-foreground bg-opacity-50">
<div className="flex h-1/3 min-h-fit max-w-4xl flex-col justify-evenly rounded-lg bg-background p-8 text-start shadow-lg">
<h1 className="mb-4 text-3xl text-status-red">
Oops! An unknown error has occurred.
</h1>
<p className="mb-4 text-xl text-foreground">
Please click the 'Reset Application' button to restore the
application's state. If the error persists, please create an issue on
our GitHub page. We apologize for any inconvenience this may have
caused.
</p>
<div className="flex justify-center">
<button
onClick={resetErrorBoundary}
className="mr-4 rounded bg-primary px-4 py-2 font-bold text-background hover:bg-ring"
>
Reset Application
</button>
<a
href="https://github.com/logspace-ai/langflow/issues/new"
target="_blank"
rel="noopener noreferrer"
className="rounded bg-status-red px-4 py-2 font-bold text-background hover:bg-error-foreground"
>
Create Issue
</a>
<div className="z-50 flex h-screen w-screen items-center justify-center bg-foreground bg-opacity-50">
<div className="flex h-screen w-screen flex-col bg-background text-start shadow-lg">
<div className="m-auto grid w-1/2 justify-center gap-5 text-center">
<Card className="p-8">
<CardHeader>
<div className="m-auto">
<XCircle strokeWidth={1.5} className="h-16 w-16" />
</div>
<div>
<p className="mb-4 text-xl text-foreground">
Sorry, we found an unexpected error!
</p>
</div>
</CardHeader>
<CardContent className="grid">
<div>
<p>
Please report errors with detailed tracebacks on the{" "}
<a
href="https://github.com/logspace-ai/langflow/issues"
target="_blank"
rel="noopener noreferrer"
className="font-medium hover:underline "
>
GitHub Issues
</a>{" "}
page.
<br></br>
Thank you!
</p>
</div>
</CardContent>
<CardFooter>
<div className="m-auto mt-4 flex justify-center">
<Button onClick={resetErrorBoundary}>Restart Langflow</Button>
<a
href="https://github.com/logspace-ai/langflow/issues/new"
target="_blank"
rel="noopener noreferrer"
>
<Button className="ml-3" variant={"outline"}>
Report on GitHub
</Button>
</a>
</div>
</CardFooter>
</Card>
</div>
</div>
</div>

View file

@ -172,16 +172,26 @@ export default function NodeToolbarComponent({
<IconComponent name="Copy" className="h-4 w-4" />
</button>
</ShadTooltip>
{hasStore && (
<ShadTooltip content={(!hasApiKey || !validApiKey) ? "Set a valid API key to share this component." : "Share"} side="top">
{hasStore && (
<ShadTooltip
content={
!hasApiKey || !validApiKey
? "Set a valid API key to share this component."
: "Share"
}
side="top"
>
<button
className={classNames(
"relative -ml-px inline-flex items-center bg-background px-2 py-2 text-foreground shadow-md ring-1 ring-inset ring-ring transition-all duration-500 ease-in-out hover:bg-muted focus:z-10",
(!hasApiKey || !validApiKey) ? " text-muted-foreground cursor-not-allowed" : ""
!hasApiKey || !validApiKey
? " cursor-not-allowed text-muted-foreground"
: ""
)}
onClick={(event) => {
event.preventDefault();
if (hasApiKey && hasStore && validApiKey) setShowconfirmShare(true);
if (hasApiKey && hasStore && validApiKey)
setShowconfirmShare(true);
}}
>
<IconComponent name="Share3" className="-m-1 h-6 w-6" />

View file

@ -21,11 +21,7 @@ import {
SelectValue,
} from "../../components/ui/select";
import { AuthContext } from "../../contexts/authContext";
import {
checkHasApiKey,
getStoreComponents,
getStoreTags,
} from "../../controllers/API";
import { getStoreComponents, getStoreTags } from "../../controllers/API";
import StoreApiKeyModal from "../../modals/StoreApiKeyModal";
import useAlertStore from "../../stores/alertStore";
import useFlowsManagerStore from "../../stores/flowsManagerStore";
@ -46,9 +42,7 @@ export default function StorePage(): JSX.Element {
const setCurrentFlowId = useFlowsManagerStore(
(state) => state.setCurrentFlowId
);
const currentFlowId = useFlowsManagerStore(
(state) => state.currentFlowId
);
const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId);
const [loading, setLoading] = useState(true);
const [loadingTags, setLoadingTags] = useState(true);
const { id } = useParams();

View file

@ -1,7 +1,6 @@
import { create } from "zustand";
import { checkHasApiKey, checkHasStore } from "../controllers/API";
import { StoreStoreType } from "../types/zustand/store";
import useAlertStore from "./alertStore";
export const useStoreStore = create<StoreStoreType>((set) => ({
hasStore: true,

View file

@ -4,6 +4,7 @@ from collections import namedtuple
import pytest
from fastapi.testclient import TestClient
from langflow.interface.tools.constants import CUSTOM_TOOLS
from langflow.processing.process import Result
from langflow.services.auth.utils import get_password_hash