feat: Update SQLModel dependency and improve UUID handling (#4891)
* Update sqlmodel dependency to version 0.0.20 in pyproject.toml
* Handle UUID conversion for message IDs in memory update logic
* Refactor Alembic migrations to use `sa.inspect` and update GUID to Uuid type
* refactor: Change flow_id parameter type from str to uuid.UUID in graph building functions
* refactor: Ensure UUID handling for flow_id and user_id across various services and models
* refactor: improve UUID handling and graph caching for compatibility with sqlmodel 0.0.20
* fix: update message assertion in component events test
* chore: update sqlmodel dependency to version 0.0.22 in uv.lock and pyproject.toml
* fix: enhance flow_id validation to ensure valid UUID format in MessageBase model
* fix: add error handling for cache directory cleanup
* refactor: improve flow_id type handling in message storage
* refactor: enhance flow_id handling in message functions to support UUID type
* refactor: integrate Properties into message creation in component event tests
* update test durations
* fix: correct flow_id parameter in database query
* refactor: update session_id and flow_id parameters to support UUID type across message handling functions and models
* fix: handle message data update in SQLModel update method
* refactor: improve flow_id assignment in message update method to enhance UUID handling
Parent: 4cc336fa45
Commit: 3de42f4575
55 changed files with 1035 additions and 898 deletions
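Most of the migration diffs below repeat the same two mechanical changes: `Inspector.from_engine(conn)` is replaced by `sa.inspect(conn)`, and columns generated by sqlmodel move from `GUID()` to `types.Uuid()`. A minimal sketch of the inspector-guarded pattern the migrations converge on; the table and column names here are placeholders, not taken from the diff:

```python
import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    conn = op.get_bind()
    # sa.inspect() replaces the deprecated Inspector.from_engine() call.
    inspector = sa.inspect(conn)

    # Guard every schema change behind reflection so the migration is
    # idempotent across databases that may or may not have the table yet.
    if "example_table" not in inspector.get_table_names():
        return
    column_names = [column["name"] for column in inspector.get_columns("example_table")]
    if "owner_id" not in column_names:
        with op.batch_alter_table("example_table", schema=None) as batch_op:
            batch_op.add_column(sa.Column("owner_id", sa.Uuid(), nullable=True))
```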
@@ -8,6 +8,7 @@ Create Date: 2023-12-13 18:55:52.587360

 from typing import Sequence, Union

+import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.engine.reflection import Inspector

@@ -21,7 +22,7 @@ depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     conn = op.get_bind()
-    inspector = Inspector.from_engine(conn)  # type: ignore
+    inspector = sa.inspect(conn)  # type: ignore
     api_key_constraints = inspector.get_unique_constraints("apikey")
     flow_constraints = inspector.get_unique_constraints("flow")
     user_constraints = inspector.get_unique_constraints("user")

@@ -45,7 +46,7 @@ def upgrade() -> None:
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     conn = op.get_bind()
-    inspector = Inspector.from_engine(conn)  # type: ignore
+    inspector = sa.inspect(conn)  # type: ignore
     api_key_constraints = inspector.get_unique_constraints("apikey")
     flow_constraints = inspector.get_unique_constraints("flow")
     user_constraints = inspector.get_unique_constraints("user")
@@ -22,7 +22,7 @@ depends_on: Union[str, Sequence[str], None] = None

 def upgrade() -> None:
     conn = op.get_bind()
-    inspector = Inspector.from_engine(conn)  # type: ignore
+    inspector = sa.inspect(conn)  # type: ignore
     table_names = inspector.get_table_names()
     # ### commands auto generated by Alembic - please adjust! ###
     if "folder" not in table_names:

@@ -30,9 +30,9 @@ def upgrade() -> None:
             "folder",
             sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
             sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
-            sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
-            sa.Column("parent_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
-            sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
+            sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
+            sa.Column("parent_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=True),
+            sa.Column("user_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=True),
             sa.ForeignKeyConstraint(
                 ["parent_id"],
                 ["folder.id"],

@@ -51,7 +51,7 @@ def upgrade() -> None:
     column_names = [column["name"] for column in inspector.get_columns("flow")]
     with op.batch_alter_table("flow", schema=None) as batch_op:
         if "folder_id" not in column_names:
-            batch_op.add_column(sa.Column("folder_id", sqlmodel.sql.sqltypes.GUID(), nullable=True))
+            batch_op.add_column(sa.Column("folder_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=True))
             batch_op.create_foreign_key("flow_folder_id_fkey", "folder", ["folder_id"], ["id"])
         if "folder" in column_names:
             batch_op.drop_column("folder")
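The `GUID()` to `types.Uuid()` switch in these regenerated migrations reflects how sqlmodel 0.0.20+ maps `uuid.UUID` fields onto SQLAlchemy's native `Uuid` type. A hedged sketch of the kind of model that now produces such columns; the class and field names are illustrative, not Langflow's actual models:

```python
import uuid

from sqlmodel import Field, SQLModel


class FolderSketch(SQLModel, table=True):
    # A uuid.UUID annotation is emitted as a native Uuid column by newer
    # sqlmodel releases, which is why Alembic regenerates GUID() as types.Uuid().
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    parent_id: uuid.UUID | None = Field(default=None, foreign_key="foldersketch.id")
    name: str
```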
@ -61,7 +61,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
|
|
|
|||
|
|
@ -7,12 +7,13 @@ Create Date: 2024-10-04 17:30:12.924809
|
|||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
from langflow.utils import migration
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import sqlite
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
from langflow.utils import migration
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '0ae3a2674f32'
|
||||
|
|
@ -23,7 +24,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
|
||||
with op.batch_alter_table("vertex_build", schema=None) as batch_op:
|
||||
if migration.column_exists(table_name="vertex_build", column_name="params", conn=conn):
|
||||
|
|
@ -49,7 +50,7 @@ def upgrade() -> None:
|
|||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
with op.batch_alter_table("message", schema=None) as batch_op:
|
||||
if migration.column_exists(table_name="message", column_name="text", conn=conn):
|
||||
columns = inspector.get_columns("message")
|
||||
|
|
@ -67,4 +68,4 @@ def downgrade() -> None:
|
|||
batch_op.alter_column(
|
||||
"params", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
# ### end Alembic commands ###
|
||||
|
|
|
|||
|
|
@ -8,6 +8,9 @@ Create Date: 2024-01-17 10:32:56.686287
|
|||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "0b8757876a7c"
|
||||
down_revision: Union[str, None] = "006b3990db50"
|
||||
|
|
|
|||
|
|
@ -8,11 +8,11 @@ Create Date: 2024-07-26 11:41:31.274271
|
|||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from langflow.utils import migration
|
||||
from alembic import op
|
||||
|
||||
from langflow.utils import migration
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "0d60fcbd4e8e"
|
||||
|
|
@ -31,8 +31,8 @@ def upgrade() -> None:
|
|||
sa.Column("data", sa.JSON(), nullable=True),
|
||||
sa.Column("artifacts", sa.JSON(), nullable=True),
|
||||
sa.Column("params", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("build_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("build_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("valid", sa.BOOLEAN(), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "variable" not in table_names:
|
||||
|
|
@ -31,10 +31,10 @@ def upgrade() -> None:
|
|||
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("value", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_variable_user_id"),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
|
|
@ -45,7 +45,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "credential" not in table_names:
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ Create Date: 2024-05-29 23:12:09.146880
|
|||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
|
|
@ -20,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("folder")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table("folder", schema=None) as batch_op:
|
||||
|
|
@ -32,7 +33,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("folder")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table("folder", schema=None) as batch_op:
|
||||
|
|
|
|||
|
|
@ -10,9 +10,10 @@ from typing import Sequence, Union
|
|||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from langflow.utils import migration
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
from langflow.utils import migration
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "4522eb831f5c"
|
||||
down_revision: Union[str, None] = "0d60fcbd4e8e"
|
||||
|
|
@ -23,7 +24,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
|
||||
with op.batch_alter_table("flow", schema=None) as batch_op:
|
||||
if migration.column_exists(table_name="flow", column_name="description", conn=conn):
|
||||
|
|
@ -49,7 +50,7 @@ def upgrade() -> None:
|
|||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
with op.batch_alter_table("folder", schema=None) as batch_op:
|
||||
if migration.column_exists(table_name="folder", column_name="description", conn=conn):
|
||||
columns = inspector.get_columns("folder")
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names() # noqa
|
||||
column_names = [column["name"] for column in inspector.get_columns("message")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
|
@ -38,7 +38,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names() # noqa
|
||||
column_names = [column["name"] for column in inspector.get_columns("message")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
||||
with op.batch_alter_table("apikey", schema=None) as batch_op:
|
||||
|
|
@ -42,7 +42,7 @@ def upgrade() -> None:
|
|||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
|
||||
with op.batch_alter_table("apikey", schema=None) as batch_op:
|
||||
if migration.column_exists(table_name="apikey", column_name="name", conn=conn):
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
column_names = [column["name"] for column in inspector.get_columns("variable")]
|
||||
with op.batch_alter_table("variable", schema=None) as batch_op:
|
||||
|
|
@ -33,7 +33,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
column_names = [column["name"] for column in inspector.get_columns("variable")]
|
||||
with op.batch_alter_table("variable", schema=None) as batch_op:
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ def upgrade() -> None:
|
|||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# List existing tables
|
||||
existing_tables = inspector.get_table_names()
|
||||
# Drop 'flowstyle' table if it exists
|
||||
|
|
@ -48,7 +48,7 @@ def upgrade() -> None:
|
|||
if "user" not in existing_tables:
|
||||
op.create_table(
|
||||
"user",
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("username", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("password", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("is_active", sa.Boolean(), nullable=False),
|
||||
|
|
@ -70,9 +70,9 @@ def upgrade() -> None:
|
|||
sa.Column("last_used_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("total_uses", sa.Integer(), nullable=False, default=0),
|
||||
sa.Column("is_active", sa.Boolean(), nullable=False, default=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_apikey_user_id_user"),
|
||||
sa.PrimaryKeyConstraint("id", name="pk_apikey"),
|
||||
sa.UniqueConstraint("id", name="uq_apikey_id"),
|
||||
|
|
@ -87,8 +87,8 @@ def upgrade() -> None:
|
|||
sa.Column("data", sa.JSON(), nullable=True),
|
||||
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_flow_user_id_user"),
|
||||
sa.PrimaryKeyConstraint("id", name="pk_flow"),
|
||||
sa.UniqueConstraint("id", name="uq_flow_id"),
|
||||
|
|
@ -105,7 +105,7 @@ def upgrade() -> None:
|
|||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"user_id",
|
||||
sqlmodel.sql.sqltypes.GUID(),
|
||||
sqlmodel.sql.sqltypes.types.Uuid(),
|
||||
nullable=True, # This should be False, but we need to allow NULL values for now
|
||||
)
|
||||
)
|
||||
|
|
@ -126,7 +126,7 @@ def downgrade() -> None:
|
|||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# List existing tables
|
||||
existing_tables = inspector.get_table_names()
|
||||
if "flow" in existing_tables:
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ Create Date: 2024-05-21 09:23:48.772367
|
|||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
|
|
@ -19,7 +20,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
indexes = inspector.get_indexes("flow")
|
||||
with op.batch_alter_table("flow", schema=None) as batch_op:
|
||||
|
|
@ -32,7 +33,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
indexes = inspector.get_indexes("flow")
|
||||
with op.batch_alter_table("flow", schema=None) as batch_op:
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
tables = inspector.get_table_names()
|
||||
try:
|
||||
if "credential" not in tables:
|
||||
|
|
@ -32,8 +32,8 @@ def upgrade() -> None:
|
|||
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("value", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("provider", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("user_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ Create Date: 2024-05-29 23:08:43.935040
|
|||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
|
|
@ -20,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
indexes_names = [index["name"] for index in inspector.get_indexes("flow")]
|
||||
constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("flow")]
|
||||
|
|
@ -38,7 +39,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
indexes_names = [index["name"] for index in inspector.get_indexes("flow")]
|
||||
constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("flow")]
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "apikey" in table_names:
|
||||
|
|
@ -77,7 +77,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "variable" in table_names:
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ depends_on = None
|
|||
|
||||
def upgrade():
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn)
|
||||
inspector = sa.inspect(conn)
|
||||
tables = ["apikey", "variable"] # List of tables to modify
|
||||
|
||||
for table_name in tables:
|
||||
|
|
@ -35,7 +35,7 @@ def upgrade():
|
|||
|
||||
def downgrade():
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn)
|
||||
inspector = sa.inspect(conn)
|
||||
tables = ["apikey", "variable"] # List of tables to revert
|
||||
|
||||
for table_name in tables:
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
|
|
@ -34,7 +34,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names() # noqa
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
|
@ -37,7 +37,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names() # noqa
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ def upgrade() -> None:
|
|||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
try:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
if "user" in inspector.get_table_names() and "profile_image" not in [
|
||||
column["name"] for column in inspector.get_columns("user")
|
||||
]:
|
||||
|
|
@ -45,7 +45,7 @@ def downgrade() -> None:
|
|||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
try:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
if "user" in inspector.get_table_names() and "profile_image" in [
|
||||
column["name"] for column in inspector.get_columns("user")
|
||||
]:
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
columns = inspector.get_columns("apikey")
|
||||
|
|
@ -41,7 +41,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# table_names = inspector.get_table_names()
|
||||
columns = inspector.get_columns("apikey")
|
||||
column_names = {column["name"]: column for column in columns}
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
flow_columns = [column["name"] for column in inspector.get_columns("flow")]
|
||||
user_columns = [column["name"] for column in inspector.get_columns("user")]
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -10,9 +10,9 @@ from typing import Sequence, Union
|
|||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from loguru import logger
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
from loguru import logger
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "79e675cb6752"
|
||||
|
|
@ -23,7 +23,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "apikey" in table_names:
|
||||
|
|
@ -77,7 +77,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "variable" in table_names:
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
api_key_columns = [column["name"] for column in inspector.get_columns("apikey")]
|
||||
flow_columns = [column["name"] for column in inspector.get_columns("flow")]
|
||||
|
||||
|
|
@ -53,7 +53,7 @@ def downgrade() -> None:
|
|||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
try:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
with op.batch_alter_table("flow", schema=None) as batch_op:
|
||||
if "folder" in column_names:
|
||||
|
|
|
|||
|
|
@ -8,11 +8,11 @@ Create Date: 2024-07-24 11:37:48.532933
|
|||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from langflow.utils import migration
|
||||
from alembic import op
|
||||
|
||||
from langflow.utils import migration
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "90be8e2ed91e"
|
||||
|
|
@ -32,8 +32,8 @@ def upgrade() -> None:
|
|||
sa.Column("inputs", sa.JSON(), nullable=True),
|
||||
sa.Column("outputs", sa.JSON(), nullable=True),
|
||||
sa.Column("status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("error", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
indexes = inspector.get_indexes("flow")
|
||||
|
|
@ -38,7 +38,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
column_names = [column["name"] for column in inspector.get_columns("flow")]
|
||||
indexes = inspector.get_indexes("flow")
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
tables = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
try:
|
||||
|
|
@ -39,7 +39,7 @@ def upgrade() -> None:
|
|||
if "folder" not in flow_columns:
|
||||
batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
|
||||
if "user_id" not in flow_columns:
|
||||
batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True))
|
||||
batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=True))
|
||||
indices = inspector.get_indexes("flow")
|
||||
indices_names = [index["name"] for index in indices]
|
||||
if "ix_flow_user_id" not in indices_names:
|
||||
|
|
@ -54,7 +54,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
try:
|
||||
# Re-create the dropped table 'flowstyle' if it was previously dropped in upgrade
|
||||
if "flowstyle" not in inspector.get_table_names():
|
||||
|
|
@ -62,8 +62,8 @@ def downgrade() -> None:
|
|||
"flowstyle",
|
||||
sa.Column("color", sa.String(), nullable=False),
|
||||
sa.Column("emoji", sa.String(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.ForeignKeyConstraint(["flow_id"], ["flow.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
flow_columns = {column["name"] for column in inspector.get_columns("flow")}
|
||||
flow_indexes = {index["name"] for index in inspector.get_indexes("flow")}
|
||||
flow_fks = {fk["name"] for fk in inspector.get_foreign_keys("flow")}
|
||||
|
|
@ -35,7 +35,7 @@ def upgrade() -> None:
|
|||
if "folder" not in flow_columns:
|
||||
batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
|
||||
if "user_id" not in flow_columns:
|
||||
batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True))
|
||||
batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=True))
|
||||
if "ix_flow_user_id" not in flow_indexes:
|
||||
batch_op.create_index(batch_op.f("ix_flow_user_id"), ["user_id"], unique=False)
|
||||
if "flow_user_id_fkey" not in flow_fks:
|
||||
|
|
@ -44,7 +44,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
flow_columns = {column["name"] for column in inspector.get_columns("flow")}
|
||||
flow_indexes = {index["name"] for index in inspector.get_indexes("flow")}
|
||||
flow_fks = {fk["name"] for fk in inspector.get_foreign_keys("flow")}
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
columns = inspector.get_columns("variable")
|
||||
with op.batch_alter_table("variable", schema=None) as batch_op:
|
||||
|
|
@ -38,7 +38,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
columns = inspector.get_columns("variable")
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table("variable", schema=None) as batch_op:
|
||||
|
|
|
|||
|
|
@ -32,8 +32,8 @@ def upgrade() -> None:
|
|||
sa.Column("sender_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("session_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("text", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.types.Uuid(), nullable=True),
|
||||
sa.Column("files", sa.JSON(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "variable" not in table_names:
|
||||
|
|
@ -47,7 +47,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if "variable" not in table_names:
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
with op.batch_alter_table("vertex_build", schema=None) as batch_op:
|
||||
if migration.column_exists(table_name="vertex_build", column_name="id", conn=conn):
|
||||
columns = inspector.get_columns("vertex_build")
|
||||
|
|
@ -38,7 +38,7 @@ def upgrade() -> None:
|
|||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
with op.batch_alter_table("vertex_build", schema=None) as batch_op:
|
||||
if migration.column_exists(table_name="vertex_build", column_name="id", conn=conn):
|
||||
columns = inspector.get_columns("vertex_build")
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names() # noqa
|
||||
column_names = [column["name"] for column in inspector.get_columns("message")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
|
@ -36,7 +36,7 @@ def upgrade() -> None:
|
|||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
table_names = inspector.get_table_names() # noqa
|
||||
column_names = [column["name"] for column in inspector.get_columns("message")]
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ Create Date: 2023-11-24 15:07:37.566516
|
|||
|
||||
from typing import Optional, Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
|
|
@ -21,7 +22,7 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
tables = inspector.get_table_names()
|
||||
foreign_keys_names = []
|
||||
if "credential" in tables:
|
||||
|
|
@ -42,7 +43,7 @@ def upgrade() -> None:
|
|||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn) # type: ignore
|
||||
inspector = sa.inspect(conn) # type: ignore
|
||||
tables = inspector.get_table_names()
|
||||
foreign_keys_names: list[Optional[str]] = []
|
||||
if "credential" in tables:
|
||||
|
|
|
|||
|
|
@@ -140,7 +140,7 @@ def format_elapsed_time(elapsed_time: float) -> str:
     return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}"


-async def _get_flow_name(flow_id: str) -> str:
+async def _get_flow_name(flow_id: uuid.UUID) -> str:
     async with async_session_scope() as session:
         flow = await session.get(Flow, flow_id)
         if flow is None:

@@ -149,20 +149,21 @@ async def _get_flow_name(flow_id: str) -> str:
     return flow.name


-async def build_graph_from_data(flow_id: str, payload: dict, **kwargs):
+async def build_graph_from_data(flow_id: uuid.UUID | str, payload: dict, **kwargs):
     """Build and cache the graph."""
     # Get flow name
     if "flow_name" not in kwargs:
-        flow_name = await _get_flow_name(flow_id)
+        flow_name = await _get_flow_name(flow_id if isinstance(flow_id, uuid.UUID) else uuid.UUID(flow_id))
         kwargs["flow_name"] = flow_name
-    graph = Graph.from_payload(payload, flow_id, **kwargs)
+    str_flow_id = str(flow_id)
+    graph = Graph.from_payload(payload, str_flow_id, **kwargs)
     for vertex_id in graph.has_session_id_vertices:
         vertex = graph.get_vertex(vertex_id)
         if vertex is None:
             msg = f"Vertex {vertex_id} not found"
             raise ValueError(msg)
         if not vertex.raw_params.get("session_id"):
-            vertex.update_raw_params({"session_id": flow_id}, overwrite=True)
+            vertex.update_raw_params({"session_id": str_flow_id}, overwrite=True)

     run_id = uuid.uuid4()
     graph.set_run_id(run_id)
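Because `build_graph_from_data` now accepts either `uuid.UUID` or `str`, callers at the boundary can normalize once and pass UUIDs through. A minimal, standard-library-only sketch of that normalization:

```python
import uuid


def ensure_uuid(flow_id: uuid.UUID | str) -> uuid.UUID:
    """Return flow_id as a uuid.UUID, raising ValueError for malformed strings."""
    return flow_id if isinstance(flow_id, uuid.UUID) else uuid.UUID(flow_id)


# Both spellings of the same id normalize to equal UUID values.
assert ensure_uuid("aec14b4c-6a05-4a0e-9a0a-6bfed6fb54f2") == ensure_uuid(
    uuid.UUID("aec14b4c-6a05-4a0e-9a0a-6bfed6fb54f2")
)
```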
@@ -171,7 +172,7 @@ async def build_graph_from_data(flow_id: str, payload: dict, **kwargs):
     return graph


-async def build_graph_from_db_no_cache(flow_id: str, session: AsyncSession):
+async def build_graph_from_db_no_cache(flow_id: uuid.UUID, session: AsyncSession):
     """Build and cache the graph."""
     flow: Flow | None = await session.get(Flow, flow_id)
     if not flow or not flow.data:

@@ -180,20 +181,22 @@ async def build_graph_from_db_no_cache(flow_id: str, session: AsyncSession):
     return await build_graph_from_data(flow_id, flow.data, flow_name=flow.name, user_id=str(flow.user_id))


-async def build_graph_from_db(flow_id: str, session: AsyncSession, chat_service: ChatService):
-    graph = await build_graph_from_db_no_cache(flow_id, session)
-    await chat_service.set_cache(flow_id, graph)
+async def build_graph_from_db(flow_id: uuid.UUID, session: AsyncSession, chat_service: ChatService):
+    graph = await build_graph_from_db_no_cache(flow_id=flow_id, session=session)
+    await chat_service.set_cache(str(flow_id), graph)
     return graph


 async def build_and_cache_graph_from_data(
-    flow_id: str,
+    flow_id: uuid.UUID | str,
     chat_service: ChatService,
     graph_data: dict,
 ):  # -> Graph | Any:
     """Build and cache the graph."""
-    graph = Graph.from_payload(graph_data, flow_id)
-    await chat_service.set_cache(flow_id, graph)
+    # Convert flow_id to str if it's UUID
+    str_flow_id = str(flow_id) if isinstance(flow_id, uuid.UUID) else flow_id
+    graph = Graph.from_payload(graph_data, str_flow_id)
+    await chat_service.set_cache(str_flow_id, graph)
     return graph
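A short usage sketch of the two entry points after this change: the flow id can stay a `UUID` end to end, and only the cache key is stringified. The `session`, `chat_service`, and `payload` objects here are assumed, not defined in this diff:

```python
import uuid


async def example_build(session, chat_service, payload: dict | None = None):
    flow_id = uuid.UUID("aec14b4c-6a05-4a0e-9a0a-6bfed6fb54f2")
    if payload is None:
        # Build from the database; the graph is cached under str(flow_id).
        return await build_graph_from_db(flow_id=flow_id, session=session, chat_service=chat_service)
    # Or build from request data; both str and UUID flow ids are accepted.
    return await build_and_cache_graph_from_data(
        flow_id=flow_id, chat_service=chat_service, graph_data=payload
    )
```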
@ -22,7 +22,6 @@ from langflow.api.utils import (
|
|||
build_and_cache_graph_from_data,
|
||||
build_graph_from_data,
|
||||
build_graph_from_db,
|
||||
build_graph_from_db_no_cache,
|
||||
format_elapsed_time,
|
||||
format_exception_message,
|
||||
get_top_level_vertices,
|
||||
|
|
@ -100,13 +99,12 @@ async def retrieve_vertices_order(
|
|||
start_time = time.perf_counter()
|
||||
components_count = None
|
||||
try:
|
||||
flow_id_str = str(flow_id)
|
||||
# First, we need to check if the flow_id is in the cache
|
||||
if not data:
|
||||
graph = await build_graph_from_db(flow_id=flow_id_str, session=session, chat_service=chat_service)
|
||||
graph = await build_graph_from_db(flow_id=flow_id, session=session, chat_service=chat_service)
|
||||
else:
|
||||
graph = await build_and_cache_graph_from_data(
|
||||
flow_id=flow_id_str, graph_data=data.model_dump(), chat_service=chat_service
|
||||
flow_id=flow_id, graph_data=data.model_dump(), chat_service=chat_service
|
||||
)
|
||||
graph = graph.prepare(stop_component_id, start_component_id)
|
||||
|
||||
|
|
@ -166,13 +164,13 @@ async def build_flow(
|
|||
try:
|
||||
flow_id_str = str(flow_id)
|
||||
if not data:
|
||||
graph = await build_graph_from_db_no_cache(flow_id=flow_id_str, session=session)
|
||||
graph = await build_graph_from_db(flow_id=flow_id, session=session, chat_service=chat_service)
|
||||
else:
|
||||
async with async_session_scope() as new_session:
|
||||
result = await new_session.exec(select(Flow.name).where(Flow.id == flow_id_str))
|
||||
result = await new_session.exec(select(Flow.name).where(Flow.id == flow_id))
|
||||
flow_name = result.first()
|
||||
graph = await build_graph_from_data(
|
||||
flow_id_str, data.model_dump(), user_id=str(current_user.id), flow_name=flow_name
|
||||
flow_id=flow_id_str, payload=data.model_dump(), user_id=str(current_user.id), flow_name=flow_name
|
||||
)
|
||||
graph.validate_stream()
|
||||
if stop_component_id or start_component_id:
|
||||
|
|
@ -195,6 +193,7 @@ async def build_flow(
|
|||
# and return the same structure but only with the ids
|
||||
components_count = len(graph.vertices)
|
||||
vertices_to_run = list(graph.vertices_to_run.union(get_top_level_vertices(graph, graph.vertices_to_run)))
|
||||
await chat_service.set_cache(flow_id_str, graph)
|
||||
background_tasks.add_task(
|
||||
telemetry_service.log_package_playground,
|
||||
PlaygroundPayload(
|
||||
|
|
@ -217,12 +216,10 @@ async def build_flow(
|
|||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||
logger.exception("Error checking build status")
|
||||
raise HTTPException(status_code=500, detail=str(exc)) from exc
|
||||
|
||||
return first_layer, vertices_to_run, graph
|
||||
|
||||
async def _build_vertex(vertex_id: str, graph: Graph, event_manager: EventManager) -> VertexBuildResponse:
|
||||
flow_id_str = str(flow_id)
|
||||
|
||||
next_runnable_vertices = []
|
||||
top_level_vertices = []
|
||||
start_time = time.perf_counter()
|
||||
|
|
@ -506,13 +503,18 @@ async def build_vertex(
|
|||
top_level_vertices = []
|
||||
start_time = time.perf_counter()
|
||||
error_message = None
|
||||
try:
|
||||
graph: Graph = await chat_service.get_cache(flow_id_str)
|
||||
except KeyError as exc:
|
||||
raise HTTPException(status_code=404, detail="Graph not found") from exc
|
||||
|
||||
try:
|
||||
cache = await chat_service.get_cache(flow_id_str)
|
||||
if isinstance(cache, CacheMiss):
|
||||
# If there's no cache
|
||||
logger.warning(f"No cache found for {flow_id_str}. Building graph starting at {vertex_id}")
|
||||
graph: Graph = await build_graph_from_db(
|
||||
flow_id=flow_id_str, session=await anext(get_session()), chat_service=chat_service
|
||||
graph = await build_graph_from_db(
|
||||
flow_id=flow_id, session=await anext(get_session()), chat_service=chat_service
|
||||
)
|
||||
else:
|
||||
graph = cache.get("result")
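The endpoint now distinguishes "nothing cached yet" from real errors by checking for a `CacheMiss` sentinel instead of catching `KeyError` and returning 404, and on a miss it rebuilds the graph from the database. A stripped-down sketch of that pattern; the class and function names here are illustrative, not Langflow's actual cache implementation:

```python
class CacheMiss:
    """Sentinel returned when a key has never been cached."""


class SimpleCache:
    def __init__(self) -> None:
        self._store: dict[str, dict] = {}

    async def get_cache(self, key: str) -> dict | CacheMiss:
        # Returning a sentinel lets callers fall back to rebuilding
        # instead of translating KeyError into an HTTP 404.
        return self._store.get(key, CacheMiss())

    async def set_cache(self, key: str, value) -> None:
        self._store[key] = {"result": value}


async def get_or_build(cache: SimpleCache, key: str, build):
    cached = await cache.get_cache(key)
    if isinstance(cached, CacheMiss):
        value = await build()              # rebuild on a miss ...
        await cache.set_cache(key, value)  # ... and repopulate the cache
        return value
    return cached.get("result")
```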
@@ -12,7 +12,8 @@ from fastapi.responses import StreamingResponse
 from langflow.api.utils import CurrentActiveUser, DbSession
 from langflow.api.v1.schemas import UploadFileResponse
 from langflow.services.database.models.flow import Flow
-from langflow.services.deps import get_storage_service
+from langflow.services.deps import get_settings_service, get_storage_service
+from langflow.services.settings.service import SettingsService
 from langflow.services.storage.service import StorageService
 from langflow.services.storage.utils import build_content_type_from_extension


@@ -22,36 +23,39 @@ router = APIRouter(tags=["Files"], prefix="/files")
 # Create dep that gets the flow_id from the request
 # then finds it in the database and returns it while
 # using the current user as the owner
-async def get_flow_id(
+async def get_flow(
     flow_id: UUID,
     current_user: CurrentActiveUser,
     session: DbSession,
 ):
-    flow_id_str = str(flow_id)
-    # AttributeError: 'SelectOfScalar' object has no attribute 'first'
-    flow = await session.get(Flow, flow_id_str)
+    flow = await session.get(Flow, flow_id)
     if not flow:
         raise HTTPException(status_code=404, detail="Flow not found")
     if flow.user_id != current_user.id:
         raise HTTPException(status_code=403, detail="You don't have access to this flow")
-    return flow_id_str
+    return flow
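Returning the loaded `Flow` from the dependency (rather than just an id) gives every endpoint the ownership check and the ORM object in one step. A generic sketch of the same pattern with hypothetical names, not Langflow's actual code:

```python
from uuid import UUID

from fastapi import Depends, FastAPI, HTTPException

app = FastAPI()
FAKE_DB: dict[UUID, dict] = {}  # hypothetical stand-in for the database


async def get_item(item_id: UUID) -> dict:
    # The dependency resolves the path parameter once and returns the record,
    # so route handlers never repeat the lookup or the 404 handling.
    item = FAKE_DB.get(item_id)
    if item is None:
        raise HTTPException(status_code=404, detail="Item not found")
    return item


@app.get("/items/{item_id}")
async def read_item(item: dict = Depends(get_item)) -> dict:
    return item
```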
@router.post("/upload/{flow_id}", status_code=HTTPStatus.CREATED)
|
||||
async def upload_file(
|
||||
*,
|
||||
file: UploadFile,
|
||||
flow_id: Annotated[UUID, Depends(get_flow_id)],
|
||||
flow: Annotated[Flow, Depends(get_flow)],
|
||||
current_user: CurrentActiveUser,
|
||||
session: DbSession,
|
||||
storage_service: Annotated[StorageService, Depends(get_storage_service)],
|
||||
settings_service: Annotated[SettingsService, Depends(get_settings_service)],
|
||||
) -> UploadFileResponse:
|
||||
try:
|
||||
flow_id_str = str(flow_id)
|
||||
flow = await session.get(Flow, flow_id_str)
|
||||
max_file_size_upload = settings_service.settings.max_file_size_upload
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
|
||||
if file.size > max_file_size_upload * 1024 * 1024:
|
||||
raise HTTPException(
|
||||
status_code=413, detail=f"File size is larger than the maximum file size {max_file_size_upload}MB."
|
||||
)
|
||||
|
||||
if flow.user_id != current_user.id:
|
||||
raise HTTPException(status_code=403, detail="You don't have access to this flow")
|
||||
|
||||
|
|
@@ -60,9 +64,9 @@ async def upload_file(
         timestamp = datetime.now(tz=timezone.utc).astimezone().strftime("%Y-%m-%d_%H-%M-%S")
         file_name = file.filename or hashlib.sha256(file_content).hexdigest()
         full_file_name = f"{timestamp}_{file_name}"
-        folder = flow_id_str
+        folder = str(flow.id)
         await storage_service.save_file(flow_id=folder, file_name=full_file_name, data=file_content)
-        return UploadFileResponse(flow_id=flow_id_str, file_path=f"{folder}/{full_file_name}")
+        return UploadFileResponse(flow_id=str(flow.id), file_path=f"{folder}/{full_file_name}")
     except Exception as e:
         raise HTTPException(status_code=500, detail=str(e)) from e
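For reference, a hedged client-side sketch of calling this endpoint. The `/api/v1` mount point, the port, and the `x-api-key` header are assumptions about a typical Langflow deployment, not something this diff shows:

```python
import httpx

BASE_URL = "http://localhost:7860/api/v1"  # assumed mount point and port
FLOW_ID = "aec14b4c-6a05-4a0e-9a0a-6bfed6fb54f2"  # an existing flow id

with httpx.Client(base_url=BASE_URL, headers={"x-api-key": "sk-..."}) as client:
    # The server stores the upload under a folder named after the flow id and
    # echoes the flow id and the generated file path back in the response body.
    with open("notes.txt", "rb") as fp:
        response = client.post(f"/files/upload/{FLOW_ID}", files={"file": fp})
    response.raise_for_status()
    print(response.json())
```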
@ -164,12 +168,11 @@ async def list_profile_pictures():
|
|||
|
||||
@router.get("/list/{flow_id}")
|
||||
async def list_files(
|
||||
flow_id: Annotated[UUID, Depends(get_flow_id)],
|
||||
flow: Annotated[Flow, Depends(get_flow)],
|
||||
storage_service: Annotated[StorageService, Depends(get_storage_service)],
|
||||
):
|
||||
try:
|
||||
flow_id_str = str(flow_id)
|
||||
files = await storage_service.list_files(flow_id=flow_id_str)
|
||||
files = await storage_service.list_files(flow_id=str(flow.id))
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
|
||||
|
|
@ -179,12 +182,11 @@ async def list_files(
|
|||
@router.delete("/delete/{flow_id}/{file_name}")
|
||||
async def delete_file(
|
||||
file_name: str,
|
||||
flow_id: Annotated[UUID, Depends(get_flow_id)],
|
||||
flow: Annotated[Flow, Depends(get_flow)],
|
||||
storage_service: Annotated[StorageService, Depends(get_storage_service)],
|
||||
):
|
||||
try:
|
||||
flow_id_str = str(flow_id)
|
||||
await storage_service.delete_file(flow_id=flow_id_str, file_name=file_name)
|
||||
await storage_service.delete_file(flow_id=str(flow.id), file_name=file_name)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import json
|
|||
import zipfile
|
||||
from datetime import datetime, timezone
|
||||
from typing import Annotated
|
||||
from uuid import UUID
|
||||
|
||||
import orjson
|
||||
from fastapi import APIRouter, Depends, File, HTTPException, Response, UploadFile, status
|
||||
|
|
@ -114,7 +115,7 @@ async def read_folders(
|
|||
async def read_folder(
|
||||
*,
|
||||
session: DbSession,
|
||||
folder_id: str,
|
||||
folder_id: UUID,
|
||||
current_user: CurrentActiveUser,
|
||||
params: Annotated[Params | None, Depends(custom_params)],
|
||||
is_component: bool = False,
|
||||
|
|
@ -165,7 +166,7 @@ async def read_folder(
|
|||
async def update_folder(
|
||||
*,
|
||||
session: DbSession,
|
||||
folder_id: str,
|
||||
folder_id: UUID,
|
||||
folder: FolderUpdate, # Assuming FolderUpdate is a Pydantic model defining updatable fields
|
||||
current_user: CurrentActiveUser,
|
||||
):
|
||||
|
|
@ -226,7 +227,7 @@ async def update_folder(
|
|||
async def delete_folder(
|
||||
*,
|
||||
session: DbSession,
|
||||
folder_id: str,
|
||||
folder_id: UUID,
|
||||
current_user: CurrentActiveUser,
|
||||
):
|
||||
try:
|
||||
|
|
@ -258,7 +259,7 @@ async def delete_folder(
|
|||
async def download_file(
|
||||
*,
|
||||
session: DbSession,
|
||||
folder_id: str,
|
||||
folder_id: UUID,
|
||||
current_user: CurrentActiveUser,
|
||||
):
|
||||
"""Download all flows from folder as a zip file."""
|
||||
|
|
|
|||
|
|
@@ -7,6 +7,7 @@ from collections.abc import AsyncIterator, Iterator
 from copy import deepcopy
 from textwrap import dedent
 from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, get_type_hints
+from uuid import UUID

 import nanoid
 import yaml

@@ -1006,7 +1007,10 @@ class Component(CustomComponent):

     async def send_message(self, message: Message, id_: str | None = None):
         if (hasattr(self, "graph") and self.graph.session_id) and (message is not None and not message.session_id):
-            message.session_id = self.graph.session_id
+            session_id = (
+                UUID(self.graph.session_id) if isinstance(self.graph.session_id, str) else self.graph.session_id
+            )
+            message.session_id = session_id
         stored_message = await self._store_message(message)

         self._stored_message_id = stored_message.id
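The commit message also mentions stricter `flow_id` validation on the `MessageBase` model, which is not part of this excerpt. Purely as a hypothetical sketch, such a Pydantic v2 validator could accept either a UUID or a UUID-formatted string:

```python
from uuid import UUID

from pydantic import BaseModel, field_validator


class MessageBaseSketch(BaseModel):
    # Hypothetical stand-in; the real MessageBase lives in Langflow and has more fields.
    flow_id: UUID | None = None

    @field_validator("flow_id", mode="before")
    @classmethod
    def coerce_flow_id(cls, value):
        if value is None or isinstance(value, UUID):
            return value
        return UUID(str(value))  # raises ValueError for malformed ids


print(MessageBaseSketch(flow_id="aec14b4c-6a05-4a0e-9a0a-6bfed6fb54f2").flow_id)
```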
@ -1031,13 +1035,16 @@ class Component(CustomComponent):
|
|||
return stored_message
|
||||
|
||||
async def _store_message(self, message: Message) -> Message:
|
||||
flow_id = self.graph.flow_id if hasattr(self, "graph") else None
|
||||
messages = await astore_message(message, flow_id=flow_id)
|
||||
if len(messages) != 1:
|
||||
flow_id: str | None = None
|
||||
if hasattr(self, "graph"):
|
||||
# Convert UUID to str if needed
|
||||
flow_id = str(self.graph.flow_id) if self.graph.flow_id else None
|
||||
stored_messages = await astore_message(message, flow_id=flow_id)
|
||||
if len(stored_messages) != 1:
|
||||
msg = "Only one message can be stored at a time."
|
||||
raise ValueError(msg)
|
||||
|
||||
return messages[0]
|
||||
stored_message = stored_messages[0]
|
||||
return await Message.create(**stored_message.model_dump())
|
||||
|
||||
async def _send_message_event(self, message: Message, id_: str | None = None, category: str | None = None) -> None:
|
||||
if hasattr(self, "_event_manager") and self._event_manager:
|
||||
|
|
@ -1065,10 +1072,20 @@ class Component(CustomComponent):
|
|||
and not isinstance(original_message.text, str)
|
||||
)
|
||||
|
||||
async def _update_stored_message(self, stored_message: Message) -> Message:
|
||||
message_tables = await aupdate_messages(stored_message)
|
||||
if len(message_tables) != 1:
|
||||
msg = "Only one message can be updated at a time."
|
||||
async def _update_stored_message(self, message: Message) -> Message:
|
||||
"""Update the stored message."""
|
||||
if hasattr(self, "_vertex") and self._vertex is not None and hasattr(self._vertex, "graph"):
|
||||
flow_id = (
|
||||
UUID(self._vertex.graph.flow_id)
|
||||
if isinstance(self._vertex.graph.flow_id, str)
|
||||
else self._vertex.graph.flow_id
|
||||
)
|
||||
|
||||
message.flow_id = flow_id
|
||||
|
||||
message_tables = await aupdate_messages(message)
|
||||
if not message_tables:
|
||||
msg = "Failed to update message"
|
||||
raise ValueError(msg)
|
||||
message_table = message_tables[0]
|
||||
return await Message.create(**message_table.model_dump())
|
||||
|
|
|
|||
|
|
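The component code above repeatedly normalizes identifiers that may arrive either as `str` or as `uuid.UUID`: session IDs are parsed into UUIDs, while flow IDs are stringified before storage. A minimal sketch of that normalization pattern in isolation; `ensure_uuid` and `ensure_str` are illustrative helper names, not names from the codebase:

from uuid import UUID


def ensure_uuid(value: str | UUID | None) -> UUID | None:
    # Parse strings into uuid.UUID; pass UUIDs (and None) through unchanged.
    if value is None or isinstance(value, UUID):
        return value
    return UUID(value)


def ensure_str(value: str | UUID | None) -> str | None:
    # Canonical string form for call sites that still expect str.
    return None if value is None else str(value)


uid = UUID("12345678-1234-5678-1234-567812345678")
assert ensure_uuid(str(uid)) == uid
assert ensure_str(uid) == str(uid)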
@@ -33,8 +33,9 @@ def list_flows(*, user_id: str | None = None) -> list[Data]:
        raise ValueError(msg)
    try:
        with session_scope() as session:
            uuid_user_id = UUID(user_id) if isinstance(user_id, str) else user_id
            flows = session.exec(
                select(Flow).where(Flow.user_id == user_id).where(Flow.is_component == False)  # noqa: E712
                select(Flow).where(Flow.user_id == uuid_user_id).where(Flow.is_component == False)  # noqa: E712
            ).all()

            return [flow.to_data() for flow in flows]
@@ -18,7 +18,7 @@ from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER
def _get_variable_query(
    sender: str | None = None,
    sender_name: str | None = None,
    session_id: str | None = None,
    session_id: str | UUID | None = None,
    order_by: str | None = "timestamp",
    order: str | None = "DESC",
    flow_id: UUID | None = None,
@@ -44,7 +44,7 @@ def _get_variable_query(
def get_messages(
    sender: str | None = None,
    sender_name: str | None = None,
    session_id: str | None = None,
    session_id: str | UUID | None = None,
    order_by: str | None = "timestamp",
    order: str | None = "DESC",
    flow_id: UUID | None = None,
@@ -73,7 +73,7 @@ def get_messages(
async def aget_messages(
    sender: str | None = None,
    sender_name: str | None = None,
    session_id: str | None = None,
    session_id: str | UUID | None = None,
    order_by: str | None = "timestamp",
    order: str | None = "DESC",
    flow_id: UUID | None = None,
@@ -99,7 +99,7 @@ async def aget_messages(
    return [await Message.create(**d.model_dump()) for d in messages]


def add_messages(messages: Message | list[Message], flow_id: str | None = None):
def add_messages(messages: Message | list[Message], flow_id: str | UUID | None = None):
    """Add a message to the monitor service."""
    if not isinstance(messages, list):
        messages = [messages]
@@ -110,6 +110,10 @@ def add_messages(messages: Message | list[Message], flow_id: str | None = None):
        raise ValueError(msg)

    try:
        # Convert flow_id to UUID if it's a string
        if isinstance(flow_id, str):
            flow_id = UUID(flow_id)

        messages_models = [MessageTable.from_message(msg, flow_id=flow_id) for msg in messages]
        with session_scope() as session:
            messages_models = add_messagetables(messages_models, session)
@@ -119,7 +123,7 @@ def add_messages(messages: Message | list[Message], flow_id: str | None = None):
        raise


async def aadd_messages(messages: Message | list[Message], flow_id: str | None = None):
async def aadd_messages(messages: Message | list[Message], flow_id: str | UUID | None = None):
    """Add a message to the monitor service."""
    if not isinstance(messages, list):
        messages = [messages]
@@ -146,9 +150,15 @@ def update_messages(messages: Message | list[Message]) -> list[Message]:
    with session_scope() as session:
        updated_messages: list[MessageTable] = []
        for message in messages:
            msg = session.get(MessageTable, message.id)
            message_id = UUID(message.id) if isinstance(message.id, str) else message.id
            msg = session.get(MessageTable, message_id)
            if msg:
                msg.sqlmodel_update(message.model_dump(exclude_unset=True, exclude_none=True))
                if hasattr(message, "data"):
                    msg = msg.sqlmodel_update(message.data)
                else:
                    msg = msg.sqlmodel_update(message.model_dump(exclude_unset=True, exclude_none=True))
                if isinstance(msg.flow_id, str):
                    msg.flow_id = UUID(msg.flow_id)
                session.add(msg)
                session.commit()
                session.refresh(msg)
@@ -167,7 +177,10 @@ async def aupdate_messages(messages: Message | list[Message]) -> list[Message]:
        for message in messages:
            msg = await session.get(MessageTable, message.id)
            if msg:
                msg.sqlmodel_update(message.model_dump(exclude_unset=True, exclude_none=True))
                if hasattr(message, "data"):
                    msg = msg.sqlmodel_update(message.data)
                else:
                    msg = msg.sqlmodel_update(message.model_dump(exclude_unset=True, exclude_none=True))
                session.add(msg)
                await session.commit()
                await session.refresh(msg)
@@ -262,13 +275,13 @@ async def delete_message(id_: str) -> None:

def store_message(
    message: Message,
    flow_id: str | None = None,
    flow_id: str | UUID | None = None,
) -> list[Message]:
    """Stores a message in the memory.

    Args:
        message (Message): The message to store.
        flow_id (Optional[str]): The flow ID associated with the message.
        flow_id (Optional[str | UUID]): The flow ID associated with the message.
            When running from the CustomComponent you can access this using `self.graph.flow_id`.

    Returns:
@@ -281,6 +294,10 @@ def store_message(
        logger.warning("No message provided.")
        return []

    # Convert flow_id to UUID if it's a string
    if isinstance(flow_id, str):
        flow_id = UUID(flow_id)

    required_fields = ["session_id", "sender", "sender_name"]
    missing_fields = [field for field in required_fields if not getattr(message, field)]
    if missing_fields:
@@ -302,7 +319,7 @@ def store_message(

async def astore_message(
    message: Message,
    flow_id: str | None = None,
    flow_id: str | UUID | None = None,
) -> list[Message]:
    """Stores a message in the memory.
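With these signatures widened, the public memory helpers now accept the flow ID in either form and convert strings internally. A minimal usage sketch, assuming the helpers are imported from `langflow.memory` as in the current codebase; the message field values are placeholders:

from uuid import uuid4

from langflow.memory import astore_message
from langflow.schema.message import Message


async def demo() -> None:
    flow_id = uuid4()
    message = Message(sender="User", sender_name="User", session_id="session-1", text="hello")
    # Passing flow_id=str(flow_id) would work equally well; the helper parses it with UUID(flow_id).
    await astore_message(message, flow_id=flow_id)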
@@ -40,7 +40,7 @@ class Message(Data):
    sender: str | None = None
    sender_name: str | None = None
    files: list[str | Image] | None = Field(default=[])
    session_id: str | None = Field(default="")
    session_id: str | UUID | None = Field(default="")
    timestamp: Annotated[str, timestamp_to_str_validator] = Field(
        default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z")
    )
@@ -1,11 +1,22 @@
from collections.abc import AsyncIterator, Generator, Iterator
from datetime import datetime
from typing import Annotated
from uuid import UUID

from loguru import logger
from pydantic import BaseModel
from pydantic import BaseModel, BeforeValidator
from pydantic.v1 import BaseModel as BaseModelV1


def str_to_uuid(v: str | UUID) -> UUID:
    if isinstance(v, str):
        return UUID(v)
    return v


UUIDstr = Annotated[UUID, BeforeValidator(str_to_uuid)]


def recursive_serialize_or_str(obj):
    try:
        if isinstance(obj, type) and issubclass(obj, BaseModel | BaseModelV1):
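The new `UUIDstr` alias attaches a pydantic `BeforeValidator` to `uuid.UUID`, so any field annotated with it accepts either a string or a UUID and always ends up holding a UUID. A short standalone sketch with a hypothetical model name:

from uuid import UUID

from pydantic import BaseModel

from langflow.schema.serialize import UUIDstr


class ExampleRecord(BaseModel):
    # Hypothetical model; str input is parsed by the BeforeValidator before assignment.
    id: UUIDstr


record = ExampleRecord(id="12345678-1234-5678-1234-567812345678")
assert isinstance(record.id, UUID)
assert ExampleRecord(id=record.id).id == record.id  # UUID inputs pass through unchanged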
@@ -1,10 +1,12 @@
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from uuid import UUID, uuid4
from uuid import uuid4

from pydantic import field_validator
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel, func

from langflow.schema.serialize import UUIDstr

if TYPE_CHECKING:
    from langflow.services.database.models.user import User


@@ -21,14 +23,14 @@ class ApiKeyBase(SQLModel):


class ApiKey(ApiKeyBase, table=True):  # type: ignore[call-arg]
    id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
    id: UUIDstr = Field(default_factory=uuid4, primary_key=True, unique=True)
    created_at: datetime | None = Field(
        default=None, sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    )
    api_key: str = Field(index=True, unique=True)
    # User relationship
    # Delete API keys when user is deleted
    user_id: UUID = Field(index=True, foreign_key="user.id")
    user_id: UUIDstr = Field(index=True, foreign_key="user.id")
    user: "User" = Relationship(
        back_populates="api_keys",
    )
@@ -36,7 +38,7 @@ class ApiKey(ApiKeyBase, table=True):  # type: ignore[call-arg]

class ApiKeyCreate(ApiKeyBase):
    api_key: str | None = None
    user_id: UUID | None = None
    user_id: UUIDstr | None = None
    created_at: datetime | None = Field(default_factory=utc_now)

    @field_validator("created_at", mode="before")
@@ -46,15 +48,15 @@ class ApiKeyCreate(ApiKeyBase):


class UnmaskedApiKeyRead(ApiKeyBase):
    id: UUID
    id: UUIDstr
    api_key: str = Field()
    user_id: UUID = Field()
    user_id: UUIDstr = Field()


class ApiKeyRead(ApiKeyBase):
    id: UUID
    id: UUIDstr
    api_key: str = Field(schema_extra={"validate_default": True})
    user_id: UUID = Field()
    user_id: UUIDstr = Field()
    created_at: datetime = Field()

    @field_validator("api_key")
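The model fields above swap plain `UUID` annotations for the `UUIDstr` alias so values arriving as strings are parsed during validation, which matters now that sqlmodel 0.0.20+ gives `uuid.UUID` columns first-class support via SQLAlchemy's `Uuid` type. A self-contained sketch of the same idea with a hypothetical table, not the project's schema:

from uuid import UUID, uuid4

from sqlmodel import Field, Session, SQLModel, create_engine


class ExampleKey(SQLModel, table=True):
    # Hypothetical table; mirrors ApiKey's UUID primary key without the relationships.
    id: UUID = Field(default_factory=uuid4, primary_key=True)
    name: str


engine = create_engine("sqlite:///:memory:")
SQLModel.metadata.create_all(engine)  # the id column is emitted as a Uuid-typed column

with Session(engine) as session:
    session.add(ExampleKey(name="demo"))
    session.commit()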
@@ -47,7 +47,10 @@ class MessageBase(SQLModel):
        for file in message.files:
            if hasattr(file, "path") and hasattr(file, "url") and file.path:
                session_id = message.session_id
                image_paths.append(f"{session_id}{file.path.split(session_id)[1]}")
                if session_id:
                    image_paths.append(f"{session_id}{file.path.split(str(session_id))[1]}")
                else:
                    image_paths.append(file.path)
        if image_paths:
            message.files = image_paths

@@ -76,6 +79,13 @@ class MessageBase(SQLModel):
            content = content_block.model_dump_json() if hasattr(content_block, "model_dump_json") else content_block
            content_blocks.append(content)

        if isinstance(flow_id, str):
            try:
                flow_id = UUID(flow_id)
            except ValueError as exc:
                msg = f"Flow ID {flow_id} is not a valid UUID"
                raise ValueError(msg) from exc

        return cls(
            sender=message.sender,
            sender_name=message.sender_name,
@@ -17,6 +17,8 @@ async def get_user_by_username(db: AsyncSession, username: str) -> User | None:


async def get_user_by_id(db: AsyncSession, user_id: UUID) -> User | None:
    if isinstance(user_id, str):
        user_id = UUID(user_id)
    stmt = select(User).where(User.id == user_id)
    return (await db.exec(stmt)).first()
@@ -4,6 +4,8 @@ from uuid import UUID, uuid4

from sqlmodel import Field, Relationship, SQLModel

from langflow.schema.serialize import UUIDstr

if TYPE_CHECKING:
    from langflow.services.database.models.api_key import ApiKey
    from langflow.services.database.models.flow import Flow
@@ -12,7 +14,7 @@ if TYPE_CHECKING:


class User(SQLModel, table=True):  # type: ignore[call-arg]
    id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
    id: UUIDstr = Field(default_factory=uuid4, primary_key=True, unique=True)
    username: str = Field(index=True, unique=True)
    password: str = Field()
    profile_image: str | None = Field(default=None, nullable=True)
@@ -67,6 +67,8 @@ class LocalStorageService(StorageService):
        :return: A list of file names.
        :raises FileNotFoundError: If the flow directory does not exist.
        """
        if not isinstance(flow_id, str):
            flow_id = str(flow_id)
        folder_path = self.data_dir / flow_id
        if not await folder_path.exists() or not await folder_path.is_dir():
            logger.warning(f"Flow {flow_id} directory does not exist.")
@@ -1,4 +1,4 @@
from sqlalchemy.engine.reflection import Inspector
import sqlalchemy as sa


def table_exists(name, conn):
@@ -11,7 +11,7 @@ def table_exists(name, conn):
    Returns:
        bool: True if the table exists, False otherwise.
    """
    inspector = Inspector.from_engine(conn)
    inspector = sa.inspect(conn)
    return name in inspector.get_table_names()


@@ -26,7 +26,7 @@ def column_exists(table_name, column_name, conn):
    Returns:
        bool: True if the column exists, False otherwise.
    """
    inspector = Inspector.from_engine(conn)
    inspector = sa.inspect(conn)
    return column_name in [column["name"] for column in inspector.get_columns(table_name)]


@@ -41,7 +41,7 @@ def foreign_key_exists(table_name, fk_name, conn):
    Returns:
        bool: True if the foreign key exists, False otherwise.
    """
    inspector = Inspector.from_engine(conn)
    inspector = sa.inspect(conn)
    return fk_name in [fk["name"] for fk in inspector.get_foreign_keys(table_name)]


@@ -56,6 +56,6 @@ def constraint_exists(table_name, constraint_name, conn):
    Returns:
        bool: True if the constraint exists, False otherwise.
    """
    inspector = Inspector.from_engine(conn)
    inspector = sa.inspect(conn)
    constraints = inspector.get_unique_constraints(table_name)
    return constraint_name in [constraint["name"] for constraint in constraints]
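These helpers move from `Inspector.from_engine(conn)`, deprecated since SQLAlchemy 1.4, to `sa.inspect(conn)`. A minimal sketch of the adopted reflection pattern, using an in-memory SQLite engine and a hypothetical table purely for illustration:

import sqlalchemy as sa

engine = sa.create_engine("sqlite:///:memory:")
sa.Table("example_table", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True)).create(engine)

with engine.connect() as conn:
    # sa.inspect() returns an Inspector bound to the connection or engine it is given.
    inspector = sa.inspect(conn)
    assert "example_table" in inspector.get_table_names()
    assert "id" in [column["name"] for column in inspector.get_columns("example_table")]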
@@ -114,10 +114,10 @@ dependencies = [
    "langchain~=0.3.3",
    "langchain-core~=0.3.15",
    "langchainhub~=0.1.15",
    "sqlmodel==0.0.18",
    "loguru>=0.7.1,<1.0.0",
    "rich>=13.7.0,<14.0.0",
    "langchain-experimental>=0.0.61,<1.0.0",
    "sqlmodel==0.0.22",
    "pydantic~=2.7.0",
    "pydantic-settings>=2.2.0,<3.0.0",
    "typer>=0.13.0,<1.0.0",
File diff suppressed because it is too large
@@ -8,17 +8,9 @@ from langflow.components.outputs import ChatOutput
from langflow.components.tools.calculator import CalculatorToolComponent
from langflow.graph import Graph
from langflow.schema.data import Data
from langflow.services.settings.feature_flags import FEATURE_FLAGS
from pydantic import BaseModel


@pytest.fixture
def _add_toolkit_output():
    FEATURE_FLAGS.add_toolkit_output = True
    yield
    FEATURE_FLAGS.add_toolkit_output = False


async def test_component_tool():
    calculator_component = CalculatorToolComponent()
    component_toolkit = ComponentToolkit(component=calculator_component)
@@ -43,7 +35,6 @@ async def test_component_tool():


@pytest.mark.api_key_required
@pytest.mark.usefixtures("_add_toolkit_output")
def test_component_tool_with_api_key():
    chat_output = ChatOutput()
    openai_llm = OpenAIModelComponent()
@@ -2,6 +2,7 @@ import asyncio
import time
from typing import Any
from unittest.mock import MagicMock
from uuid import uuid4

import pytest
from langflow.custom.custom_component.component import Component
@@ -9,7 +10,7 @@ from langflow.events.event_manager import EventManager
from langflow.schema.content_block import ContentBlock
from langflow.schema.content_types import TextContent, ToolContent
from langflow.schema.message import Message
from langflow.schema.properties import Source
from langflow.schema.properties import Properties, Source
from langflow.template.field.base import Output


@@ -52,11 +53,13 @@ async def test_component_message_sending():
    component.set_event_manager(event_manager)

    # Create a message
    properties = Properties()
    message = Message(
        sender="test_sender",
        session_id="test_session",
        sender_name="test_sender_name",
        content_blocks=[ContentBlock(title="Test Block", contents=[TextContent(type="text", text="Test message")])],
        properties=properties,
    )

    # Send the message
@@ -80,6 +83,7 @@ async def test_component_tool_output():
    component.set_event_manager(event_manager)

    # Create a message with tool content
    properties = Properties()
    message = Message(
        sender="test_sender",
        session_id="test_session",
@@ -90,6 +94,7 @@ async def test_component_tool_output():
                contents=[ToolContent(type="tool_use", name="test_tool", tool_input={"query": "test input"})],
            )
        ],
        properties=properties,
    )

    # Send the message
@@ -210,7 +215,7 @@ async def test_component_streaming_message():
    # Create a proper mock vertex with graph and flow_id
    vertex = MagicMock()
    mock_graph = MagicMock()
    mock_graph.flow_id = "12345678-1234-5678-1234-567812345678"  # Valid UUID string
    mock_graph.flow_id = str(uuid4())
    vertex.graph = mock_graph

    component = ComponentForTesting(_vertex=vertex)
@@ -227,11 +232,13 @@ async def test_component_streaming_message():
        yield StreamChunk(chunk)

    # Create a streaming message
    properties = Properties()
    message = Message(
        sender="test_sender",
        session_id="test_session",
        sender_name="test_sender_name",
        text=text_generator(),
        properties=properties,
    )

    # Send the streaming message
@@ -6,6 +6,7 @@ from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts.chat import ChatPromptTemplate
from langflow.schema.message import Message
from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER
from loguru import logger
from platformdirs import user_cache_dir


@@ -176,4 +177,7 @@ def cleanup():
    # Clean up the real cache directory after tests
    cache_dir = Path(user_cache_dir("langflow"))
    if cache_dir.exists():
        shutil.rmtree(str(cache_dir))
        try:
            shutil.rmtree(str(cache_dir))
        except OSError as exc:
            logger.error(f"Error cleaning up cache directory: {exc}")
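The cleanup fixture above now tolerates removal failures by catching OSError and logging instead of failing the test run. A minimal sketch of that teardown pattern as a standalone helper; the function name is illustrative, not part of the codebase. Compared with shutil.rmtree(..., ignore_errors=True), this keeps the failure visible in the logs while still not aborting teardown:

import shutil
from pathlib import Path

from loguru import logger


def remove_dir_best_effort(path: Path) -> None:
    # Illustrative helper: remove a directory tree but never raise into the caller.
    if not path.exists():
        return
    try:
        shutil.rmtree(str(path))
    except OSError as exc:
        logger.error(f"Error cleaning up directory {path}: {exc}")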
uv.lock (generated)
@@ -4048,7 +4048,7 @@ requires-dist = [
    { name = "sentry-sdk", extras = ["fastapi", "loguru"], specifier = ">=2.5.1,<3.0.0" },
    { name = "setuptools", specifier = ">=70,<76.0.0" },
    { name = "spider-client", specifier = ">=0.0.27,<1.0.0" },
    { name = "sqlmodel", specifier = "==0.0.18" },
    { name = "sqlmodel", specifier = "==0.0.22" },
    { name = "typer", specifier = ">=0.13.0,<1.0.0" },
    { name = "types-google-cloud-ndb", marker = "extra == 'dev'", specifier = ">=2.2.0.0" },
    { name = "types-markdown", marker = "extra == 'dev'", specifier = ">=3.7.0.20240822" },
@@ -7421,15 +7421,15 @@ postgresql-psycopgbinary = [

[[package]]
name = "sqlmodel"
version = "0.0.18"
version = "0.0.22"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "pydantic" },
    { name = "sqlalchemy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/32/19/d0b363befa90c168941f4f7826f6a9d7211f4aa2b99660ac0410bf5803f8/sqlmodel-0.0.18.tar.gz", hash = "sha256:2e520efe03810ef2c268a1004cfc5ef8f8a936312232f38d6c8e62c11af2cac3", size = 109419 }
sdist = { url = "https://files.pythonhosted.org/packages/b5/39/8641040ab0d5e1d8a1c2325ae89a01ae659fc96c61a43d158fb71c9a0bf0/sqlmodel-0.0.22.tar.gz", hash = "sha256:7d37c882a30c43464d143e35e9ecaf945d88035e20117bf5ec2834a23cbe505e", size = 116392 }
wheels = [
    { url = "https://files.pythonhosted.org/packages/cc/5f/8838e6b1b6673709e93386d6d42d28030883079b5ebcbdc7a37f2953e993/sqlmodel-0.0.18-py3-none-any.whl", hash = "sha256:d70fdf8fe595e30a918660cf4537b9c5fc2fffdbfcba851a0135de73c3ebcbb7", size = 26507 },
    { url = "https://files.pythonhosted.org/packages/dd/b1/3af5104b716c420e40a6ea1b09886cae3a1b9f4538343875f637755cae5b/sqlmodel-0.0.22-py3-none-any.whl", hash = "sha256:a1ed13e28a1f4057cbf4ff6cdb4fc09e85702621d3259ba17b3c230bfb2f941b", size = 28276 },
]

[[package]]