Revert "fix: published flows now can add rows to the database" (#7571)

Revert "fix: published flows now can add rows to the database (#7560)"

This reverts commit 077110a496.
This commit is contained in:
Gabriel Luiz Freitas Almeida 2025-04-10 18:40:54 -03:00 committed by GitHub
commit a8d2fccd1c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 25 additions and 80 deletions

View file

@@ -1,72 +0,0 @@
"""remove fk constraint in message transaction and vertex build
Revision ID: 1b8b740a6fa3
Revises: f3b2d1f1002d
Create Date: 2025-04-10 10:17:32.493181
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.engine.reflection import Inspector
from langflow.utils import migration
# revision identifiers, used by Alembic.
revision: str = '1b8b740a6fa3'
down_revision: Union[str, None] = 'f3b2d1f1002d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Drop the foreign-key constraints on ``flow_id`` (referencing ``flow``)
    from the ``message``, ``transaction`` and ``vertex_build`` tables.
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    # ### commands auto generated by Alembic - please adjust! ###
    # The constraint names differ between backends (SQLite vs Postgres), so we
    # look each one up via the inspector instead of hard-coding a name.
    for table_name in ("message", "transaction", "vertex_build"):
        _drop_flow_fk(inspector, table_name)
    # ### end Alembic commands ###


def _drop_flow_fk(inspector, table_name: str) -> None:
    """Drop the FK on ``table_name.flow_id`` that references ``flow``, if one exists."""
    fks = inspector.get_foreign_keys(table_name)
    # batch_alter_table is required for SQLite, which cannot ALTER constraints in place.
    with op.batch_alter_table(table_name, schema=None) as batch_op:
        for fk in fks:
            if fk.get("referred_table") == "flow" and "flow_id" in fk.get("constrained_columns", []):
                batch_op.drop_constraint(fk.get("name"), type_="foreignkey")
                break
def downgrade() -> None:
    """Re-create the foreign-key constraints on ``flow_id`` (referencing
    ``flow.id``) for the ``vertex_build``, ``transaction`` and ``message`` tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore each constraint with an explicit name, in the same order as before.
    fk_specs = [
        ('vertex_build', 'fk_vertex_build_flow_id'),
        ('transaction', 'fk_transaction_flow_id'),
        ('message', 'fk_message_flow_id'),
    ]
    for table_name, constraint_name in fk_specs:
        with op.batch_alter_table(table_name, schema=None) as batch_op:
            batch_op.create_foreign_key(constraint_name, 'flow', ['flow_id'], ['id'])
    # ### end Alembic commands ###

View file

@@ -1,5 +1,4 @@
from .api_key import ApiKey
from .file import File
from .flow import Flow
from .folder import Folder
from .message import MessageTable
@@ -7,4 +6,4 @@ from .transactions import TransactionTable
from .user import User
from .variable import Variable
__all__ = ["ApiKey", "File", "Flow", "Folder", "MessageTable", "TransactionTable", "User", "Variable"]
__all__ = ["ApiKey", "Flow", "Folder", "MessageTable", "TransactionTable", "User", "Variable"]

View file

@@ -23,8 +23,11 @@ from sqlmodel import JSON, Column, Field, Relationship, SQLModel
from langflow.schema import Data
if TYPE_CHECKING:
from langflow.services.database.models import TransactionTable
from langflow.services.database.models.folder import Folder
from langflow.services.database.models.message import MessageTable
from langflow.services.database.models.user import User
from langflow.services.database.models.vertex_builds.model import VertexBuildTable
HEX_COLOR_LENGTH = 7
@@ -187,6 +190,9 @@ class Flow(FlowBase, table=True):  # type: ignore[call-arg]
folder_id: UUID | None = Field(default=None, foreign_key="folder.id", nullable=True, index=True)
fs_path: str | None = Field(default=None, nullable=True)
folder: Optional["Folder"] = Relationship(back_populates="flows")
messages: list["MessageTable"] = Relationship(back_populates="flow")
transactions: list["TransactionTable"] = Relationship(back_populates="flow")
vertex_builds: list["VertexBuildTable"] = Relationship(back_populates="flow")
def to_data(self):
serialized = self.model_dump()

View file

@@ -5,7 +5,7 @@ from uuid import UUID, uuid4
from pydantic import field_serializer, field_validator
from sqlalchemy import Text
from sqlmodel import JSON, Column, Field, SQLModel
from sqlmodel import JSON, Column, Field, Relationship, SQLModel
from langflow.schema.content_block import ContentBlock
from langflow.schema.properties import Properties
@@ -13,6 +13,7 @@ from langflow.schema.validators import str_to_timestamp_validator
if TYPE_CHECKING:
from langflow.schema.message import Message
from langflow.services.database.models.flow.model import Flow
class MessageBase(SQLModel):
@@ -112,7 +113,8 @@ class MessageBase(SQLModel):
class MessageTable(MessageBase, table=True): # type: ignore[call-arg]
__tablename__ = "message"
id: UUID = Field(default_factory=uuid4, primary_key=True)
flow_id: UUID | None = Field(default=None)
flow_id: UUID | None = Field(default=None, foreign_key="flow.id")
flow: "Flow" = Relationship(back_populates="messages")
files: list[str] = Field(sa_column=Column(JSON))
properties: Properties = Field(default_factory=lambda: Properties().model_dump(), sa_column=Column(JSON)) # type: ignore[assignment]
category: str = Field(sa_column=Column(Text))

View file

@@ -1,12 +1,16 @@
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from uuid import UUID, uuid4
from pydantic import field_serializer, field_validator
from sqlmodel import JSON, Column, Field, SQLModel
from sqlmodel import JSON, Column, Field, Relationship, SQLModel
from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
from langflow.serialization.serialization import serialize
if TYPE_CHECKING:
from langflow.services.database.models.flow.model import Flow
class TransactionBase(SQLModel):
timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
@@ -16,7 +20,7 @@ class TransactionBase(SQLModel):
outputs: dict | None = Field(default=None, sa_column=Column(JSON))
status: str = Field(nullable=False)
error: str | None = Field(default=None)
flow_id: UUID = Field()
flow_id: UUID = Field(foreign_key="flow.id")
# Needed for Column(JSON)
class Config:
@@ -43,6 +47,7 @@ class TransactionBase(SQLModel):
class TransactionTable(TransactionBase, table=True): # type: ignore[call-arg]
__tablename__ = "transaction"
id: UUID | None = Field(default_factory=uuid4, primary_key=True)
flow: "Flow" = Relationship(back_populates="transactions")
class TransactionReadResponse(TransactionBase):

View file

@@ -1,13 +1,17 @@
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from uuid import UUID, uuid4
from pydantic import BaseModel, field_serializer, field_validator
from sqlalchemy import Text
from sqlmodel import JSON, Column, Field, SQLModel
from sqlmodel import JSON, Column, Field, Relationship, SQLModel
from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
from langflow.serialization.serialization import serialize
if TYPE_CHECKING:
from langflow.services.database.models.flow.model import Flow
class VertexBuildBase(SQLModel):
timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
@@ -16,7 +20,7 @@ class VertexBuildBase(SQLModel):
artifacts: dict | None = Field(default=None, sa_column=Column(JSON))
params: str | None = Field(default=None, sa_column=Column(Text, nullable=True))
valid: bool = Field(nullable=False)
flow_id: UUID = Field()
flow_id: UUID = Field(foreign_key="flow.id")
# Needed for Column(JSON)
class Config:
@@ -54,6 +58,7 @@ class VertexBuildBase(SQLModel):
class VertexBuildTable(VertexBuildBase, table=True): # type: ignore[call-arg]
__tablename__ = "vertex_build"
build_id: UUID | None = Field(default_factory=uuid4, primary_key=True)
flow: "Flow" = Relationship(back_populates="vertex_builds")
class VertexBuildMapModel(BaseModel):