Merge remote-tracking branch 'origin/dev' into update_lc

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-01-24 19:22:22 -03:00
commit 87d2db11ac
44 changed files with 1654 additions and 1098 deletions

View file

@ -45,6 +45,13 @@ run_frontend:
@-kill -9 `lsof -t -i:3000`
cd src/frontend && npm start
tests_frontend:
ifeq ($(UI), true)
cd src/frontend && ./run-tests.sh --ui
else
cd src/frontend && ./run-tests.sh
endif
run_cli:
poetry run langflow run --path src/frontend/build

View file

@ -1,11 +1,13 @@
import Admonition from '@theme/Admonition';
import Admonition from "@theme/Admonition";
# Embeddings
<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
<p>
We appreciate your understanding as we polish our documentation — it may contain some rough edges. Share your feedback or report issues to help us improve! 🛠️📝
</p>
<p>
We appreciate your understanding as we polish our documentation — it may
contain some rough edges. Share your feedback or report issues to help us
improve! 🛠️📝
</p>
</Admonition>
Embeddings are vector representations of text that capture the semantic meaning of the text. They are created using text embedding models and allow us to think about the text in a vector space, enabling us to perform tasks like semantic search, where we look for pieces of text that are most similar in the vector space.
@ -110,4 +112,12 @@ Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP).
- **top_k:** How the model selects tokens for output; the next token is selected from the `top_k` most probable tokens. Defaults to `40`.
- **top_p:** Tokens are selected from the most probable to the least probable until the sum of their probabilities reaches the `top_p` value. Defaults to `0.95`.
- **tuned_model_name:** The name of a tuned model. If provided, model_name is ignored.
- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to False, it will suppress the verbose output. Defaults to `False`.
- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to False, it will suppress the verbose output. Defaults to `False`.
### OllamaEmbeddings
Used to load [Ollamas](https://ollama.ai/) embedding models. Wrapper around LangChain's [Ollama API](https://python.langchain.com/docs/integrations/text_embedding/ollama).
- **model:** The name of the Ollama model to use. Defaults to `llama2`.
- **base_url:** The base URL for the Ollama API. Defaults to `http://localhost:11434`.
- **temperature:** Tunes the degree of randomness in text generations. Should be a non-negative value. Defaults to `0`.

1982
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.6.4"
version = "0.6.5a9"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -55,7 +55,7 @@ tiktoken = "~0.5.0"
wikipedia = "^1.4.0"
qdrant-client = "^1.7.0"
websockets = "^10.3"
weaviate-client = "^3.26.0"
weaviate-client = { version = "^4.4b6", allow-prereleases = true }
jina = "*"
sentence-transformers = { version = "^2.2.2", optional = true }
ctransformers = { version = "^0.2.10", optional = true }

View file

@ -1,15 +1,12 @@
import platform
import socket
import sys
import time
import webbrowser
from pathlib import Path
from typing import Optional
import httpx
import typer
from dotenv import load_dotenv
from multiprocess import Process, cpu_count # type: ignore
from multiprocess import cpu_count # type: ignore
from rich import box
from rich import print as rprint
from rich.console import Console
@ -212,23 +209,12 @@ def run(
run_on_windows(host, port, log_level, options, app)
else:
# Run using gunicorn on Linux
run_on_mac_or_linux(host, port, log_level, options, app, open_browser)
run_on_mac_or_linux(host, port, log_level, options, app)
def run_on_mac_or_linux(host, port, log_level, options, app, open_browser=True):
webapp_process = Process(target=run_langflow, args=(host, port, log_level, options, app))
webapp_process.start()
status_code = 0
while status_code != 200:
try:
status_code = httpx.get(f"http://{host}:{port}/health").status_code
except Exception:
time.sleep(1)
def run_on_mac_or_linux(host, port, log_level, options, app):
print_banner(host, port)
if open_browser:
webbrowser.open(f"http://{host}:{port}")
run_langflow(host, port, log_level, options, app)
def run_on_windows(host, port, log_level, options, app):
@ -303,19 +289,26 @@ def run_langflow(host, port, log_level, options, app):
Run Langflow server on localhost
"""
try:
if platform.system() in ["Windows"]:
if platform.system() in ["Windows", "Darwin"]:
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
# MacOS requires an env variable to be set to use gunicorn
import uvicorn
uvicorn.run(app, host=host, port=port, log_level=log_level)
uvicorn.run(
app,
host=host,
port=port,
log_level=log_level,
)
else:
from langflow.server import LangflowApplication
LangflowApplication(app, options).run()
except KeyboardInterrupt:
pass
logger.info("Shutting down server")
sys.exit(0)
except Exception as e:
logger.exception(e)
sys.exit(1)

View file

@ -27,7 +27,8 @@ def upgrade() -> None:
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.create_unique_constraint('uq_user_id', ['id'])
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
@ -44,6 +45,7 @@ def downgrade() -> None:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.drop_constraint('uq_apikey_id', type_='unique')
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -0,0 +1,71 @@
"""empty message
Revision ID: 0b8757876a7c
Revises: 006b3990db50
Create Date: 2024-01-17 10:32:56.686287
"""
from typing import Sequence, Union
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '0b8757876a7c'
down_revision: Union[str, None] = '006b3990db50'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_apikey_api_key'), ['api_key'], unique=True)
batch_op.create_index(batch_op.f('ix_apikey_name'), ['name'], unique=False)
batch_op.create_index(batch_op.f('ix_apikey_user_id'), ['user_id'], unique=False)
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_flow_description'), ['description'], unique=False)
batch_op.create_index(batch_op.f('ix_flow_name'), ['name'], unique=False)
batch_op.create_index(batch_op.f('ix_flow_user_id'), ['user_id'], unique=False)
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_user_username'), ['username'], unique=True)
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_user_username'))
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_flow_user_id'))
batch_op.drop_index(batch_op.f('ix_flow_name'))
batch_op.drop_index(batch_op.f('ix_flow_description'))
except Exception as e:
print(e)
pass
try:
with op.batch_alter_table('apikey', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_apikey_user_id'))
batch_op.drop_index(batch_op.f('ix_apikey_name'))
batch_op.drop_index(batch_op.f('ix_apikey_api_key'))
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -60,8 +60,8 @@ def upgrade() -> None:
sa.Column("create_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.Column("last_login_at", sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
sa.PrimaryKeyConstraint("id", name="pk_user"),
sa.UniqueConstraint("id", name="uq_user_id"),
)
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.create_index(
@ -83,8 +83,8 @@ def upgrade() -> None:
["user_id"],
["user.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
sa.PrimaryKeyConstraint("id", name="pk_apikey"),
sa.UniqueConstraint("id", name="uq_apikey_id"),
)
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.create_index(
@ -106,8 +106,8 @@ def upgrade() -> None:
["user_id"],
["user.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
sa.PrimaryKeyConstraint("id", name="pk_flow"),
sa.UniqueConstraint("id", name="uq_flow_id"),
)
# Conditionally create indices for 'flow' table
# if _alembic_tmp_flow exists, then we need to drop it first
@ -145,7 +145,7 @@ def upgrade() -> None:
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
# List existing tables
existing_tables = inspector.get_table_names()

View file

@ -29,9 +29,10 @@ def upgrade() -> None:
sa.Column('id', sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
sa.PrimaryKeyConstraint('id'),
)
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
@ -40,6 +41,7 @@ def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.drop_table('credential')
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -45,6 +45,7 @@ def downgrade() -> None:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.drop_column("is_component")
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -37,7 +37,6 @@ def upgrade() -> None:
with op.batch_alter_table('flow', schema=None) as batch_op:
batch_op.add_column(sa.Column('updated_at', sa.DateTime(), nullable=True))
batch_op.add_column(sa.Column('folder', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
# ### end Alembic commands ###

View file

@ -29,7 +29,8 @@ def upgrade() -> None:
except exc.SQLAlchemyError:
# connection.execute(text("ROLLBACK"))
pass
except Exception:
except Exception as e:
print(e)
pass
try:
@ -37,7 +38,8 @@ def upgrade() -> None:
except exc.SQLAlchemyError:
# connection.execute(text("ROLLBACK"))
pass
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
@ -57,14 +59,15 @@ def downgrade() -> None:
sa.Column("is_read_only", sa.BOOLEAN(), nullable=False),
sa.Column("create_at", sa.DATETIME(), nullable=False),
sa.Column("update_at", sa.DATETIME(), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.PrimaryKeyConstraint("id", name="pk_component"),
)
with op.batch_alter_table("component", schema=None) as batch_op:
batch_op.create_index("ix_component_name", ["name"], unique=False)
batch_op.create_index(
"ix_component_frontend_node_id", ["frontend_node_id"], unique=False
)
except Exception:
except Exception as e:
print(e)
pass
try:
@ -78,9 +81,10 @@ def downgrade() -> None:
["flow_id"],
["flow.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
sa.PrimaryKeyConstraint("id", name="pk_flowstyle"),
sa.UniqueConstraint("id", name="uq_flowstyle_id"),
)
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -7,10 +7,8 @@ Create Date: 2023-10-18 23:12:27.297016
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "f5ee9749d1a6"
@ -26,7 +24,8 @@ def upgrade() -> None:
batch_op.alter_column(
"user_id", existing_type=sa.CHAR(length=32), nullable=True
)
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
@ -39,7 +38,8 @@ def downgrade() -> None:
batch_op.alter_column(
"user_id", existing_type=sa.CHAR(length=32), nullable=False
)
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -21,7 +21,8 @@ def upgrade() -> None:
try:
with op.batch_alter_table('credential', schema=None) as batch_op:
batch_op.create_foreign_key("fk_credential_user_id", 'user', ['user_id'], ['id'])
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###
@ -32,7 +33,8 @@ def downgrade() -> None:
try:
with op.batch_alter_table('credential', schema=None) as batch_op:
batch_op.drop_constraint("fk_credential_user_id", type_='foreignkey')
except Exception:
except Exception as e:
print(e)
pass
# ### end Alembic commands ###

View file

@ -3,12 +3,10 @@ from typing import Annotated, Any, List, Optional, Union
import sqlalchemy as sa
from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status
from loguru import logger
from sqlmodel import select
from langflow.api.utils import update_frontend_node_with_template_values
from langflow.api.v1.schemas import (
CustomComponentCode,
PreloadResponse,
ProcessResponse,
TaskResponse,
TaskStatusResponse,
@ -17,12 +15,15 @@ from langflow.api.v1.schemas import (
from langflow.interface.custom.custom_component import CustomComponent
from langflow.interface.custom.directory_reader import DirectoryReader
from langflow.interface.custom.utils import build_custom_component_template
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.processing.process import build_graph_and_generate_result, process_graph_cached, process_tweaks
from langflow.services.auth.utils import api_key_security, get_current_active_user
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_session_service, get_settings_service, get_task_service
from langflow.services.session.service import SessionService
from loguru import logger
from sqlmodel import select
try:
from langflow.worker import process_graph_cached_task
@ -32,9 +33,8 @@ except ImportError:
raise NotImplementedError("Celery is not installed")
from sqlmodel import Session
from langflow.services.task.service import TaskService
from sqlmodel import Session
# build router
router = APIRouter(tags=["Base"])
@ -148,6 +148,55 @@ async def process_json(
raise HTTPException(status_code=500, detail=str(exc)) from exc
# Endpoint to preload a graph
@router.post("/process/preload/{flow_id}", response_model=PreloadResponse)
async def preload_flow(
session: Annotated[Session, Depends(get_session)],
flow_id: str,
session_id: Optional[str] = None,
session_service: SessionService = Depends(get_session_service),
api_key_user: User = Depends(api_key_security),
clear_session: Annotated[bool, Body(embed=True)] = False, # noqa: F821
):
try:
# Get the flow that matches the flow_id and belongs to the user
# flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first()
if clear_session:
session_service.clear_session(session_id)
# Check if the session exists
session_data = await session_service.load_session(session_id)
# Session data is a tuple of (graph, artifacts)
# or (None, None) if the session is empty
if isinstance(session_data, tuple):
graph, artifacts = session_data
is_clear = graph is None and artifacts is None
else:
is_clear = session_data is None
return PreloadResponse(session_id=session_id, is_clear=is_clear)
else:
if session_id is None:
session_id = flow_id
flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first()
if flow is None:
raise ValueError(f"Flow {flow_id} not found")
if flow.data is None:
raise ValueError(f"Flow {flow_id} has no data")
graph_data = flow.data
session_service.clear_session(session_id)
# Load the graph using SessionService
session_data = await session_service.load_session(session_id, graph_data)
graph, artifacts = session_data if session_data else (None, None)
if not graph:
raise ValueError("Graph not found in the session")
_ = await graph.build()
session_service.update_session(session_id, (graph, artifacts))
return PreloadResponse(session_id=session_id)
except Exception as exc:
logger.exception(exc)
raise HTTPException(status_code=500, detail=str(exc)) from exc
@router.post(
"/predict/{flow_id}",
response_model=ProcessResponse,
@ -167,36 +216,75 @@ async def process(
task_service: "TaskService" = Depends(get_task_service),
api_key_user: User = Depends(api_key_security),
sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821
session_service: SessionService = Depends(get_session_service),
):
"""
Endpoint to process an input with a given flow_id.
"""
try:
if api_key_user is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid API Key",
if session_id:
session_data = await session_service.load_session(session_id)
graph, artifacts = session_data if session_data else (None, None)
task_result: Any = None
task_status = None
task_id = None
if not graph:
raise ValueError("Graph not found in the session")
result = await build_graph_and_generate_result(
graph=graph,
inputs=inputs,
artifacts=artifacts,
session_id=session_id,
session_service=session_service,
)
task_id = str(id(result))
if isinstance(result, dict) and "result" in result:
task_result = result["result"]
session_id = result["session_id"]
elif hasattr(result, "result") and hasattr(result, "session_id"):
task_result = result.result
session_id = result.session_id
else:
task_result = result
if task_id:
task_response = TaskResponse(id=task_id, href=f"api/v1/task/{task_id}")
else:
task_response = None
return ProcessResponse(
result=task_result,
status=task_status,
task=task_response,
session_id=session_id,
backend=task_service.backend_name,
)
# Get the flow that matches the flow_id and belongs to the user
# flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first()
flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first()
if flow is None:
raise ValueError(f"Flow {flow_id} not found")
else:
if api_key_user is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid API Key",
)
if flow.data is None:
raise ValueError(f"Flow {flow_id} has no data")
graph_data = flow.data
return await process_graph_data(
graph_data=graph_data,
inputs=inputs,
tweaks=tweaks,
clear_cache=clear_cache,
session_id=session_id,
task_service=task_service,
sync=sync,
)
# Get the flow that matches the flow_id and belongs to the user
# flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first()
flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first()
if flow is None:
raise ValueError(f"Flow {flow_id} not found")
if flow.data is None:
raise ValueError(f"Flow {flow_id} has no data")
graph_data = flow.data
return await process_graph_data(
graph_data=graph_data,
inputs=inputs,
tweaks=tweaks,
clear_cache=clear_cache,
session_id=session_id,
task_service=task_service,
sync=sync,
)
except sa.exc.StatementError as exc:
# StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
if "badly formed hexadecimal UUID string" in str(exc):

View file

@ -64,6 +64,13 @@ class ProcessResponse(BaseModel):
backend: Optional[str] = None
class PreloadResponse(BaseModel):
"""Preload response schema."""
session_id: Optional[str] = None
is_clear: Optional[bool] = None
# TaskStatusResponse(
# status=task.status, result=task.result if task.ready() else None
# )

View file

@ -0,0 +1,64 @@
from langflow import CustomComponent
from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import AzureOpenAIEmbeddings


class AzureOpenAIEmbeddingsComponent(CustomComponent):
    """Langflow component wrapping LangChain's AzureOpenAIEmbeddings model."""

    display_name: str = "AzureOpenAIEmbeddings"
    description: str = "Embeddings model from Azure OpenAI."
    documentation: str = "https://python.langchain.com/docs/integrations/text_embedding/azureopenai"
    beta = False

    # Known Azure OpenAI REST API versions; the last (newest) entry is the default.
    API_VERSION_OPTIONS = [
        "2022-12-01",
        "2023-03-15-preview",
        "2023-05-15",
        "2023-06-01-preview",
        "2023-07-01-preview",
        "2023-08-01-preview",
    ]

    def build_config(self):
        """Return the UI field configuration for this component."""
        return {
            "azure_endpoint": {
                "display_name": "Azure Endpoint",
                "required": True,
                # Fixed doubled period in the help text ("resource.." -> "resource.").
                "info": "Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`",
            },
            "azure_deployment": {
                "display_name": "Deployment Name",
                "required": True,
            },
            "api_version": {
                "display_name": "API Version",
                "options": self.API_VERSION_OPTIONS,
                "value": self.API_VERSION_OPTIONS[-1],
                "advanced": True,
            },
            "api_key": {
                "display_name": "API Key",
                "required": True,
                "password": True,
            },
            "code": {"show": False},
        }

    def build(
        self,
        azure_endpoint: str,
        azure_deployment: str,
        api_version: str,
        api_key: str,
    ) -> Embeddings:
        """Instantiate the AzureOpenAIEmbeddings client.

        Raises:
            ValueError: if the client cannot be constructed/connected,
                chaining the original exception for debugging.
        """
        try:
            embeddings = AzureOpenAIEmbeddings(
                azure_endpoint=azure_endpoint,
                deployment=azure_deployment,
                openai_api_version=api_version,
                openai_api_key=api_key,
            )
        except Exception as e:
            raise ValueError("Could not connect to AzureOpenAIEmbeddings API.") from e
        return embeddings

View file

@ -0,0 +1,38 @@
from typing import Optional

from langflow import CustomComponent
from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import OllamaEmbeddings


class OllamaEmbeddingsComponent(CustomComponent):
    """Langflow component exposing Ollama's embedding models via LangChain."""

    display_name: str = "Ollama Embeddings"
    description: str = "Embeddings model from Ollama."
    documentation = "https://python.langchain.com/docs/integrations/text_embedding/ollama"
    beta = True

    def build_config(self):
        """Return the UI field configuration for this component."""
        config = {
            "model": {"display_name": "Ollama Model"},
            "base_url": {"display_name": "Ollama Base URL"},
            "temperature": {"display_name": "Model Temperature"},
            "code": {"show": False},
        }
        return config

    def build(
        self,
        model: str = "llama2",
        base_url: str = "http://localhost:11434",
        temperature: Optional[float] = None,
    ) -> Embeddings:
        """Instantiate an OllamaEmbeddings client for the given model/endpoint.

        Raises:
            ValueError: if the client cannot be constructed, chaining the
                original exception.
        """
        try:
            embeddings = OllamaEmbeddings(model=model, base_url=base_url, temperature=temperature)  # type: ignore
        except Exception as exc:
            raise ValueError("Could not connect to Ollama API.") from exc
        return embeddings

View file

@ -8,6 +8,7 @@ class AzureChatOpenAIComponent(CustomComponent):
display_name: str = "AzureChatOpenAI"
description: str = "LLM model from Azure OpenAI."
documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai"
beta = False
AZURE_OPENAI_MODELS = [
"gpt-35-turbo",
@ -18,11 +19,21 @@ class AzureChatOpenAIComponent(CustomComponent):
"gpt-4-vision",
]
AZURE_OPENAI_API_VERSIONS = [
"2023-03-15-preview",
"2023-05-15",
"2023-06-01-preview",
"2023-07-01-preview",
"2023-08-01-preview",
"2023-09-01-preview",
"2023-12-01-preview"
]
def build_config(self):
return {
"model": {
"display_name": "Model Name",
"value": "gpt-35-turbo",
"value": self.AZURE_OPENAI_MODELS[0],
"options": self.AZURE_OPENAI_MODELS,
"required": True,
},
@ -37,7 +48,8 @@ class AzureChatOpenAIComponent(CustomComponent):
},
"api_version": {
"display_name": "API Version",
"value": "2023-05-15",
"options": self.AZURE_OPENAI_API_VERSIONS,
"value": self.AZURE_OPENAI_API_VERSIONS[-1],
"required": True,
"advanced": True,
},
@ -54,6 +66,7 @@ class AzureChatOpenAIComponent(CustomComponent):
"required": False,
"field_type": "int",
"advanced": True,
"info": "Maximum number of tokens to generate.",
},
"code": {"show": False},
}
@ -64,16 +77,20 @@ class AzureChatOpenAIComponent(CustomComponent):
azure_endpoint: str,
azure_deployment: str,
api_key: str,
api_version: str = "2023-05-15",
api_version: str,
temperature: float = 0.7,
max_tokens: Optional[int] = 1000,
) -> BaseLanguageModel:
return AzureChatOpenAI(
model=model,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
api_version=api_version,
api_key=api_key,
temperature=temperature,
max_tokens=max_tokens,
)
try:
llm = AzureChatOpenAI(
model=model,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
api_version=api_version,
api_key=api_key,
temperature=temperature,
max_tokens=max_tokens,
)
except Exception as e:
raise ValueError("Could not connect to AzureOpenAI API.") from e
return llm

View file

@ -106,6 +106,8 @@ embeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/google_vertex_ai_palm"
AmazonBedrockEmbeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock"
OllamaEmbeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/ollama"
llms:
OpenAI:

View file

@ -67,7 +67,9 @@ Human: {input}
class MidJourneyPromptChain(BaseCustomConversationChain):
"""MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts."""
template: Optional[str] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program.
template: Optional[
str
] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program.
Your job is to provide detailed and creative descriptions that will inspire unique and interesting images from the AI.
Keep in mind that the AI is capable of understanding a wide range of language and can interpret abstract concepts, so feel free to be as imaginative and descriptive as possible.
For example, you could describe a scene from a futuristic city, or a surreal landscape filled with strange creatures.
@ -81,7 +83,9 @@ class MidJourneyPromptChain(BaseCustomConversationChain):
class TimeTravelGuideChain(BaseCustomConversationChain):
template: Optional[str] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information.
template: Optional[
str
] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information.
Current conversation:
{history}
Human: {input}

View file

@ -7,6 +7,7 @@ from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from langflow.api import router
from langflow.interface.utils import setup_llm_caching
from langflow.services.plugins.langfuse_plugin import LangfuseInstance
@ -102,11 +103,12 @@ def setup_app(static_files_dir: Optional[Path] = None, backend_only: bool = Fals
if __name__ == "__main__":
import uvicorn
from langflow.__main__ import get_number_of_workers
configure()
uvicorn.run(
create_app,
"langflow.main:create_app",
host="127.0.0.1",
port=7860,
workers=get_number_of_workers(),

View file

@ -7,9 +7,11 @@ from langchain.schema import AgentAction, Document
from langchain.vectorstores.base import VectorStore
from langchain_core.messages import AIMessage
from langchain_core.runnables.base import Runnable
from langflow.graph.graph.base import Graph
from langflow.interface.custom.custom_component import CustomComponent
from langflow.interface.run import build_sorted_vertices, get_memory_key, update_memory_keys
from langflow.services.deps import get_session_service
from langflow.services.session.service import SessionService
from loguru import logger
from pydantic import BaseModel
@ -220,13 +222,29 @@ async def process_graph_cached(
graph, artifacts = session if session else (None, None)
if not graph:
raise ValueError("Graph not found in the session")
result = await build_graph_and_generate_result(
graph=graph, session_id=session_id, inputs=inputs, artifacts=artifacts, session_service=session_service
)
return result
async def build_graph_and_generate_result(
graph: "Graph",
session_id: str,
inputs: Optional[Union[dict, List[dict]]] = None,
artifacts: Optional[Dict[str, Any]] = None,
session_service: Optional[SessionService] = None,
):
"""Build the graph and generate the result"""
built_object = await graph.build()
processed_inputs = process_inputs(inputs, artifacts or {})
result = await generate_result(built_object, processed_inputs)
# langchain_object is now updated with the new memory
# we need to update the cache with the updated langchain_object
session_service.update_session(session_id, (graph, artifacts))
if session_id and session_service:
session_service.update_session(session_id, (graph, artifacts))
return Result(result=result, session_id=session_id)

View file

@ -5,17 +5,16 @@ from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import command, util
from alembic.config import Config
from loguru import logger
from sqlalchemy import inspect
from sqlalchemy.exc import OperationalError
from sqlmodel import Session, SQLModel, create_engine, select, text
from langflow.services.base import Service
from langflow.services.database import models # noqa
from langflow.services.database.models.user.crud import get_user_by_username
from langflow.services.database.utils import Result, TableResults
from langflow.services.deps import get_settings_service
from langflow.services.utils import teardown_superuser
from loguru import logger
from sqlalchemy import inspect
from sqlalchemy.exc import OperationalError
from sqlmodel import Session, SQLModel, create_engine, select, text
if TYPE_CHECKING:
from sqlalchemy.engine import Engine
@ -40,7 +39,7 @@ class DatabaseService(Service):
connect_args = {"check_same_thread": False}
else:
connect_args = {}
return create_engine(self.database_url, connect_args=connect_args)
return create_engine(self.database_url, connect_args=connect_args, max_overflow=-1)
def __enter__(self):
self._session = Session(self.engine)

View file

@ -1,4 +1,4 @@
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Optional
from langflow.interface.run import build_sorted_vertices
from langflow.services.base import Service
@ -14,14 +14,15 @@ class SessionService(Service):
def __init__(self, cache_service):
self.cache_service: "BaseCacheService" = cache_service
async def load_session(self, key, data_graph):
async def load_session(self, key, data_graph: Optional[dict] = None):
# Check if the data is cached
if key in self.cache_service:
return self.cache_service.get(key)
if key is None:
key = self.generate_key(session_id=None, data_graph=data_graph)
if data_graph is None:
return (None, None)
# If not cached, build the graph and cache it
graph, artifacts = await build_sorted_vertices(data_graph)

View file

@ -23,7 +23,6 @@ import useFlowsManagerStore from "./stores/flowsManagerStore";
import { useTypesStore } from "./stores/typesStore";
export default function App() {
const errorData = useAlertStore((state) => state.errorData);
const errorOpen = useAlertStore((state) => state.errorOpen);
const setErrorOpen = useAlertStore((state) => state.setErrorOpen);

View file

@ -57,6 +57,7 @@ export default function ParameterComponent({
proxy,
showNode,
index = "",
isMinimized,
}: ParameterComponentType): JSX.Element {
const ref = useRef<HTMLDivElement>(null);
const refHtml = useRef<HTMLDivElement & ReactNode>(null);
@ -131,25 +132,24 @@ export default function ParameterComponent({
if (data.node!.template[name].value !== code) {
takeSnapshot();
}
setNode(data.id, (oldNode) => {
let newNode = cloneDeep(oldNode);
newNode.data = {
...newNode.data,
node: newNodeClass,
description: newNodeClass.description ?? data.node!.description,
display_name: newNodeClass.display_name ?? data.node!.display_name,
};
newNode.data.node.template[name].value = code;
return newNode;
});
updateNodeInternals(data.id);
renderTooltips();
};
@ -273,9 +273,11 @@ export default function ParameterComponent({
<Handle
type={left ? "target" : "source"}
position={left ? Position.Left : Position.Right}
key={proxy
? scapedJSONStringfy({ ...id, proxy })
: scapedJSONStringfy(id)}
key={
proxy
? scapedJSONStringfy({ ...id, proxy })
: scapedJSONStringfy(id)
}
id={
proxy
? scapedJSONStringfy({ ...id, proxy })
@ -286,7 +288,8 @@ export default function ParameterComponent({
}
className={classNames(
left ? "my-12 -ml-0.5 " : " my-12 -mr-0.5 ",
"h-3 w-3 rounded-full border-2 bg-background"
"h-3 w-3 rounded-full border-2 bg-background",
isMinimized ? "mt-0" : ""
)}
style={{
borderColor: color,
@ -348,9 +351,11 @@ export default function ParameterComponent({
<Handle
type={left ? "target" : "source"}
position={left ? Position.Left : Position.Right}
key={proxy
? scapedJSONStringfy({ ...id, proxy })
: scapedJSONStringfy(id)}
key={
proxy
? scapedJSONStringfy({ ...id, proxy })
: scapedJSONStringfy(id)
}
id={
proxy
? scapedJSONStringfy({ ...id, proxy })
@ -395,8 +400,8 @@ export default function ParameterComponent({
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={handleOnNewValue}
id={"textarea-" + index}
data-testid={"textarea-" + index}
id={"textarea-" + data.node.template[name].name}
data-testid={"textarea-" + data.node.template[name].name}
/>
) : (
<InputComponent

View file

@ -1,5 +1,5 @@
import { useEffect, useState } from "react";
import { NodeToolbar } from "reactflow";
import { NodeToolbar, useUpdateNodeInternals } from "reactflow";
import ShadTooltip from "../../components/ShadTooltipComponent";
import Tooltip from "../../components/TooltipComponent";
import IconComponent from "../../components/genericIconComponent";
@ -41,7 +41,9 @@ export default function GenericNode({
const [validationStatus, setValidationStatus] =
useState<validationStatusType | null>(null);
const [handles, setHandles] = useState<boolean[] | []>([]);
const [isMinimized, setIsMinimized] = useState<boolean>(false);
let numberOfInputs: boolean[] = [];
const updateNodeInternals = useUpdateNodeInternals();
const takeSnapshot = useFlowsManagerStore((state) => state.takeSnapshot);
@ -105,6 +107,10 @@ export default function GenericNode({
const nameEditable = data.node?.flow || data.type === "CustomComponent";
useEffect(() => {
updateNodeInternals(data.id);
}, [isMinimized]);
return (
<>
<NodeToolbar>
@ -123,6 +129,7 @@ export default function GenericNode({
}}
numberOfHandles={handles}
showNode={showNode}
setIsMinimized={setIsMinimized}
></NodeToolbarComponent>
</NodeToolbar>
@ -276,6 +283,7 @@ export default function GenericNode({
}
proxy={data.node?.template[templateField].proxy}
showNode={showNode}
isMinimized={isMinimized}
/>
)
)}
@ -302,6 +310,7 @@ export default function GenericNode({
type={data.node?.base_classes.join("|")}
left={false}
showNode={showNode}
isMinimized={isMinimized}
/>
</>
)}
@ -506,6 +515,7 @@ export default function GenericNode({
}
proxy={data.node?.template[templateField].proxy}
showNode={showNode}
isMinimized={isMinimized}
/>
) : (
<></>
@ -549,6 +559,7 @@ export default function GenericNode({
type={data.node?.base_classes.join("|")}
left={false}
showNode={showNode}
isMinimized={isMinimized}
/>
)}
</>

View file

@ -5,7 +5,6 @@ import BuildTrigger from "./buildTrigger";
import ChatTrigger from "./chatTrigger";
import * as _ from "lodash";
import { getBuildStatus } from "../../controllers/API";
import FormModal from "../../modals/formModal";
import useFlowStore from "../../stores/flowStore";
import { NodeType } from "../../types/flow";
@ -32,17 +31,6 @@ export default function Chat({ flow }: ChatType): JSX.Element {
};
}, [isBuilt]);
useEffect(() => {
// Define an async function within the useEffect hook
const fetchBuildStatus = async () => {
const response = await getBuildStatus(flow.id);
setIsBuilt(response.data.built);
};
// Call the async function
fetchBuildStatus();
}, [flow]);
const prevNodesRef = useRef<any[] | undefined>();
const nodes: NodeType[] = useNodes();
useEffect(() => {

View file

@ -35,7 +35,6 @@ import {
convertObjToArray,
convertValuesToNumbers,
hasDuplicateKeys,
unselectAllNodes,
} from "../../utils/reactflowUtils";
import { classNames } from "../../utils/utils";
import DictComponent from "../dictComponent";
@ -54,6 +53,7 @@ export default function CodeTabsComponent({
const [data, setData] = useState(flow ? flow["data"]!["nodes"] : null);
const [openAccordion, setOpenAccordion] = useState<string[]>([]);
const dark = useDarkStore((state) => state.dark);
const unselectAll = useFlowStore((state) => state.unselectAll);
const setNodes = useFlowStore((state) => state.setNodes);
@ -67,12 +67,7 @@ export default function CodeTabsComponent({
useEffect(() => {
if (tweaks && data) {
unselectAllNodes({
data,
updateNodes: (nodes) => {
setNodes(nodes);
},
});
unselectAll();
}
}, []);
@ -593,14 +588,7 @@ export default function CodeTabsComponent({
].type === "prompt" ? (
<div className="mx-auto">
<PromptAreaComponent
readonly={
node.data.node?.flow &&
node.data.node.template[
templateField
].dynamic
? true
: false
}
readonly={true}
editNode={true}
disabled={false}
value={
@ -643,14 +631,7 @@ export default function CodeTabsComponent({
<CodeAreaComponent
disabled={false}
editNode={true}
readonly={
node.data.node?.flow &&
node.data.node.template[
templateField
].dynamic
? true
: false
}
readonly={true}
value={
!node.data.node.template[
templateField

View file

@ -216,8 +216,16 @@ const EditNodeModal = forwardRef(
) : myData.node.template[templateParam]
.multiline ? (
<TextAreaComponent
id={"textarea-edit-" + index}
data-testid={"textarea-edit-" + index}
id={
"textarea-edit-" +
myData.node.template[templateParam]
.name
}
data-testid={
"textarea-edit-" +
myData.node.template[templateParam]
.name
}
disabled={disabled}
editNode={true}
value={
@ -448,9 +456,13 @@ const EditNodeModal = forwardRef(
onChange={(value: string | string[]) => {
handleOnNewValue(value, templateParam);
}}
id={"prompt-area-edit" + index}
id={
"prompt-area-edit-" +
myData.node.template[templateParam].name
}
data-testid={
"modal-prompt-input-" + index
"modal-prompt-input-" +
myData.node.template[templateParam].name
}
/>
</div>

View file

@ -126,7 +126,8 @@ export default function GenericModal({
if (
JSON.stringify(apiReturn.data?.frontend_node) !== JSON.stringify({})
) {
setNodeClass!(apiReturn.data?.frontend_node, inputValue);
if (setNodeClass)
setNodeClass(apiReturn.data?.frontend_node, inputValue);
setModalOpen(closeModal);
setIsEdit(false);
}

View file

@ -9,7 +9,6 @@ import ReactFlow, {
OnMove,
OnSelectionChangeParams,
SelectionDragHandler,
addEdge,
updateEdge,
} from "reactflow";
import GenericNode from "../../../../CustomNodes/GenericNode";
@ -19,13 +18,12 @@ import useFlowStore from "../../../../stores/flowStore";
import useFlowsManagerStore from "../../../../stores/flowsManagerStore";
import { useTypesStore } from "../../../../stores/typesStore";
import { APIClassType } from "../../../../types/api";
import { FlowType, NodeType, targetHandleType } from "../../../../types/flow";
import { FlowType, NodeType } from "../../../../types/flow";
import {
generateFlow,
generateNodeFromFlow,
getNodeId,
isValidConnection,
scapeJSONParse,
validateSelection,
} from "../../../../utils/reactflowUtils";
import { getRandomName, isWrappedWithClass } from "../../../../utils/utils";
@ -175,8 +173,8 @@ export default function Page({
useEffect(() => {
return () => {
cleanFlow();
}
}, [])
};
}, []);
const onConnectMod = useCallback(
(params: Connection) => {
@ -332,7 +330,7 @@ export default function Page({
<div className="h-full w-full">
<div className="h-full w-full" ref={reactFlowWrapper}>
{Object.keys(templates).length > 0 &&
Object.keys(types).length > 0 ? (
Object.keys(types).length > 0 ? (
<div id="react-flow-id" className="h-full w-full">
<ReactFlow
nodes={nodes}

View file

@ -4,7 +4,7 @@ import {
Select,
SelectContent,
SelectItem,
SelectTrigger
SelectTrigger,
} from "../../../../../components/ui/select-custom";
import { useDarkStore } from "../../../../../stores/darkStore";
import useFlowsManagerStore from "../../../../../stores/flowsManagerStore";

View file

@ -32,6 +32,7 @@ export default function NodeToolbarComponent({
setShowNode,
numberOfHandles,
showNode,
setIsMinimized,
}: nodeToolbarPropsType): JSX.Element {
const nodeLength = Object.keys(data.node!.template).filter(
(templateField) =>
@ -97,6 +98,10 @@ export default function NodeToolbarComponent({
showconfirmShare,
]);
useEffect(() => {
setIsMinimized(!showNode);
}, [showNode]);
const handleSelectChange = (event) => {
switch (event) {
case "advanced":

View file

@ -201,7 +201,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
.map((node) => ({ ...node, selected: false }))
.concat({ ...newNode, selected: false });
});
set({ nodes: newNodes });
get().setNodes(newNodes);
selection.edges.forEach((edge: Edge) => {
let source = idsMap[edge.source];
@ -245,7 +245,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
newEdges.map((edge) => ({ ...edge, selected: false }))
);
});
set({ edges: newEdges });
get().setEdges(newEdges);
},
setLastCopiedSelection: (newSelection) => {
set({ lastCopiedSelection: newSelection });
@ -265,7 +265,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
},
getFilterEdge: [],
onConnect: (connection) => {
let newEdges: Edge[] = []
let newEdges: Edge[] = [];
get().setEdges((oldEdges) => {
newEdges = addEdge(
{
@ -287,8 +287,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
oldEdges
);
return newEdges;
})
});
useFlowsManagerStore
.getState()
.autoSaveCurrentFlow(
@ -297,6 +296,17 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
get().reactFlowInstance?.getViewport() ?? { x: 0, y: 0, zoom: 1 }
);
},
unselectAll: () => {
let newNodes = cloneDeep(get().nodes);
newNodes.forEach((node) => {
node.selected = false;
let newEdges = cleanEdges(newNodes, get().edges);
set({
nodes: newNodes,
edges: newEdges,
});
});
},
}));
export default useFlowStore;

View file

@ -43,5 +43,4 @@ export const useTypesStore = create<TypesStoreType>((set, get) => ({
let newChange = typeof change === "function" ? change(get().data) : change;
set({ data: newChange });
},
}));

View file

@ -53,6 +53,7 @@ export type ParameterComponentType = {
showNode?: boolean;
index?: string;
onCloseModal?: (close: boolean) => void;
isMinimized?: boolean;
};
export type InputListComponentType = {
value: string[];
@ -479,6 +480,7 @@ export type nodeToolbarPropsType = {
setShowNode: (boolean: any) => void;
numberOfHandles: boolean[] | [];
showNode: boolean;
setIsMinimized: (boolean: boolean) => void;
};
export type parsedDataType = {

View file

@ -54,4 +54,5 @@ export type FlowStoreType = {
setFilterEdge: (newState) => void;
getFilterEdge: any[];
onConnect: (connection: Connection) => void;
unselectAll: () => void;
};

View file

@ -475,7 +475,7 @@ export function convertArrayToObj(arrayOfObjects) {
export function hasDuplicateKeys(array) {
const keys = {};
// Transforms an empty object into an object array without opening the 'editNode' modal to prevent the flow build from breaking.
if (!Array.isArray(array)) array = [{"": ""}];
if (!Array.isArray(array)) array = [{ "": "" }];
for (const obj of array) {
for (const key in obj) {
if (keys[key]) {

View file

@ -44,7 +44,7 @@ test("PromptTemplateComponent", async ({ page }) => {
await page.getByTestId("genericModalBtnSave").click();
await page.getByTestId("div-textarea-5").click();
await page.getByTestId("div-textarea-prompt").click();
await page.getByTestId("text-area-modal").fill("prompt_value_!@#!@#");
value = await page.getByTestId("text-area-modal").inputValue();
@ -55,7 +55,7 @@ test("PromptTemplateComponent", async ({ page }) => {
await page.getByTestId("genericModalBtnSave").click();
await page.getByTestId("div-textarea-6").click();
await page.getByTestId("div-textarea-prompt1").click();
await page
.getByTestId("text-area-modal")
.fill("prompt_name_test_123123!@#!@#");
@ -77,29 +77,31 @@ test("PromptTemplateComponent", async ({ page }) => {
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
value = await page.locator('//*[@id="textarea-edit-1"]').inputValue();
value = await page.locator('//*[@id="textarea-edit-prompt"]').inputValue();
if (value != "prompt_value_!@#!@#") {
expect(false).toBeTruthy();
}
value = await page.locator('//*[@id="textarea-edit-2"]').inputValue();
value = await page.locator('//*[@id="textarea-edit-prompt1"]').inputValue();
if (value != "prompt_name_test_123123!@#!@#") {
expect(false).toBeTruthy();
}
value = await page.locator('//*[@id="prompt-area-edit0"]').innerText();
value = await page
.locator('//*[@id="prompt-area-edit-template"]')
.innerText();
if (value != "{prompt} example {prompt1}") {
expect(false).toBeTruthy();
}
await page
.locator('//*[@id="textarea-edit-2"]')
.locator('//*[@id="textarea-edit-prompt1"]')
.fill("prompt_edit_test_12312312321!@#$");
await page
.locator('//*[@id="textarea-edit-1"]')
.locator('//*[@id="textarea-edit-prompt"]')
.fill("prompt_edit_test_44444444444!@#$");
await page.locator('//*[@id="showtemplate"]').click();
@ -141,35 +143,29 @@ test("PromptTemplateComponent", async ({ page }) => {
await page.locator('//*[@id="saveChangesBtn"]').click();
const plusButtonLocator = page.locator('//*[@id="textarea-8"]');
const elementCount = await plusButtonLocator.count();
if (elementCount === 0) {
expect(true).toBeTruthy();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showprompt1"]').click();
expect(await page.locator('//*[@id="showprompt1"]').isChecked()).toBeTruthy();
await page.locator('//*[@id="showprompt1"]').click();
expect(
await page.locator('//*[@id="showprompt1"]').isChecked()
).toBeTruthy();
value = await page.locator('//*[@id="textarea-edit-prompt"]').inputValue();
value = await page.locator('//*[@id="textarea-edit-1"]').inputValue();
if (value != "prompt_edit_test_44444444444!@#$") {
expect(false).toBeTruthy();
}
if (value != "prompt_edit_test_44444444444!@#$") {
expect(false).toBeTruthy();
}
value = await page.locator('//*[@id="textarea-edit-prompt1"]').inputValue();
value = await page.locator('//*[@id="textarea-edit-2"]').inputValue();
if (value != "prompt_edit_test_12312312321!@#$") {
expect(false).toBeTruthy();
}
if (value != "prompt_edit_test_12312312321!@#$") {
expect(false).toBeTruthy();
}
value = await page
.locator('//*[@id="prompt-area-edit-template"]')
.innerText();
value = await page.locator('//*[@id="prompt-area-edit0"]').innerText();
if (value != "{prompt} example {prompt1}") {
expect(false).toBeTruthy();
}
if (value != "{prompt} example {prompt1}") {
expect(false).toBeTruthy();
}
});

View file

@ -67,7 +67,7 @@ test.describe("group node test", () => {
await page.getByRole("button", { name: "Group" }).click();
const textArea = page.getByTestId("div-textarea-2");
const textArea = page.getByTestId("div-textarea-description");
const elementCountText = await textArea.count();
if (elementCountText > 0) {
expect(true).toBeTruthy();

View file

@ -26,8 +26,7 @@ test("NestedComponent", async ({ page }) => {
.getByTestId("vectorstoresPinecone")
.first()
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
await page.click('//*[@id="react-flow-id"]');
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();

View file

@ -74,7 +74,7 @@ test.describe("save component tests", () => {
await page.getByRole("button", { name: "Group" }).click();
let textArea = page.getByTestId("div-textarea-2");
let textArea = page.getByTestId("div-textarea-description");
let elementCountText = await textArea.count();
if (elementCountText > 0) {
expect(true).toBeTruthy();
@ -102,7 +102,7 @@ test.describe("save component tests", () => {
await page.mouse.up();
await page.mouse.down();
textArea = page.getByTestId("div-textarea-2");
textArea = page.getByTestId("div-textarea-description");
elementCountText = await textArea.count();
if (elementCountText > 0) {
expect(true).toBeTruthy();