Merge remote-tracking branch 'origin/dev' into new-style-features

This commit is contained in:
Rodrigo Nader 2023-06-28 12:35:23 -03:00
commit 80a73f93fe
129 changed files with 6607 additions and 3026 deletions

15
Dockerfile Normal file
View file

@ -0,0 +1,15 @@
FROM python:3.10-slim
RUN apt-get update && apt-get install gcc g++ git make -y && apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH
WORKDIR $HOME/app
COPY --chown=user . $HOME/app
RUN pip install langflow>==0.0.86 -U --user
CMD ["python", "-m", "langflow", "--host", "0.0.0.0", "--port", "7860"]

View file

@ -44,13 +44,18 @@ install_backend:
backend:
make install_backend
poetry run uvicorn langflow.main:app --port 7860 --reload --log-level debug
poetry run uvicorn src.backend.langflow.main:app --port 7860 --reload --log-level debug
build_and_run:
echo 'Removing dist folder'
rm -rf dist
make build && poetry run pip install dist/*.tar.gz && poetry run langflow
build_and_install:
echo 'Removing dist folder'
rm -rf dist
make build && poetry run pip install dist/*.tar.gz
build_frontend:
cd src/frontend && CI='' npm run build
cp -r src/frontend/build src/backend/langflow/frontend

View file

@ -2,10 +2,9 @@
# ⛓️ LangFlow
~ A User Interface For [LangChain](https://github.com/hwchase17/langchain) ~
~ An effortless way to experiment and prototype [LangChain](https://github.com/hwchase17/langchain) pipelines ~
<p>
<a href="https://huggingface.co/spaces/Logspace/LangFlow"><img src="https://huggingface.co/datasets/huggingface/badges/raw/main/open-in-hf-spaces-sm.svg" alt="HuggingFace Spaces"></a>
<img alt="GitHub Contributors" src="https://img.shields.io/github/contributors/logspace-ai/langflow" />
<img alt="GitHub Last Commit" src="https://img.shields.io/github/last-commit/logspace-ai/langflow" />
<img alt="" src="https://img.shields.io/github/repo-size/logspace-ai/langflow" />
@ -14,10 +13,18 @@
<img alt="Github License" src="https://img.shields.io/github/license/logspace-ai/langflow" />
</p>
<p>
<a href="https://discord.gg/EqksyE2EX9"><img alt="Discord Server" src="https://dcbadge.vercel.app/api/server/EqksyE2EX9?compact=true&style=flat"/></a>
<a href="https://huggingface.co/spaces/Logspace/LangFlow"><img src="https://huggingface.co/datasets/huggingface/badges/raw/main/open-in-hf-spaces-sm.svg" alt="HuggingFace Spaces"></a>
</p>
<a href="https://github.com/logspace-ai/langflow">
<img width="100%" src="https://github.com/logspace-ai/langflow/blob/main/img/langflow-demo.gif?raw=true"></a>
LangFlow is a GUI for [LangChain](https://github.com/hwchase17/langchain), designed with [react-flow](https://github.com/wbkd/react-flow) to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chat box.
<p>
</p>
## 📦 Installation
### <b>Locally</b>
@ -50,11 +57,11 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud.
Start by installing `langchain-serve` with
Start by installing `langchain-serve` with
```bash
pip install -U langchain-serve
```
```
Then, run:
@ -109,24 +116,38 @@ You can use Langflow directly on your browser, or use the API endpoints on Jina
<summary>Show API usage (with python)</summary>
```python
import json
import requests
import requests
FLOW_PATH = "Time_traveller.json"
BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict"
FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468"
# You can tweak the flow by adding a tweaks dictionary
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
TWEAKS = {
"ChatOpenAI-g4jEr": {},
"ConversationChain-UidfJ": {}
}
# HOST = 'http://localhost:7860'
HOST = 'https://langflow-f1ed20e309.wolf.jina.ai'
API_URL = f'{HOST}/predict'
def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
"""
Run a flow with a given message and optional tweaks.
def predict(message):
with open(FLOW_PATH, "r") as f:
json_data = json.load(f)
payload = {'exported_flow': json_data, 'message': message}
response = requests.post(API_URL, json=payload)
return response.json()
:param message: The message to send to the flow
:param flow_id: The ID of the flow to run
:param tweaks: Optional tweaks to customize the flow
:return: The JSON response from the flow
"""
api_url = f"{BASE_API_URL}/{flow_id}"
payload = {"message": message}
predict('Take me to 1920s Bangalore')
if tweaks:
payload["tweaks"] = tweaks
response = requests.post(api_url, json=payload)
return response.json()
# Setup any tweaks you want to apply to the flow
print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
```
```json
@ -164,6 +185,11 @@ flow("Hey, have you heard of LangFlow?")
We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make LangFlow more accessible.
Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions and showcase your projects! 🦾
<p>
</p>
[![Star History Chart](https://api.star-history.com/svg?repos=logspace-ai/langflow&type=Timeline)](https://star-history.com/#logspace-ai/langflow&Date)

1813
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,11 +1,12 @@
[tool.poetry]
name = "langflow"
version = "0.0.88"
version = "0.2.2"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
"Cristhian Zanforlin <cristhian.lousa@gmail.com>",
"Gabriel Almeida <gabriel@logspace.ai>",
"Ibis Prevedello <ibiscp@gmail.com>",
"Gustavo Schaedler <gustavopoa@gmail.com>",
"Lucas Eduoli <lucaseduoli@gmail.com>",
"Otávio Anovazzi <otavio2204@gmail.com>",
]
@ -22,22 +23,21 @@ langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.9,<3.12"
fastapi = "^0.96.0"
uvicorn = "^0.20.0"
fastapi = "^0.98.0"
uvicorn = "^0.22.0"
beautifulsoup4 = "^4.11.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.7.0"
typer = "^0.9.0"
gunicorn = "^20.1.0"
langchain = "^0.0.200"
langchain = "^0.0.215"
openai = "^0.27.8"
types-pyyaml = "^6.0.12.8"
dill = "^0.3.6"
pandas = "^1.5.3"
chromadb = "^0.3.21"
huggingface-hub = "^0.13.3"
rich = "^13.3.3"
llama-cpp-python = "^0.1.50"
rich = "^13.4.2"
llama-cpp-python = "~0.1.0"
networkx = "^3.1"
unstructured = "^0.5.11"
pypdf = "^3.7.1"
@ -46,25 +46,33 @@ pysrt = "^1.1.2"
fake-useragent = "^1.1.3"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^11.0.0"
tiktoken = "^0.3.3"
pyarrow = "^12.0.0"
tiktoken = "~0.4.0"
wikipedia = "^1.4.0"
langchain-serve = { version = ">0.0.39", optional = true }
qdrant-client = "^1.2.0"
websockets = "^11.0.3"
weaviate-client = "^3.19.2"
websockets = "^10.3"
weaviate-client = "^3.21.0"
jina = "3.15.2"
sentence-transformers = "^2.2.2"
ctransformers = "^0.2.2"
cohere = "^4.6.0"
python-multipart = "^0.0.6"
sqlmodel = "^0.0.8"
faiss-cpu = "^1.7.4"
anthropic = "^0.2.9"
orjson = "^3.9.0"
anthropic = "^0.2.10"
orjson = "^3.9.1"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
types-cachetools = "^5.3.0.5"
appdirs = "^1.4.4"
pinecone-client = "^2.2.2"
supabase = "^1.0.3"
pymongo = "^4.4.0"
certifi = "^2023.5.7"
[tool.poetry.group.dev.dependencies]
[tool.poetry.dev-dependencies]
black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"
@ -76,6 +84,7 @@ requests = "^2.28.0"
pytest-cov = "^4.0.0"
pandas-stubs = "^2.0.0.230412"
types-pillow = "^9.5.0.2"
types-appdirs = "^1.4.3.5"
[tool.poetry.extras]

View file

@ -1,17 +1,18 @@
import sys
import time
from fastapi import FastAPI
import httpx
from multiprocess import Process, cpu_count # type: ignore
import platform
from pathlib import Path
from typing import Optional
import socket
from rich.panel import Panel
from rich import box
from rich import print as rprint
import typer
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from langflow.main import create_app
from langflow.settings import settings
from langflow.utils.logger import configure, logger
@ -29,17 +30,20 @@ def get_number_of_workers(workers=None):
def update_settings(
config: str,
cache: str,
dev: bool = False,
database_url: Optional[str] = None,
save_api_keys: bool = True,
remove_api_keys: bool = False,
):
"""Update the settings from a config file."""
if config:
settings.update_from_yaml(config, dev=dev)
if database_url:
settings.update_settings(database_url=database_url)
if save_api_keys:
settings.update_settings(save_api_keys=save_api_keys)
if remove_api_keys:
settings.update_settings(remove_api_keys=remove_api_keys)
if cache:
settings.update_settings(cache=cache)
def serve_on_jcloud():
@ -101,6 +105,11 @@ def serve(
),
log_level: str = typer.Option("critical", help="Logging level."),
log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file."),
cache: str = typer.Argument(
envvar="LANGCHAIN_CACHE",
help="Type of cache to use. (InMemoryCache, SQLiteCache)",
default="SQLiteCache",
),
jcloud: bool = typer.Option(False, help="Deploy on Jina AI Cloud"),
dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"),
database_url: str = typer.Option(
@ -114,26 +123,12 @@ def serve(
open_browser: bool = typer.Option(
True, help="Open the browser after starting the server."
),
save_api_keys: bool = typer.Option(
True, help="Save API keys in your projects for future use."
remove_api_keys: bool = typer.Option(
False, help="Remove API keys from the projects saved in the database."
),
):
"""
Run the Langflow server.
Args:
host (str): Host to bind the server to.
workers (int): Number of worker processes.
timeout (int): Worker timeout in seconds.
port (int): Port to listen on.
config (str): Path to the configuration file.
env_file (Path): Path to the .env file containing environment variables.
log_level (str): Logging level.
log_file (Path): Path to the log file.
jcloud (bool): Deploy on Jina AI Cloud.
dev (bool): Run in development mode (may contain bugs).
path (str): Path to the frontend directory containing build files. This is for development purposes only.
open_browser (bool): Open the browser after starting the server.
"""
if jcloud:
@ -143,20 +138,25 @@ def serve(
configure(log_level=log_level, log_file=log_file)
update_settings(
config, dev=dev, database_url=database_url, save_api_keys=save_api_keys
config,
dev=dev,
database_url=database_url,
remove_api_keys=remove_api_keys,
cache=cache,
)
app = create_app()
# get the directory of the current file
if not path:
frontend_path = Path(__file__).parent
static_files_dir = frontend_path / "frontend"
else:
static_files_dir = Path(path)
app.mount(
"/",
StaticFiles(directory=static_files_dir, html=True),
name="static",
)
app = create_app()
setup_static_files(app, static_files_dir)
# check if port is being used
if is_port_in_use(port, host):
port = get_free_port(port)
options = {
"bind": f"{host}:{port}",
"workers": get_number_of_workers(workers),
@ -164,6 +164,17 @@ def serve(
"timeout": timeout,
}
if platform.system() in ["Windows"]:
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
# MacOS requires an env variable to be set to use gunicorn
run_on_windows(host, port, log_level, options, app)
else:
# Run using gunicorn on Linux
run_on_mac_or_linux(host, port, log_level, options, app, open_browser)
def run_on_mac_or_linux(host, port, log_level, options, app, open_browser=True):
webapp_process = Process(
target=run_langflow, args=(host, port, log_level, options, app)
)
@ -171,7 +182,8 @@ def serve(
status_code = 0
while status_code != 200:
try:
status_code = httpx.get(f"http://{host}:{port}").status_code
status_code = httpx.get(f"http://{host}:{port}/health").status_code
except Exception:
time.sleep(1)
@ -180,11 +192,72 @@ def serve(
webbrowser.open(f"http://{host}:{port}")
def run_on_windows(host, port, log_level, options, app):
"""
Run the Langflow server on Windows.
"""
print_banner(host, port)
run_langflow(host, port, log_level, options, app)
def setup_static_files(app: FastAPI, static_files_dir: Path):
"""
Setup the static files directory.
Args:
app (FastAPI): FastAPI app.
path (str): Path to the static files directory.
"""
app.mount(
"/",
StaticFiles(directory=static_files_dir, html=True),
name="static",
)
@app.exception_handler(404)
async def custom_404_handler(request, __):
path = static_files_dir / "index.html"
if not path.exists():
raise RuntimeError(f"File at path {path} does not exist.")
return FileResponse(path)
def is_port_in_use(port, host="localhost"):
"""
Check if a port is in use.
Args:
port (int): The port number to check.
host (str): The host to check the port on. Defaults to 'localhost'.
Returns:
bool: True if the port is in use, False otherwise.
"""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
return s.connect_ex((host, port)) == 0
def get_free_port(port):
"""
Given a used port, find a free port.
Args:
port (int): The port number to check.
Returns:
int: A free port number.
"""
while is_port_in_use(port):
port += 1
return port
def print_banner(host, port):
# console = Console()
word = "LangFlow"
colors = ["#690080", "#660099", "#4d00b3", "#3300cc", "#1a00e6", "#0000ff"]
colors = ["#3300cc"]
styled_word = ""

View file

@ -1,4 +1,3 @@
import json
from fastapi import (
APIRouter,
HTTPException,
@ -7,15 +6,16 @@ from fastapi import (
status,
)
from fastapi.responses import StreamingResponse
from langflow.api.v1.schemas import BuiltResponse, InitResponse
from langflow.api.v1.schemas import BuiltResponse, InitResponse, StreamData
from langflow.chat.manager import ChatManager
from langflow.graph.graph.base import Graph
from langflow.utils.logger import logger
from cachetools import LRUCache
router = APIRouter(tags=["Chat"])
chat_manager = ChatManager()
flow_data_store = {}
flow_data_store: LRUCache = LRUCache(maxsize=10)
@router.websocket("/chat/{client_id}")
@ -38,7 +38,8 @@ async def init_build(graph_data: dict):
try:
flow_id = graph_data.get("id")
if flow_id is None:
raise ValueError("No ID provided")
flow_data_store[flow_id] = graph_data
return InitResponse(flowId=flow_id)
@ -69,26 +70,39 @@ async def stream_build(flow_id: str):
"""Stream the build process based on stored flow data."""
async def event_stream(flow_id):
final_response = json.dumps({"end_of_stream": True})
final_response = {"end_of_stream": True}
try:
if flow_id not in flow_data_store:
error_message = "Invalid session ID"
yield f"data: {json.dumps({'error': error_message})}\n\n"
yield str(StreamData(event="error", data={"error": error_message}))
return
graph_data = flow_data_store[flow_id].get("data")
if not graph_data:
error_message = "No data provided"
yield f"data: {json.dumps({'error': error_message})}\n\n"
yield str(StreamData(event="error", data={"error": error_message}))
return
logger.debug("Building langchain object")
graph = Graph.from_payload(graph_data)
for node in graph.generator_build():
try:
# Some error could happen when building the graph
graph = Graph.from_payload(graph_data)
except Exception as exc:
logger.exception(exc)
error_message = str(exc)
yield str(StreamData(event="error", data={"error": error_message}))
return
number_of_nodes = len(graph.nodes)
for i, vertex in enumerate(graph.generator_build(), 1):
try:
node.build()
params = node._built_object_repr()
log_dict = {
"log": f"Building node {vertex.vertex_type}",
}
yield str(StreamData(event="log", data=log_dict))
vertex.build()
params = vertex._built_object_repr()
valid = True
logger.debug(
f"Building node {params[:50]}{'...' if len(params) > 50 else ''}"
@ -97,21 +111,21 @@ async def stream_build(flow_id: str):
params = str(exc)
valid = False
response = json.dumps(
{
"valid": valid,
"params": params,
"id": node.id,
}
)
yield f"data: {response}\n\n"
response = {
"valid": valid,
"params": params,
"id": vertex.id,
"progress": round(i / number_of_nodes, 2),
}
yield str(StreamData(event="message", data=response))
chat_manager.set_cache(flow_id, graph.build())
except Exception as exc:
logger.error("Error while building the flow: %s", exc)
yield f"error: {json.dumps({'error': str(exc)})}\n\n"
yield str(StreamData(event="error", data={"error": str(exc)}))
finally:
yield f"data: {final_response}\n\n"
yield str(StreamData(event="message", data=final_response))
try:
return StreamingResponse(event_stream(flow_id), media_type="text/event-stream")

View file

@ -1,13 +1,14 @@
from typing import Optional
from langflow.cache.utils import save_uploaded_file
from langflow.database.models.flow import Flow
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.utils.logger import logger
from fastapi import APIRouter, Depends, HTTPException
from fastapi.security import HTTPBearer
from fastapi import APIRouter, Depends, HTTPException, UploadFile
from langflow.api.v1.schemas import (
PredictRequest,
PredictResponse,
ProcessResponse,
UploadFileResponse,
)
from langflow.interface.types import build_langchain_types_dict
@ -17,48 +18,41 @@ from sqlmodel import Session
# build router
router = APIRouter(tags=["Base"])
security = HTTPBearer()
def get_flow_from_token(
bearer: HTTPBearer = Depends(security), session: Session = Depends(get_session)
) -> str:
# Extract the token, which is the flow_id in this case
flow_id = bearer.credentials
# Check if the flow_id exists in the database
flow = session.get(Flow, flow_id)
if flow is None:
raise HTTPException(status_code=401, detail="Invalid token")
return flow
@router.get("/all")
def get_all():
return build_langchain_types_dict()
@router.post("/predict/{flow_id}", response_model=PredictResponse)
async def predict_flow(
predict_request: PredictRequest,
# For backwards compatibility we will keep the old endpoint
@router.post("/predict/{flow_id}", response_model=ProcessResponse)
@router.post("/process/{flow_id}", response_model=ProcessResponse)
async def process_flow(
flow_id: str,
inputs: Optional[dict] = None,
tweaks: Optional[dict] = None,
session: Session = Depends(get_session),
):
"""
Endpoint to process a message using the flow passed in the bearer token.
Endpoint to process an input with a given flow_id.
"""
try:
flow = session.get(Flow, flow_id)
if flow is None:
raise ValueError(f"Flow {flow_id} not found")
graph_data = flow.data
if predict_request.tweaks:
graph_data = process_tweaks(graph_data, predict_request.tweaks)
response = process_graph_cached(graph_data, predict_request.message)
return PredictResponse(
result=response.get("result", ""),
intermediate_steps=response.get("thought", ""),
if flow.data is None:
raise ValueError(f"Flow {flow_id} has no data")
graph_data = flow.data
if tweaks:
try:
graph_data = process_tweaks(graph_data, tweaks)
except Exception as exc:
logger.error(f"Error processing tweaks: {exc}")
response = process_graph_cached(graph_data, inputs)
return ProcessResponse(
result=response,
)
except Exception as e:
# Log stack trace
@ -66,6 +60,21 @@ async def predict_flow(
raise HTTPException(status_code=500, detail=str(e)) from e
@router.post("/upload/{flow_id}", response_model=UploadFileResponse, status_code=201)
async def create_upload_file(file: UploadFile, flow_id: str):
# Cache file
try:
file_path = save_uploaded_file(file.file, folder_name=flow_id)
return UploadFileResponse(
flowId=flow_id,
file_path=file_path,
)
except Exception as exc:
logger.error(f"Error saving file: {exc}")
raise HTTPException(status_code=500, detail=str(exc)) from exc
# get endpoint to return version of langflow
@router.get("/version")
def get_version():

View file

@ -1,6 +1,6 @@
from typing import List
from uuid import UUID
from langflow import settings
from langflow.settings import settings
from langflow.api.utils import remove_api_keys
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.database.models.flow import (
@ -61,7 +61,7 @@ def update_flow(
if not db_flow:
raise HTTPException(status_code=404, detail="Flow not found")
flow_data = flow.dict(exclude_unset=True)
if settings.save_api_keys:
if settings.remove_api_keys:
flow_data = remove_api_keys(flow_data)
for key, value in flow_data.items():
setattr(db_flow, key, value)

View file

@ -1,6 +1,8 @@
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from langflow.database.models.flow import FlowCreate, FlowRead
from pydantic import BaseModel, Field, validator
import json
class GraphData(BaseModel):
@ -19,34 +21,22 @@ class ExportedFlow(BaseModel):
data: GraphData
class PredictRequest(BaseModel):
"""Predict request schema."""
class InputRequest(BaseModel):
input: dict
message: str
class TweaksRequest(BaseModel):
tweaks: Optional[Dict[str, Dict[str, str]]] = Field(default_factory=dict)
class Config:
schema_extra = {
"example": {
"message": "Hello, how are you?",
"tweaks": {
"dndnode_986363f0-4677-4035-9f38-74b94af5dd78": {
"name": "A tool name",
"description": "A tool description",
},
"dndnode_986363f0-4677-4035-9f38-74b94af57378": {
"template": "A {template}",
},
},
}
}
class UpdateTemplateRequest(BaseModel):
template: dict
class PredictResponse(BaseModel):
"""Predict response schema."""
class ProcessResponse(BaseModel):
"""Process response schema."""
result: str
intermediate_steps: str = ""
result: dict
class ChatMessage(BaseModel):
@ -101,3 +91,18 @@ class InitResponse(BaseModel):
class BuiltResponse(BaseModel):
built: bool
class UploadFileResponse(BaseModel):
"""Upload file response schema."""
flowId: str
file_path: Path
class StreamData(BaseModel):
event: str
data: dict
def __str__(self) -> str:
return f"event: {self.event}\ndata: {json.dumps(self.data)}\n\n"

View file

@ -1,5 +1,3 @@
import json
from fastapi import APIRouter, HTTPException
from langflow.api.v1.base import (
@ -9,8 +7,6 @@ from langflow.api.v1.base import (
PromptValidationResponse,
validate_prompt,
)
from langflow.graph.vertex.types import VectorStoreVertex
from langflow.graph import Graph
from langflow.utils.logger import logger
from langflow.utils.validate import validate_code
@ -37,21 +33,3 @@ def post_validate_prompt(prompt: Prompt):
except Exception as e:
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
# validate node
@router.post("/node/{node_id}", status_code=200)
def post_validate_node(node_id: str, data: dict):
try:
# build graph
graph = Graph.from_payload(data)
# validate node
node = graph.get_node(node_id)
if node is None:
raise ValueError(f"Node {node_id} not found")
if not isinstance(node, VectorStoreVertex):
node.build()
return json.dumps({"valid": True, "params": str(node._built_object_repr())})
except Exception as e:
logger.exception(e)
return json.dumps({"valid": False, "params": str(e)})

View file

@ -17,7 +17,6 @@ class BaseCache(abc.ABC):
Returns:
The value associated with the key, or None if the key is not found.
"""
pass
@abc.abstractmethod
def set(self, key, value):
@ -28,7 +27,6 @@ class BaseCache(abc.ABC):
key: The key of the item.
value: The value to cache.
"""
pass
@abc.abstractmethod
def delete(self, key):
@ -38,14 +36,12 @@ class BaseCache(abc.ABC):
Args:
key: The key of the item to remove.
"""
pass
@abc.abstractmethod
def clear(self):
"""
Clear all items from the cache.
"""
pass
@abc.abstractmethod
def __contains__(self, key):
@ -58,7 +54,6 @@ class BaseCache(abc.ABC):
Returns:
True if the key is in the cache, False otherwise.
"""
pass
@abc.abstractmethod
def __getitem__(self, key):
@ -67,11 +62,7 @@ class BaseCache(abc.ABC):
Args:
key: The key of the item to retrieve.
Returns:
The value associated with the key, or None if the key is not found.
"""
pass
@abc.abstractmethod
def __setitem__(self, key, value):
@ -82,7 +73,6 @@ class BaseCache(abc.ABC):
key: The key of the item.
value: The value to cache.
"""
pass
@abc.abstractmethod
def __delitem__(self, key):
@ -92,4 +82,3 @@ class BaseCache(abc.ABC):
Args:
key: The key of the item to remove.
"""
pass

View file

@ -8,16 +8,17 @@ import tempfile
from collections import OrderedDict
from pathlib import Path
from typing import Any, Dict
import dill # type: ignore
from appdirs import user_cache_dir
CACHE: Dict[str, Any] = {}
CACHE_DIR = user_cache_dir("langflow", "langflow")
def create_cache_folder(func):
def wrapper(*args, **kwargs):
# Get the destination folder
cache_path = Path(tempfile.gettempdir()) / PREFIX
cache_path = Path(CACHE_DIR) / PREFIX
# Create the destination folder if it doesn't exist
os.makedirs(cache_path, exist_ok=True)
@ -119,7 +120,7 @@ def save_binary_file(content: str, file_name: str, accepted_types: list[str]) ->
raise ValueError(f"File {file_name} is not accepted")
# Get the destination folder
cache_path = Path(tempfile.gettempdir()) / PREFIX
cache_path = Path(CACHE_DIR) / PREFIX
if not content:
raise ValueError("Please, reload the file in the loader.")
data = content.split(",")[1]
@ -136,19 +137,43 @@ def save_binary_file(content: str, file_name: str, accepted_types: list[str]) ->
@create_cache_folder
def save_cache(hash_val: str, chat_data, clean_old_cache_files: bool):
cache_path = Path(tempfile.gettempdir()) / PREFIX / f"{hash_val}.dill"
with cache_path.open("wb") as cache_file:
dill.dump(chat_data, cache_file)
def save_uploaded_file(file, folder_name):
"""
Save an uploaded file to the specified folder with a hash of its content as the file name.
if clean_old_cache_files:
clear_old_cache_files()
Args:
file: The uploaded file object.
folder_name: The name of the folder to save the file in.
Returns:
The path to the saved file.
"""
cache_path = Path(CACHE_DIR)
folder_path = cache_path / folder_name
@create_cache_folder
def load_cache(hash_val):
cache_path = Path(tempfile.gettempdir()) / PREFIX / f"{hash_val}.dill"
if cache_path.exists():
with cache_path.open("rb") as cache_file:
return dill.load(cache_file)
return None
# Create the folder if it doesn't exist
if not folder_path.exists():
folder_path.mkdir()
# Create a hash of the file content
sha256_hash = hashlib.sha256()
# Reset the file cursor to the beginning of the file
file.seek(0)
# Iterate over the uploaded file in small chunks to conserve memory
while chunk := file.read(8192): # Read 8KB at a time (adjust as needed)
sha256_hash.update(chunk)
# Use the hex digest of the hash as the file name
hex_dig = sha256_hash.hexdigest()
file_name = hex_dig
# Reset the file cursor to the beginning of the file
file.seek(0)
# Save the file with the hash as its name
file_path = folder_path / file_name
with open(file_path, "wb") as new_file:
while chunk := file.read(8192):
new_file.write(chunk)
return file_path

View file

@ -144,6 +144,8 @@ class ChatManager:
if isinstance(msg, FileResponse):
if msg.data_type == "image":
# Base64 encode the image
if isinstance(msg.data, str):
continue
msg.data = pil_to_base64(msg.data)
file_responses.append(msg)
if msg.type == "start":

View file

@ -3,7 +3,7 @@ agents:
- ZeroShotAgent
- JsonAgent
- CSVAgent
- initialize_agent
- AgentInitializer
- VectorStoreAgent
- VectorStoreRouterAgent
- SQLAgent
@ -48,6 +48,8 @@ documentloaders:
- ReadTheDocsLoader
- SlackDirectoryLoader
- NotionDirectoryLoader
- DirectoryLoader
- GitLoader
embeddings:
- OpenAIEmbeddings
- HuggingFaceEmbeddings
@ -57,7 +59,7 @@ llms:
# - AzureOpenAI
# - AzureChatOpenAI
- ChatOpenAI
- LlamaCpp
- LlamaCpp
- CTransformers
- Cohere
- Anthropic
@ -129,6 +131,9 @@ vectorstores:
- Qdrant
- Weaviate
- FAISS
- Pinecone
- SupabaseVectorStore
- MongoDBAtlasVectorSearch
wrappers:
- RequestsWrapper
# - ChatPromptTemplate

View file

@ -13,7 +13,7 @@ CUSTOM_NODES = {
"agents": {
"JsonAgent": frontend_node.agents.JsonAgentNode(),
"CSVAgent": frontend_node.agents.CSVAgentNode(),
"initialize_agent": frontend_node.agents.InitializeAgentNode(),
"AgentInitializer": frontend_node.agents.InitializeAgentNode(),
"VectorStoreAgent": frontend_node.agents.VectorStoreAgentNode(),
"VectorStoreRouterAgent": frontend_node.agents.VectorStoreRouterAgentNode(),
"SQLAgent": frontend_node.agents.SQLAgentNode(),

View file

@ -11,6 +11,7 @@ from langflow.graph.vertex.types import (
from langflow.interface.tools.constants import FILE_TOOLS
from langflow.utils import payload
from langflow.utils.logger import logger
from langchain.chains.base import Chain
class Graph:
@ -99,7 +100,7 @@ class Graph:
]
return connected_nodes
def build(self) -> List[Vertex]:
def build(self) -> Chain:
"""Builds the graph."""
# Get root node
root_node = payload.get_root_node(self)
@ -145,7 +146,7 @@ class Graph:
def generator_build(self) -> Generator:
"""Builds each vertex in the graph and yields it."""
sorted_vertices = self.topological_sort()
logger.info("Sorted vertices: %s", sorted_vertices)
logger.debug("Sorted vertices: %s", sorted_vertices)
yield from sorted_vertices
def get_node_neighbors(self, node: Vertex) -> Dict[Vertex, int]:

View file

@ -30,9 +30,6 @@ from langflow.interface.wrappers.base import wrapper_creator
from typing import Dict, Type
DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"]
VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = {
**{t: PromptVertex for t in prompt_creator.to_list()},
**{t: AgentVertex for t in agent_creator.to_list()},

View file

@ -1,6 +1,5 @@
from langflow.cache import utils as cache_utils
from langflow.graph.vertex.constants import DIRECT_TYPES
from langflow.interface import loading
from langflow.utils.constants import DIRECT_TYPES
from langflow.interface.initialize import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.utils.logger import logger
from langflow.utils.util import sync_to_async
@ -90,12 +89,7 @@ class Vertex:
# Load the type in value.get('suffixes') using
# what is inside value.get('content')
# value.get('value') is the file name
file_name = value.get("value")
content = value.get("content")
type_to_load = value.get("suffixes")
file_path = cache_utils.save_binary_file(
content=content, file_name=file_name, accepted_types=type_to_load
)
file_path = value.get("file_path")
params[key] = file_path

View file

@ -1 +1 @@
DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"]

View file

@ -106,9 +106,6 @@ class VectorStoreVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="vectorstores")
def _built_object_repr(self):
return "Vector stores can take time to build. It will build on the first query."
class MemoryVertex(Vertex):
def __init__(self, data: Dict):

View file

@ -6,6 +6,7 @@ from langchain.agents import (
Tool,
ZeroShotAgent,
initialize_agent,
AgentType,
)
from langchain.agents.agent_toolkits import (
SQLDatabaseToolkit,
@ -64,7 +65,9 @@ class JsonAgent(CustomAgentExecutor):
llm=llm,
prompt=prompt,
)
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names) # type: ignore
agent = ZeroShotAgent(
llm_chain=llm_chain, allowed_tools=tool_names # type: ignore
)
return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
def run(self, *args, **kwargs):
@ -111,7 +114,9 @@ class CSVAgent(CustomAgentExecutor):
prompt=partial_prompt,
)
tool_names = {tool.name for tool in tools}
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs) # type: ignore
agent = ZeroShotAgent(
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
@ -148,7 +153,9 @@ class VectorStoreAgent(CustomAgentExecutor):
prompt=prompt,
)
tool_names = {tool.name for tool in tools}
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs) # type: ignore
agent = ZeroShotAgent(
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return AgentExecutor.from_agent_and_tools(
agent=agent, tools=tools, verbose=True
)
@ -186,7 +193,7 @@ class SQLAgent(CustomAgentExecutor):
from langchain.tools.sql_database.tool import (
InfoSQLDatabaseTool,
ListSQLDatabaseTool,
QueryCheckerTool,
QuerySQLCheckerTool,
QuerySQLDataBaseTool,
)
@ -201,7 +208,7 @@ class SQLAgent(CustomAgentExecutor):
QuerySQLDataBaseTool(db=db), # type: ignore
InfoSQLDatabaseTool(db=db), # type: ignore
ListSQLDatabaseTool(db=db), # type: ignore
QueryCheckerTool(db=db, llm_chain=llmchain, llm=llm), # type: ignore
QuerySQLCheckerTool(db=db, llm_chain=llmchain, llm=llm), # type: ignore
]
prefix = SQL_PREFIX.format(dialect=toolkit.dialect, top_k=10)
@ -216,7 +223,9 @@ class SQLAgent(CustomAgentExecutor):
prompt=prompt,
)
tool_names = {tool.name for tool in tools} # type: ignore
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs) # type: ignore
agent = ZeroShotAgent(
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return AgentExecutor.from_agent_and_tools(
agent=agent,
tools=tools, # type: ignore
@ -252,14 +261,20 @@ class VectorStoreRouterAgent(CustomAgentExecutor):
):
"""Construct a vector store router agent from an LLM and tools."""
tools = vectorstoreroutertoolkit.get_tools()
tools = (
vectorstoreroutertoolkit
if isinstance(vectorstoreroutertoolkit, list)
else vectorstoreroutertoolkit.get_tools()
)
prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_ROUTER_PREFIX)
llm_chain = LLMChain(
llm=llm,
prompt=prompt,
)
tool_names = {tool.name for tool in tools}
agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs) # type: ignore
agent = ZeroShotAgent(
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return AgentExecutor.from_agent_and_tools(
agent=agent, tools=tools, verbose=True
)
@ -269,11 +284,11 @@ class VectorStoreRouterAgent(CustomAgentExecutor):
class InitializeAgent(CustomAgentExecutor):
"""Implementation of initialize_agent function"""
"""Implementation of AgentInitializer function"""
@staticmethod
def function_name():
return "initialize_agent"
return "AgentInitializer"
@classmethod
def initialize(
@ -283,6 +298,9 @@ class InitializeAgent(CustomAgentExecutor):
agent: str,
memory: Optional[BaseChatMemory] = None,
):
# Find which value in the AgentType enum corresponds to the string
# passed in as agent
agent = AgentType(agent)
return initialize_agent(
tools=tools,
llm=llm,
@ -302,7 +320,7 @@ class InitializeAgent(CustomAgentExecutor):
CUSTOM_AGENTS = {
"JsonAgent": JsonAgent,
"CSVAgent": CSVAgent,
"initialize_agent": InitializeAgent,
"AgentInitializer": InitializeAgent,
"VectorStoreAgent": VectorStoreAgent,
"VectorStoreRouterAgent": VectorStoreRouterAgent,
"SQLAgent": SQLAgent,

View file

@ -68,7 +68,7 @@ class LangChainTypeCreator(BaseModel, ABC):
value=value.get("value", None),
suffixes=value.get("suffixes", []),
file_types=value.get("fileTypes", []),
content=value.get("content", None),
file_path=value.get("file_path", None),
)
for key, value in signature["template"].items()
if key != "_type"

View file

@ -20,7 +20,10 @@ class ChainCreator(LangChainTypeCreator):
return ChainFrontendNode
#! We need to find a better solution for this
from_method_nodes = {"ConversationalRetrievalChain": "from_llm"}
from_method_nodes = {
"ConversationalRetrievalChain": "from_llm",
"LLMCheckerChain": "from_llm",
}
@property
def type_to_loader_dict(self) -> Dict:

View file

@ -97,7 +97,7 @@ class TimeTravelGuideChain(BaseCustomConversationChain):
class CombineDocsChain(CustomChain):
"""Implementation of initialize_agent function"""
"""Implementation of load_qa_chain function"""
@staticmethod
def function_name():

View file

@ -17,14 +17,14 @@ from langflow.interface.importing.utils import import_class
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.chains.custom import CUSTOM_CHAINS
## LLMs
# LLMs
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["anthropic-chat"] = ChatAnthropic # type: ignore
llm_type_to_cls_dict["azure-chat"] = AzureChatOpenAI # type: ignore
llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore
## Toolkits
# Toolkits
toolkit_type_to_loader_dict: dict[str, Any] = {
toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
# if toolkit_name is lower case it is a loader
@ -39,25 +39,25 @@ toolkit_type_to_cls_dict: dict[str, Any] = {
if not toolkit_name.islower()
}
## Memories
# Memories
memory_type_to_cls_dict: dict[str, Any] = {
memory_name: import_class(f"langchain.memory.{memory_name}")
for memory_name in memory.__all__
}
## Wrappers
# Wrappers
wrapper_type_to_cls_dict: dict[str, Any] = {
wrapper.__name__: wrapper for wrapper in [requests.RequestsWrapper]
}
## Embeddings
# Embeddings
embedding_type_to_cls_dict: dict[str, Any] = {
embedding_name: import_class(f"langchain.embeddings.{embedding_name}")
for embedding_name in embeddings.__all__
}
## Document Loaders
# Document Loaders
documentloaders_type_to_cls_dict: dict[str, Any] = {
documentloader_name: import_class(
f"langchain.document_loaders.{documentloader_name}"
@ -65,7 +65,7 @@ documentloaders_type_to_cls_dict: dict[str, Any] = {
for documentloader_name in document_loaders.__all__
}
## Text Splitters
# Text Splitters
textsplitter_type_to_cls_dict: dict[str, Any] = dict(
inspect.getmembers(text_splitter, inspect.isclass)
)

View file

@ -1,31 +1,21 @@
import json
from typing import Any, Callable, Dict, Optional
from typing import Any, Callable, Dict, Sequence
from langchain.agents import ZeroShotAgent
from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.agents.loading import load_agent_from_config
from langchain.agents.tools import Tool
from langchain.base_language import BaseLanguageModel
from langchain.callbacks.base import BaseCallbackManager
from langchain.chains.loading import load_chain_from_config
from langchain.llms.loading import load_llm_from_config
from langchain.agents.tools import BaseTool
from langflow.interface.initialize.vector_store import vecstore_initializer
from pydantic import ValidationError
from langflow.interface.custom_lists import CUSTOM_NODES
from langflow.interface.importing.utils import get_function, import_by_type
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.types import get_type_list
from langflow.interface.utils import load_file_into_dict
from langflow.utils import util, validate
from langflow.utils import validate
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
@ -153,24 +143,48 @@ def instantiate_embedding(class_object, params):
def instantiate_vectorstore(class_object, params):
if len(params.get("documents", [])) == 0:
raise ValueError(
"The source you provided did not load correctly or was empty."
"This may cause an error in the vectorstore."
)
# Chroma requires all metadata values to not be None
if class_object.__name__ == "Chroma":
for doc in params["documents"]:
if doc.metadata is None:
doc.metadata = {}
for key, value in doc.metadata.items():
if value is None:
doc.metadata[key] = ""
return class_object.from_documents(**params)
search_kwargs = params.pop("search_kwargs", {})
if initializer := vecstore_initializer.get(class_object.__name__):
vecstore = initializer(class_object, params)
else:
if "texts" in params:
params["documents"] = params.pop("texts")
vecstore = class_object.from_documents(**params)
# ! This might not work. Need to test
if search_kwargs and hasattr(vecstore, "as_retriever"):
vecstore = vecstore.as_retriever(search_kwargs=search_kwargs)
return vecstore
def instantiate_documentloader(class_object, params):
return class_object(**params).load()
if "file_filter" in params:
# file_filter will be a string but we need a function
# that will be used to filter the files using file_filter
# like lambda x: x.endswith(".txt") but as we don't know
# anything besides the string, we will simply check if the string is
# in x and if it is, we will return True
file_filter = params.pop("file_filter", None)
extensions = file_filter.split(",")
params["file_filter"] = lambda x: any(
extension.strip() in x for extension in extensions
)
metadata = params.pop("metadata", None)
docs = class_object(**params).load()
if metadata:
if isinstance(metadata, str):
try:
metadata = json.loads(metadata)
except json.JSONDecodeError as exc:
raise ValueError(
"The metadata you provided is not a valid JSON string."
) from exc
for doc in docs:
doc.metadata = metadata
return docs
def instantiate_textsplitter(class_object, params):
@ -207,48 +221,14 @@ def replace_zero_shot_prompt_with_prompt_template(nodes):
return nodes
def load_langchain_type_from_config(config: Dict[str, Any]):
"""Load langchain type from config"""
# Get type list
type_list = get_type_list()
if config["_type"] in type_list["agents"]:
config = util.update_verbose(config, new_value=False)
return load_agent_executor_from_config(config, verbose=True)
elif config["_type"] in type_list["chains"]:
config = util.update_verbose(config, new_value=False)
return load_chain_from_config(config, verbose=True)
elif config["_type"] in type_list["llms"]:
config = util.update_verbose(config, new_value=True)
return load_llm_from_config(config)
else:
raise ValueError("Type should be either agent, chain or llm")
def load_agent_executor_from_config(
config: dict,
llm: Optional[BaseLanguageModel] = None,
tools: Optional[list[Tool]] = None,
callback_manager: Optional[BaseCallbackManager] = None,
**kwargs: Any,
):
tools = load_tools_from_config(config["allowed_tools"])
config["allowed_tools"] = [tool.name for tool in tools] if tools else []
agent_obj = load_agent_from_config(config, llm, tools, **kwargs)
return AgentExecutor.from_agent_and_tools(
agent=agent_obj,
tools=tools,
callback_manager=callback_manager,
**kwargs,
)
def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs):
"""Load agent executor from agent class, tools and chain"""
allowed_tools = params.get("allowed_tools", [])
allowed_tools: Sequence[BaseTool] = params.get("allowed_tools", [])
llm_chain = params["llm_chain"]
# if allowed_tools is not a list or set, make it a list
if not isinstance(allowed_tools, (list, set)):
if not isinstance(allowed_tools, (list, set)) and isinstance(
allowed_tools, BaseTool
):
allowed_tools = [allowed_tools]
tool_names = [tool.name for tool in allowed_tools]
# Agent class requires an output_parser but Agent classes
@ -267,46 +247,6 @@ def load_toolkits_executor(node_type: str, toolkit: BaseToolkit, params: dict):
return create_function(llm=llm, toolkit=toolkit)
def load_tools_from_config(tool_list: list[dict]) -> list:
"""Load tools based on a config list.
Args:
config: config list.
Returns:
List of tools.
"""
tools = []
for tool in tool_list:
tool_type = tool.pop("_type")
llm_config = tool.pop("llm", None)
llm = load_llm_from_config(llm_config) if llm_config else None
kwargs = tool
if tool_type in _BASE_TOOLS:
tools.append(_BASE_TOOLS[tool_type]())
elif tool_type in _LLM_TOOLS:
if llm is None:
raise ValueError(f"Tool {tool_type} requires an LLM to be provided")
tools.append(_LLM_TOOLS[tool_type](llm))
elif tool_type in _EXTRA_LLM_TOOLS:
if llm is None:
raise ValueError(f"Tool {tool_type} requires an LLM to be provided")
_get_llm_tool_func, extra_keys = _EXTRA_LLM_TOOLS[tool_type]
if missing_keys := set(extra_keys).difference(kwargs):
raise ValueError(
f"Tool {tool_type} requires some parameters that were not "
f"provided: {missing_keys}"
)
tools.append(_get_llm_tool_func(llm=llm, **kwargs))
elif tool_type in _EXTRA_OPTIONAL_TOOLS:
_get_tool_func, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
kwargs = {k: value for k, value in kwargs.items() if value}
tools.append(_get_tool_func(**kwargs))
else:
raise ValueError(f"Got unknown tool {tool_type}")
return tools
def build_prompt_template(prompt, tools):
"""Build PromptTemplate from ZeroShotPrompt"""
prefix = prompt["node"]["template"]["prefix"]["value"]

View file

@ -0,0 +1,223 @@
import json
from typing import Any, Callable, Dict, Type
from langchain.vectorstores import (
Pinecone,
Qdrant,
Chroma,
FAISS,
Weaviate,
SupabaseVectorStore,
MongoDBAtlasVectorSearch,
)
import os
def docs_in_params(params: dict) -> bool:
    """Return True when params carries a non-empty "documents" or "texts" entry.

    The signature promises a bool, so coerce the truthiness check: the previous
    form (`"documents" in params and params["documents"]`) leaked the container
    itself (e.g. a list) to callers instead of True.
    """
    return bool(params.get("documents")) or bool(params.get("texts"))
def initialize_mongodb(class_object: Type[MongoDBAtlasVectorSearch], params: dict):
    """Build a MongoDB Atlas vector search object from node params.

    Pops `mongodb_atlas_cluster_uri`, `db_name`, `collection_name` and
    `index_name` out of `params` (raising ValueError when any is missing),
    then either wraps an existing collection or ingests the given documents.
    """
    MONGODB_ATLAS_CLUSTER_URI = params.pop("mongodb_atlas_cluster_uri")
    if not MONGODB_ATLAS_CLUSTER_URI:
        raise ValueError("Mongodb atlas cluster uri must be provided in the params")
    # Imported lazily so the module loads even when pymongo is not installed.
    from pymongo import MongoClient
    import certifi
    client: MongoClient = MongoClient(
        MONGODB_ATLAS_CLUSTER_URI, tlsCAFile=certifi.where()
    )
    db_name = params.pop("db_name", None)
    collection_name = params.pop("collection_name", None)
    if not db_name or not collection_name:
        raise ValueError("db_name and collection_name must be provided in the params")
    index_name = params.pop("index_name", None)
    if not index_name:
        raise ValueError("index_name must be provided in the params")
    collection = client[db_name][collection_name]
    # No documents/texts supplied: attach to the existing collection.
    if not docs_in_params(params):
        # __init__ requires collection, embedding and index_name
        init_args = {
            "collection": collection,
            "index_name": index_name,
            "embedding": params.get("embedding"),
        }
        return class_object(**init_args)
    # from_documents expects a `documents` kwarg, so normalize `texts`.
    if "texts" in params:
        params["documents"] = params.pop("texts")
    params["collection"] = collection
    params["index_name"] = index_name
    return class_object.from_documents(**params)
def initialize_supabase(class_object: Type[SupabaseVectorStore], params: dict):
    """Build a SupabaseVectorStore from node params.

    Requires `supabase_url` and `supabase_service_key` in params; connects a
    Supabase client, then either wraps the existing store (no documents) or
    ingests the provided documents via `from_documents`.
    """
    # Imported lazily so the module loads even when supabase is not installed.
    from supabase.client import Client, create_client
    if "supabase_url" not in params or "supabase_service_key" not in params:
        raise ValueError("Supabase url and service key must be provided in the params")
    # from_documents expects a `documents` kwarg, so normalize `texts`.
    if "texts" in params:
        params["documents"] = params.pop("texts")
    client_kwargs = {
        "supabase_url": params.pop("supabase_url"),
        "supabase_key": params.pop("supabase_service_key"),
    }
    supabase: Client = create_client(**client_kwargs)
    # No documents/texts: drop any leftover (empty) entries and wrap the store.
    if not docs_in_params(params):
        params.pop("documents", None)
        params.pop("texts", None)
        return class_object(client=supabase, **params)
    # If there are docs in the params, create a new index
    return class_object.from_documents(client=supabase, **params)
def initialize_weaviate(class_object: Type[Weaviate], params: dict):
    """Build a Weaviate vector store from node params.

    Without documents/texts, connects a weaviate client (merging any JSON
    `client_kwargs` over `weaviate_url`) and wraps the existing index;
    otherwise ingests the provided documents via `from_documents`.
    """
    if not docs_in_params(params):
        # Imported lazily so the module loads even when weaviate is absent.
        import weaviate  # type: ignore
        # `client_kwargs` arrives as a JSON string from the frontend template.
        client_kwargs_json = params.get("client_kwargs", "{}")
        client_kwargs = json.loads(client_kwargs_json)
        client_params = {
            "url": params.get("weaviate_url"),
        }
        # Explicit client_kwargs entries override the url default.
        client_params.update(client_kwargs)
        weaviate_client = weaviate.Client(**client_params)
        new_params = {
            "client": weaviate_client,
            "index_name": params.get("index_name"),
            "text_key": params.get("text_key"),
        }
        return class_object(**new_params)
    # If there are docs in the params, create a new index
    # from_documents expects a `documents` kwarg, so normalize `texts`.
    if "texts" in params:
        params["documents"] = params.pop("texts")
    return class_object.from_documents(**params)
def initialize_faiss(class_object: Type[FAISS], params: dict):
    """Initialize faiss and return the class object"""
    if not docs_in_params(params):
        # NOTE(review): this returns the *unbound* `load_local` method itself,
        # not a FAISS instance — confirm the caller invokes it with a path.
        return class_object.load_local
    save_local = params.get("save_local")
    # NOTE(review): `save_local` is read with `get`, so it stays in `params`
    # and is forwarded to the constructor below — confirm FAISS accepts it.
    faiss_index = class_object(**params)
    if save_local:
        # Persist the index to disk when a folder path was provided.
        faiss_index.save_local(folder_path=save_local)
    return faiss_index
def initialize_pinecone(class_object: Type[Pinecone], params: dict):
    """Initialize pinecone and return the class object"""
    # Imported lazily so the module loads even when pinecone is not installed.
    import pinecone  # type: ignore

    api_key = params.get("pinecone_api_key")
    environment = params.get("pinecone_env")

    # When either credential is missing from params, fall back to the
    # environment; a set environment variable takes precedence in that case.
    if api_key is None or environment is None:
        env_api_key = os.getenv("PINECONE_API_KEY")
        if env_api_key is not None:
            api_key = env_api_key
        env_environment = os.getenv("PINECONE_ENV")
        if env_environment is not None:
            environment = env_environment

    if api_key is None or environment is None:
        raise ValueError(
            "Pinecone API key and environment must be provided in the params"
        )

    # initialize pinecone
    pinecone.init(
        api_key=api_key,  # find at app.pinecone.io
        environment=environment,  # next to api key in console
    )

    # With no documents/texts in params, connect to an existing index,
    # forwarding only the keys from_existing_index understands.
    if not docs_in_params(params):
        existing_index_params = {"embedding": params.pop("embedding")}
        for optional_key in ("index_name", "namespace"):
            if optional_key in params:
                existing_index_params[optional_key] = params.pop(optional_key)
        return class_object.from_existing_index(**existing_index_params)

    # Otherwise build a fresh index; from_documents expects `documents`.
    if "texts" in params:
        params["documents"] = params.pop("texts")
    return class_object.from_documents(**params)
def initialize_chroma(class_object: Type[Chroma], params: dict):
    """Initialize a ChromaDB object from the params"""
    persist = params.pop("persist", False)
    if not docs_in_params(params):
        # No documents/texts: wrap an existing store; Chroma's constructor
        # takes the embedding under `embedding_function`.
        params.pop("documents", None)
        params.pop("texts", None)
        params["embedding_function"] = params.pop("embedding")
        chromadb = class_object(**params)
    else:
        # from_documents expects a `documents` kwarg, so normalize `texts`.
        if "texts" in params:
            params["documents"] = params.pop("texts")
        # Chroma requires all metadata values to not be None, so replace
        # missing metadata with an empty dict and None values with "".
        for doc in params["documents"]:
            if doc.metadata is None:
                doc.metadata = {}
            for key, value in doc.metadata.items():
                if value is None:
                    doc.metadata[key] = ""
        chromadb = class_object.from_documents(**params)
    if persist:
        # Flush the collection to disk when persistence was requested.
        chromadb.persist()
    return chromadb
def initialize_qdrant(class_object: Type[Qdrant], params: dict):
    """Build a Qdrant vector store from node params.

    Without documents/texts, connects a QdrantClient from `location`/`api_key`
    and wraps the named collection; otherwise ingests via `from_documents`.
    """
    if not docs_in_params(params):
        # NOTE(review): this guard uses `and`, so providing only one of the
        # two keys passes the check but the pop below raises KeyError for the
        # missing one — confirm whether `or` was intended.
        if "location" not in params and "api_key" not in params:
            raise ValueError("Location and API key must be provided in the params")
        # Imported lazily so the module loads even when qdrant is absent.
        from qdrant_client import QdrantClient
        client_params = {
            "location": params.pop("location"),
            "api_key": params.pop("api_key"),
        }
        lc_params = {
            "collection_name": params.pop("collection_name"),
            "embeddings": params.pop("embedding"),
        }
        client = QdrantClient(**client_params)
        return class_object(client=client, **lc_params)
    return class_object.from_documents(**params)
# Registry mapping a vector store class name to the initializer that knows how
# to construct it from node params; looked up by instantiate_vectorstore.
vecstore_initializer: Dict[str, Callable[[Type[Any], dict], Any]] = {
    "Pinecone": initialize_pinecone,
    "Chroma": initialize_chroma,
    "Qdrant": initialize_qdrant,
    "Weaviate": initialize_weaviate,
    "FAISS": initialize_faiss,
    "SupabaseVectorStore": initialize_supabase,
    "MongoDBAtlasVectorSearch": initialize_mongodb,
}

View file

@ -1,22 +1,8 @@
from langflow.cache.utils import compute_dict_hash, load_cache, memoize_dict
from langflow.cache.utils import memoize_dict
from langflow.graph import Graph
from langflow.utils.logger import logger
def load_langchain_object(data_graph, is_first_message=False):
"""
Load langchain object from cache if it exists, otherwise build it.
"""
computed_hash = compute_dict_hash(data_graph)
if is_first_message:
langchain_object = build_langchain_object(data_graph)
else:
logger.debug("Loading langchain object from cache")
langchain_object = load_cache(computed_hash)
return computed_hash, langchain_object
@memoize_dict(maxsize=10)
def build_langchain_object_with_caching(data_graph):
"""

View file

@ -7,6 +7,7 @@ import re
import yaml
from langchain.base_language import BaseLanguageModel
from PIL.Image import Image
from langflow.utils.logger import logger
def load_file_into_dict(file_path: str) -> dict:
@ -58,3 +59,22 @@ def try_setting_streaming_options(langchain_object, websocket):
def extract_input_variables_from_prompt(prompt: str) -> list[str]:
"""Extract input variables from prompt."""
return re.findall(r"{(.*?)}", prompt)
def setup_llm_caching():
    """Install the LLM cache class named by ``settings.cache`` onto langchain.

    Best-effort: failures are logged as warnings and never propagate, so app
    startup is not blocked by a missing or misconfigured cache backend.
    """
    try:
        import langchain
        from langflow.settings import settings
        from langflow.interface.importing.utils import import_class

        cache_class = import_class(f"langchain.cache.{settings.cache}")
        logger.debug(f"Setting up LLM caching with {cache_class.__name__}")
        langchain.llm_cache = cache_class()
        logger.info(f"LLM caching setup with {cache_class.__name__}")
    except ImportError as exc:
        # Do not reference settings.cache here: if the settings import itself
        # failed, that name is unbound and the old message raised a NameError
        # from inside this handler.
        logger.warning(f"Could not import caching dependency: {exc}. ")
    except Exception as exc:
        logger.warning(f"Could not setup LLM caching. Error: {exc}")

View file

@ -3,10 +3,12 @@ from fastapi.middleware.cors import CORSMiddleware
from langflow.api import router
from langflow.database.base import create_db_and_tables
from langflow.interface.utils import setup_llm_caching
def create_app():
"""Create the FastAPI app and include the router."""
app = FastAPI()
origins = [
@ -27,6 +29,7 @@ def create_app():
app.include_router(router)
app.on_event("startup")(create_db_and_tables)
app.on_event("startup")(setup_llm_caching)
return app

View file

@ -1,5 +1,4 @@
import contextlib
import io
from pathlib import Path
from langchain.schema import AgentAction
import json
from langflow.interface.run import (
@ -9,9 +8,9 @@ from langflow.interface.run import (
)
from langflow.utils.logger import logger
from langflow.graph import Graph
from typing import Any, Dict, List, Tuple
from langchain.chains.base import Chain
from langchain.vectorstores.base import VectorStore
from typing import Any, Dict, List, Optional, Tuple, Union
def fix_memory_inputs(langchain_object):
@ -20,22 +19,23 @@ def fix_memory_inputs(langchain_object):
object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the
get_memory_key function and updates the memory keys using the update_memory_keys function.
"""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
try:
if langchain_object.memory.memory_key in langchain_object.input_variables:
return
except AttributeError:
input_variables = (
langchain_object.prompt.input_variables
if hasattr(langchain_object, "prompt")
else langchain_object.input_keys
)
if langchain_object.memory.memory_key in input_variables:
return
if not hasattr(langchain_object, "memory") or langchain_object.memory is None:
return
try:
if langchain_object.memory.memory_key in langchain_object.input_variables:
return
except AttributeError:
input_variables = (
langchain_object.prompt.input_variables
if hasattr(langchain_object, "prompt")
else langchain_object.input_keys
)
if langchain_object.memory.memory_key in input_variables:
return
possible_new_mem_key = get_memory_key(langchain_object)
if possible_new_mem_key is not None:
update_memory_keys(langchain_object, possible_new_mem_key)
possible_new_mem_key = get_memory_key(langchain_object)
if possible_new_mem_key is not None:
update_memory_keys(langchain_object, possible_new_mem_key)
def format_actions(actions: List[Tuple[AgentAction, str]]) -> str:
@ -54,79 +54,42 @@ def format_actions(actions: List[Tuple[AgentAction, str]]) -> str:
return "\n".join(output)
def get_result_and_thought(langchain_object, message: str):
def get_result_and_thought(langchain_object: Any, inputs: dict):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
chat_input = None
memory_key = ""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
memory_key = langchain_object.memory.memory_key
if hasattr(langchain_object, "input_keys"):
for key in langchain_object.input_keys:
if key not in [memory_key, "chat_history"]:
chat_input = {key: message}
else:
chat_input = message # type: ignore
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = False
langchain_object.return_intermediate_steps = True
fix_memory_inputs(langchain_object)
with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
try:
# if hasattr(langchain_object, "acall"):
# output = await langchain_object.acall(chat_input)
# else:
output = langchain_object(chat_input)
except ValueError as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
output = langchain_object.run(chat_input)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
result = (
output.get(langchain_object.output_keys[0])
if isinstance(output, dict)
else output
)
if intermediate_steps:
thought = format_actions(intermediate_steps)
else:
thought = output_buffer.getvalue()
try:
output = langchain_object(inputs, return_only_outputs=True)
except ValueError as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
output = langchain_object.run(inputs)
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought
return output
def load_or_build_langchain_object(data_graph, is_first_message=False):
"""
Load langchain object from cache if it exists, otherwise build it.
"""
if is_first_message:
build_langchain_object_with_caching.clear_cache()
return build_langchain_object_with_caching(data_graph)
def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]:
    """Return the single input value when exactly one entry exists, else None."""
    if len(inputs) != 1:
        return None
    (single_value,) = inputs.values()
    return single_value
def process_graph_cached(data_graph: Dict[str, Any], message: str):
def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = None):
"""
Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate,then run the graph and return the result and thought.
"""
# Load langchain object
is_first_message = len(data_graph.get("chatHistory", [])) == 0
langchain_object = load_or_build_langchain_object(data_graph, is_first_message)
logger.debug("Loaded langchain object")
langchain_object = build_langchain_object_with_caching(data_graph)
logger.debug("Loaded LangChain object")
if langchain_object is None:
# Raise user facing error
@ -135,60 +98,121 @@ def process_graph_cached(data_graph: Dict[str, Any], message: str):
)
# Generate result and thought
logger.debug("Generating result and thought")
result, thought = get_result_and_thought(langchain_object, message)
logger.debug("Generated result and thought")
return {"result": str(result), "thought": thought.strip()}
if isinstance(langchain_object, Chain):
if inputs is None:
raise ValueError("Inputs must be provided for a Chain")
logger.debug("Generating result and thought")
result = get_result_and_thought(langchain_object, inputs)
logger.debug("Generated result and thought")
elif isinstance(langchain_object, VectorStore):
class_name = langchain_object.__class__.__name__
result = {"message": f"Processed {class_name} successfully"}
return result
def load_flow_from_json(path: str, build=True):
"""Load flow from json file"""
# This is done to avoid circular imports
def load_flow_from_json(
input: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True
):
"""
Load flow from a JSON file or a JSON object.
with open(path, "r", encoding="utf-8") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
nodes = data_graph["nodes"]
# Substitute ZeroShotPrompt with PromptTemplate
# nodes = replace_zero_shot_prompt_with_prompt_template(nodes)
# Add input variables
# nodes = payload.extract_input_variables(nodes)
:param input: JSON file path or JSON object
:param tweaks: Optional tweaks to be processed
:param build: If True, build the graph, otherwise return the graph object
:return: Langchain object or Graph object depending on the build parameter
"""
# If input is a file path, load JSON from the file
if isinstance(input, (str, Path)):
with open(input, "r", encoding="utf-8") as f:
flow_graph = json.load(f)
# If input is a dictionary, assume it's a JSON object
elif isinstance(input, dict):
flow_graph = input
else:
raise TypeError(
"Input must be either a file path (str) or a JSON object (dict)"
)
# Nodes, edges and root node
edges = data_graph["edges"]
graph_data = flow_graph["data"]
if tweaks is not None:
graph_data = process_tweaks(graph_data, tweaks)
nodes = graph_data["nodes"]
edges = graph_data["edges"]
graph = Graph(nodes, edges)
if build:
langchain_object = graph.build()
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = False
fix_memory_inputs(langchain_object)
return langchain_object
return graph
def process_tweaks(graph_data: dict, tweaks: dict):
"""This function is used to tweak the graph data using the node id and the tweaks dict"""
# the tweaks dict is a dict of dicts
# the key is the node id and the value is a dict of the tweaks
# the dict of tweaks contains the name of a certain parameter and the value to be tweaked
def validate_input(
    graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]
) -> List[Dict[str, Any]]:
    """Check the shapes of graph_data and tweaks and return the node list.

    Raises ValueError when either argument is not a dict or when no list of
    nodes can be found in the graph data.
    """
    if not (isinstance(graph_data, dict) and isinstance(tweaks, dict)):
        raise ValueError("graph_data and tweaks should be dictionaries")

    # Nodes may live under graph_data["data"]["nodes"] or directly under
    # graph_data["nodes"]; prefer the nested location.
    nested_nodes = graph_data.get("data", {}).get("nodes")
    nodes = nested_nodes or graph_data.get("nodes")
    if not isinstance(nodes, list):
        raise ValueError(
            "graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key"
        )
    return nodes
def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None:
    """Apply tweak values to a node's template, mutating the node in place.

    Each tweak whose name matches a template field overwrites that field's
    "value" (or "file_path" for the file_path field). Unknown names are
    ignored; a malformed template is logged and skipped.
    """
    template_data = node.get("data", {}).get("node", {}).get("template")
    if not isinstance(template_data, dict):
        logger.warning(
            f"Template data for node {node.get('id')} should be a dictionary"
        )
        return
    for tweak_name, tweak_value in node_tweaks.items():
        # Accept falsy tweak values (False, 0, ""): the old truthiness check
        # silently dropped them, making boolean/zero tweaks impossible. Only
        # None means "no value supplied".
        if tweak_name and tweak_value is not None and tweak_name in template_data:
            key = tweak_name if tweak_name == "file_path" else "value"
            template_data[tweak_name][key] = tweak_value
def process_tweaks(
graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]
) -> Dict[str, Any]:
"""
This function is used to tweak the graph data using the node id and the tweaks dict.
:param graph_data: The dictionary containing the graph data. It must contain a 'data' key with
'nodes' as its child or directly contain 'nodes' key. Each node should have an 'id' and 'data'.
:param tweaks: A dictionary where the key is the node id and the value is a dictionary of the tweaks.
The inner dictionary contains the name of a certain parameter as the key and the value to be tweaked.
:return: The modified graph_data dictionary.
:raises ValueError: If the input is not in the expected format.
"""
nodes = validate_input(graph_data, tweaks)
# We need to process the graph data to add the tweaks
nodes = graph_data["data"]["nodes"]
for node in nodes:
node_id = node["id"]
if node_id in tweaks:
node_tweaks = tweaks[node_id]
template_data = node["data"]["node"]["template"]
for tweak_name, tweake_value in node_tweaks.items():
if tweak_name in template_data:
template_data[tweak_name]["value"] = tweake_value
print(
f"Something changed in node {node_id} with tweak {tweak_name} and value {tweake_value}"
)
if isinstance(node, dict) and isinstance(node.get("id"), str):
node_id = node["id"]
if node_tweaks := tweaks.get(node_id):
apply_tweaks(node, node_tweaks)
else:
logger.warning(
"Each node should be a dictionary with an 'id' key of type str"
)
return graph_data

View file

@ -21,7 +21,8 @@ class Settings(BaseSettings):
utilities: List[str] = []
dev: bool = False
database_url: str = "sqlite:///./langflow.db"
save_api_keys: bool = True
cache: str = "InMemoryCache"
remove_api_keys: bool = False
class Config:
validate_assignment = True
@ -48,7 +49,6 @@ class Settings(BaseSettings):
self.textsplitters = new_settings.textsplitters or []
self.utilities = new_settings.utilities or []
self.dev = dev
self.save_api_keys = new_settings.save_api_keys
def update_settings(self, **kwargs):
for key, value in kwargs.items():

View file

@ -15,7 +15,7 @@ class TemplateFieldCreator(BaseModel, ABC):
suffixes: list[str] = []
fileTypes: list[str] = []
file_types: list[str] = []
content: Union[str, None] = None
file_path: Union[str, None] = None
password: bool = False
options: list[str] = []
name: str = ""
@ -35,7 +35,7 @@ class TemplateFieldCreator(BaseModel, ABC):
result["fileTypes"] = result.pop("file_types")
if self.field_type == "file":
result["content"] = self.content
result["file_path"] = self.file_path
return result

View file

@ -155,6 +155,7 @@ class CSVAgentNode(FrontendNode):
class InitializeAgentNode(FrontendNode):
name: str = "AgentInitializer"
display_name: str = "AgentInitializer"
template: Template = Template(
type_name="initialize_agent",
fields=[

View file

@ -13,12 +13,37 @@ class ChainFrontendNode(FrontendNode):
self.template.add_field(
TemplateField(
field_type="BaseChatMemory",
required=False,
required=True,
show=True,
name="memory",
advanced=False,
)
)
# add return_source_documents
self.template.add_field(
TemplateField(
field_type="bool",
required=False,
show=True,
name="return_source_documents",
advanced=False,
value=True,
display_name="Return source documents",
)
)
self.template.add_field(
TemplateField(
field_type="str",
required=True,
is_list=True,
show=True,
multiline=False,
options=QA_CHAIN_TYPES,
value=QA_CHAIN_TYPES[0],
name="chain_type",
advanced=False,
)
)
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
@ -33,13 +58,21 @@ class ChainFrontendNode(FrontendNode):
field.show = True
field.advanced = True
# We should think of a way to deal with this later
# if field.field_type == "PromptTemplate":
# field.field_type = "str"
# field.multiline = True
# field.show = True
# field.advanced = False
# field.value = field.value.template
# Separated for possible future changes
if field.name == "prompt" and field.value is None:
field.required = True
field.show = True
field.advanced = False
if field.name == "memory":
field.required = False
# field.required = False
field.show = True
field.advanced = False
if field.name == "verbose":
@ -51,6 +84,12 @@ class ChainFrontendNode(FrontendNode):
field.show = True
field.advanced = False
if field.name == "return_source_documents":
field.required = False
field.show = True
field.advanced = True
field.value = True
class SeriesCharacterChainNode(FrontendNode):
name: str = "SeriesCharacterChain"

View file

@ -1,8 +1,9 @@
from typing import Optional
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
def build_template(
def build_file_field(
suffixes: list, fileTypes: list, name: str = "file_path"
) -> TemplateField:
"""Build a template field for a document loader."""
@ -19,39 +20,90 @@ def build_template(
class DocumentLoaderFrontNode(FrontendNode):
file_path_templates = {
"AirbyteJSONLoader": build_template(suffixes=[".json"], fileTypes=["json"]),
"CoNLLULoader": build_template(suffixes=[".csv"], fileTypes=["csv"]),
"CSVLoader": build_template(suffixes=[".csv"], fileTypes=["csv"]),
"UnstructuredEmailLoader": build_template(suffixes=[".eml"], fileTypes=["eml"]),
"EverNoteLoader": build_template(suffixes=[".xml"], fileTypes=["xml"]),
"FacebookChatLoader": build_template(suffixes=[".json"], fileTypes=["json"]),
"GutenbergLoader": build_template(suffixes=[".txt"], fileTypes=["txt"]),
"BSHTMLLoader": build_template(suffixes=[".html"], fileTypes=["html"]),
"UnstructuredHTMLLoader": build_template(
"AirbyteJSONLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]),
"CoNLLULoader": build_file_field(suffixes=[".csv"], fileTypes=["csv"]),
"CSVLoader": build_file_field(suffixes=[".csv"], fileTypes=["csv"]),
"UnstructuredEmailLoader": build_file_field(
suffixes=[".eml"], fileTypes=["eml"]
),
"SlackDirectoryLoader": build_file_field(suffixes=[".zip"], fileTypes=["zip"]),
"EverNoteLoader": build_file_field(suffixes=[".xml"], fileTypes=["xml"]),
"FacebookChatLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]),
"GutenbergLoader": build_file_field(suffixes=[".txt"], fileTypes=["txt"]),
"BSHTMLLoader": build_file_field(suffixes=[".html"], fileTypes=["html"]),
"UnstructuredHTMLLoader": build_file_field(
suffixes=[".html"], fileTypes=["html"]
),
"UnstructuredImageLoader": build_template(
"UnstructuredImageLoader": build_file_field(
suffixes=[".jpg", ".jpeg", ".png", ".gif", ".bmp"],
fileTypes=["jpg", "jpeg", "png", "gif", "bmp"],
),
"UnstructuredMarkdownLoader": build_template(
"UnstructuredMarkdownLoader": build_file_field(
suffixes=[".md"], fileTypes=["md"]
),
"PyPDFLoader": build_template(suffixes=[".pdf"], fileTypes=["pdf"]),
"UnstructuredPowerPointLoader": build_template(
"PyPDFLoader": build_file_field(suffixes=[".pdf"], fileTypes=["pdf"]),
"UnstructuredPowerPointLoader": build_file_field(
suffixes=[".pptx", ".ppt"], fileTypes=["pptx", "ppt"]
),
"SRTLoader": build_template(suffixes=[".srt"], fileTypes=["srt"]),
"TelegramChatLoader": build_template(suffixes=[".json"], fileTypes=["json"]),
"TextLoader": build_template(suffixes=[".txt"], fileTypes=["txt"]),
"UnstructuredWordDocumentLoader": build_template(
"SRTLoader": build_file_field(suffixes=[".srt"], fileTypes=["srt"]),
"TelegramChatLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]),
"TextLoader": build_file_field(suffixes=[".txt"], fileTypes=["txt"]),
"UnstructuredWordDocumentLoader": build_file_field(
suffixes=[".docx", ".doc"], fileTypes=["docx", "doc"]
),
}
def add_extra_fields(self) -> None:
name = None
if self.template.type_name in self.file_path_templates:
display_name = "Web Page"
if self.template.type_name in {"GitLoader"}:
# Add fields repo_path, clone_url, branch and file_filter
self.template.add_field(
TemplateField(
field_type="str",
required=True,
show=True,
name="repo_path",
value="",
display_name="Path to repository",
advanced=False,
)
)
self.template.add_field(
TemplateField(
field_type="str",
required=False,
show=True,
name="clone_url",
value="",
display_name="Clone URL",
advanced=False,
)
)
self.template.add_field(
TemplateField(
field_type="str",
required=True,
show=True,
name="branch",
value="",
display_name="Branch",
advanced=False,
)
)
self.template.add_field(
TemplateField(
field_type="str",
required=False,
show=True,
name="file_filter",
value="",
display_name="File extensions (comma-separated)",
advanced=False,
)
)
elif self.template.type_name in self.file_path_templates:
self.template.add_field(self.file_path_templates[self.template.type_name])
elif self.template.type_name in {
"WebBaseLoader",
@ -64,8 +116,13 @@ class DocumentLoaderFrontNode(FrontendNode):
name = "web_path"
elif self.template.type_name in {"GitbookLoader"}:
name = "web_page"
elif self.template.type_name in {"ReadTheDocsLoader"}:
elif self.template.type_name in {
"DirectoryLoader",
"ReadTheDocsLoader",
"NotionDirectoryLoader",
}:
name = "path"
display_name = "Local directory"
if name:
self.template.add_field(
TemplateField(
@ -74,6 +131,37 @@ class DocumentLoaderFrontNode(FrontendNode):
show=True,
name=name,
value="",
display_name="Web Page",
display_name=display_name,
)
)
if self.template.type_name in {"DirectoryLoader"}:
self.template.add_field(
TemplateField(
field_type="str",
required=True,
show=True,
name="glob",
value="**/*.txt",
display_name="glob",
)
)
# add a metadata field of type dict
self.template.add_field(
TemplateField(
field_type="code",
required=True,
show=True,
name="metadata",
value="{}",
display_name="Metadata",
multiline=False,
)
)
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
FrontendNode.format_field(field, name)
if field.name == "metadata":
field.show = True
field.advanced = False
field.show = True

View file

@ -18,6 +18,27 @@ class MemoryFrontendNode(FrontendNode):
value=False,
)
)
# add input_key and output_key str fields
self.template.add_field(
TemplateField(
field_type="str",
required=False,
show=True,
name="input_key",
advanced=True,
value="",
)
)
self.template.add_field(
TemplateField(
field_type="str",
required=False,
show=True,
name="output_key",
advanced=True,
value="",
)
)
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
@ -36,3 +57,10 @@ class MemoryFrontendNode(FrontendNode):
field.required = False
field.show = True
field.advanced = False
if field.name in ["input_key", "output_key"]:
field.required = False
field.show = True
field.advanced = False
field.value = ""
if field.name == "memory_key":
field.value = "chat_history"

View file

@ -96,6 +96,16 @@ class PythonFunctionToolNode(FrontendNode):
name="code",
advanced=False,
),
TemplateField(
field_type="bool",
required=True,
placeholder="",
is_list=False,
show=True,
multiline=False,
value=False,
name="return_direct",
),
],
)
description: str = "Python function to be executed."

View file

@ -1,4 +1,4 @@
from typing import Optional
from typing import List, Optional
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
@ -6,6 +6,19 @@ from langflow.template.frontend_node.base import FrontendNode
class VectorStoreFrontendNode(FrontendNode):
def add_extra_fields(self) -> None:
extra_fields: List[TemplateField] = []
# Add search_kwargs field
extra_field = TemplateField(
name="search_kwargs",
field_type="code",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="{}",
)
extra_fields.append(extra_field)
if self.template.type_name == "Weaviate":
extra_field = TemplateField(
name="weaviate_url",
@ -17,8 +30,174 @@ class VectorStoreFrontendNode(FrontendNode):
multiline=False,
value="http://localhost:8080",
)
# Add client_kwargs field
extra_field2 = TemplateField(
name="client_kwargs",
field_type="code",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="{}",
)
extra_fields.extend((extra_field, extra_field2))
self.template.add_field(extra_field)
elif self.template.type_name == "Chroma":
# New bool field for persist parameter
extra_field = TemplateField(
name="persist",
field_type="bool",
required=False,
show=True,
advanced=False,
value=True,
display_name="Persist",
)
extra_fields.append(extra_field)
elif self.template.type_name == "Pinecone":
# add pinecone_api_key and pinecone_env
extra_field = TemplateField(
name="pinecone_api_key",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="",
)
extra_field2 = TemplateField(
name="pinecone_env",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="",
)
extra_fields.extend((extra_field, extra_field2))
elif self.template.type_name == "FAISS":
extra_field = TemplateField(
name="folder_path",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
display_name="Local Path",
value="",
)
extra_field2 = TemplateField(
name="index_name",
field_type="str",
required=False,
show=True,
advanced=False,
value="",
display_name="Index Name",
)
extra_fields.extend((extra_field, extra_field2))
elif self.template.type_name == "SupabaseVectorStore":
self.display_name = "Supabase"
# Add table_name and query_name
extra_field = TemplateField(
name="table_name",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="",
)
extra_field2 = TemplateField(
name="query_name",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="",
)
# Add supabase_url and supabase_service_key
extra_field3 = TemplateField(
name="supabase_url",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="",
)
extra_field4 = TemplateField(
name="supabase_service_key",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
value="",
)
extra_fields.extend((extra_field, extra_field2, extra_field3, extra_field4))
elif self.template.type_name == "MongoDBAtlasVectorSearch":
self.display_name = "MongoDB Atlas"
extra_field = TemplateField(
name="mongodb_atlas_cluster_uri",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
display_name="MongoDB Atlas Cluster URI",
value="",
)
extra_field2 = TemplateField(
name="collection_name",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
display_name="Collection Name",
value="",
)
extra_field3 = TemplateField(
name="db_name",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
display_name="Database Name",
value="",
)
extra_field4 = TemplateField(
name="index_name",
field_type="str",
required=False,
placeholder="",
show=True,
advanced=True,
multiline=False,
display_name="Index Name",
value="",
)
extra_fields.extend((extra_field, extra_field2, extra_field3, extra_field4))
if extra_fields:
for field in extra_fields:
self.template.add_field(field)
def add_extra_base_classes(self) -> None:
self.base_classes.append("BaseRetriever")
@ -27,7 +206,25 @@ class VectorStoreFrontendNode(FrontendNode):
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
FrontendNode.format_field(field, name)
# Define common field attributes
basic_fields = ["work_dir", "collection_name", "api_key", "location"]
basic_fields = [
"work_dir",
"collection_name",
"api_key",
"location",
"persist_directory",
"persist",
"weaviate_url",
"index_name",
"namespace",
"folder_path",
"table_name",
"query_name",
"supabase_url",
"supabase_service_key",
"mongodb_atlas_cluster_uri",
"collection_name",
"db_name",
]
advanced_fields = [
"n_dim",
"key",
@ -43,14 +240,21 @@ class VectorStoreFrontendNode(FrontendNode):
"https",
"prefer_grpc",
"grpc_port",
"pinecone_api_key",
"pinecone_env",
"client_kwargs",
"search_kwargs",
]
# Check and set field attributes
if field.name == "texts":
# if field.name is "texts" it has to be replaced
# when instantiating the vectorstores
field.name = "documents"
field.field_type = "TextSplitter"
field.display_name = "Text Splitter"
field.required = True
field.display_name = "Documents"
field.required = False
field.show = True
field.advanced = False
@ -78,5 +282,6 @@ class VectorStoreFrontendNode(FrontendNode):
field.advanced = True
if "key" in field.name:
field.password = False
# TODO: Weaviate requires weaviate_url to be passed as it is not part of
# the class or from_texts method. We need the add_extra_fields to fix this
elif field.name == "text_key":
field.show = False

View file

@ -3,6 +3,7 @@ from typing import Callable, Optional, Union
from pydantic import BaseModel
from langflow.template.field.base import TemplateField
from langflow.utils.constants import DIRECT_TYPES
class Template(BaseModel):
@ -18,8 +19,15 @@ class Template(BaseModel):
for field in self.fields:
format_field_func(field, name)
def sort_fields(self):
# first sort alphabetically
# then sort fields so that fields that have .field_type in DIRECT_TYPES are first
self.fields.sort(key=lambda x: x.name)
self.fields.sort(key=lambda x: x.field_type in DIRECT_TYPES, reverse=False)
def to_dict(self, format_field_func=None):
self.process_fields(self.type_name, format_field_func)
self.sort_fields()
result = {field.name: field.to_dict() for field in self.fields}
result["_type"] = self.type_name # type: ignore
return result

View file

@ -36,3 +36,4 @@ def python_function(text: str) -> str:
\"\"\"This is a default python function that returns the input text\"\"\"
return text
"""
DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"]

View file

@ -299,12 +299,15 @@ def format_dict(d, name: Optional[str] = None):
if name == "OpenAI" and key == "model_name":
value["options"] = constants.OPENAI_MODELS
value["list"] = True
value["value"] = constants.OPENAI_MODELS[0]
elif name == "ChatOpenAI" and key == "model_name":
value["options"] = constants.CHAT_OPENAI_MODELS
value["list"] = True
value["value"] = constants.CHAT_OPENAI_MODELS[0]
elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name":
value["options"] = constants.ANTHROPIC_MODELS
value["list"] = True
value["value"] = constants.ANTHROPIC_MODELS[0]
return d

View file

@ -1,4 +1,5 @@
import ast
import contextlib
import importlib
import types
from typing import Dict
@ -147,11 +148,8 @@ def create_function(code, function_name):
code_obj = compile(
ast.Module(body=[function_code], type_ignores=[]), "<string>", "exec"
)
try:
with contextlib.suppress(Exception):
exec(code_obj, exec_globals, locals())
except Exception:
pass
exec_globals[function_name] = locals()[function_name]
# Return a function that imports necessary modules and calls the target function

View file

@ -1 +0,0 @@
/usr/lib/node_modules/opencommit/out/cli.cjs

File diff suppressed because it is too large Load diff

View file

@ -8,15 +8,16 @@
"@headlessui/react": "^1.7.10",
"@heroicons/react": "^2.0.15",
"@mui/material": "^5.11.9",
"@radix-ui/react-dropdown-menu": "^2.0.5",
"@radix-ui/react-menubar": "^1.0.3",
"@radix-ui/react-separator": "^1.0.3",
"@radix-ui/react-slot": "^1.0.2",
"@radix-ui/react-tabs": "^1.0.4",
"@radix-ui/react-checkbox": "^1.0.4",
"@radix-ui/react-dialog": "^1.0.4",
"@radix-ui/react-dropdown-menu": "^2.0.5",
"@radix-ui/react-label": "^2.0.2",
"@radix-ui/react-menubar": "^1.0.3",
"@radix-ui/react-progress": "^1.0.3",
"@radix-ui/react-separator": "^1.0.3",
"@radix-ui/react-slot": "^1.0.2",
"@radix-ui/react-switch": "^1.0.3",
"@radix-ui/react-tabs": "^1.0.4",
"@radix-ui/react-tooltip": "^1.0.6",
"@tabler/icons-react": "^2.18.0",
"@tailwindcss/forms": "^0.5.3",
@ -98,9 +99,10 @@
"@types/uuid": "^9.0.1",
"@vitejs/plugin-react-swc": "^3.0.0",
"autoprefixer": "^10.4.14",
"daisyui": "^3.1.1",
"postcss": "^8.4.23",
"tailwindcss": "^3.3.2",
"typescript": "^5.0.2",
"vite": "^4.3.5"
"vite": "^4.3.9"
}
}

View file

@ -47,13 +47,6 @@ export default function App() {
}>
>([]);
// Initialize state variable for the version
const [version, setVersion] = useState("");
useEffect(() => {
getVersion().then((data) => {
setVersion(data.version);
});
}, []);
// Use effect hook to update alertsList when a new alert is added
useEffect(() => {
// If there is an error alert open with data, add it to the alertsList
@ -157,14 +150,6 @@ export default function App() {
</div>
))}
</div>
<a
target={"_blank"}
href="https://logspace.ai/"
className="absolute left-7 bottom-2 flex h-6 cursor-pointer flex-col items-center justify-start overflow-hidden rounded-lg bg-foreground px-2 text-center font-sans text-xs tracking-wide text-secondary transition-all duration-500 ease-in-out hover:h-12"
>
{version && <div className="mt-1"> LangFlow v{version}</div>}
<div className={version ? "mt-2" : "mt-1"}>Created by Logspace</div>
</a>
</div>
);
}

View file

@ -1,8 +1,10 @@
import { Handle, Position, useUpdateNodeInternals } from "reactflow";
import {
classNames,
getRandomKeyByssmm,
groupByFamily,
isValidConnection,
nodeIconsLucide,
} from "../../../../utils";
import { useContext, useEffect, useRef, useState } from "react";
import InputComponent from "../../../../components/inputComponent";
@ -17,7 +19,7 @@ import InputFileComponent from "../../../../components/inputFileComponent";
import { TabsContext } from "../../../../contexts/tabsContext";
import IntComponent from "../../../../components/intComponent";
import PromptAreaComponent from "../../../../components/promptComponent";
import { nodeNames, nodeIcons } from "../../../../utils";
import { nodeNames } from "../../../../utils";
import React from "react";
import { nodeColors } from "../../../../utils";
import ShadTooltip from "../../../../components/ShadTooltipComponent";
@ -40,7 +42,7 @@ export default function ParameterComponent({
const updateNodeInternals = useUpdateNodeInternals();
const [position, setPosition] = useState(0);
const { closePopUp } = useContext(PopUpContext);
const { setTabsState, tabId } = useContext(TabsContext);
const { setTabsState, tabId, save } = useContext(TabsContext);
useEffect(() => {
if (ref.current && ref.current.offsetTop && ref.current.clientHeight) {
@ -82,18 +84,18 @@ export default function ParameterComponent({
refHtml.current = groupedObj.map((item, i) => (
<span
key={i}
key={getRandomKeyByssmm()}
className={classNames(
i > 0 ? "items-center flex mt-3" : "items-center flex"
)}
>
<div
className="h-5 w-5"
className="h-6 w-6"
style={{
color: nodeColors[item.family],
}}
>
{React.createElement(nodeIcons[item.family])}
{React.createElement(nodeIconsLucide[item.family])}
</div>
<span className="ps-2 text-gray-950">
{nodeNames[item.family] ?? ""}{" "}
@ -102,14 +104,14 @@ export default function ParameterComponent({
-&nbsp;
{item.type.split(", ").length > 2
? item.type.split(", ").map((el, i) => (
<>
<span key={i}>
{i == item.type.split(", ").length - 1
<React.Fragment key={el + i}>
<span>
{i === item.type.split(", ").length - 1
? el
: (el += `, `)}
</span>
{i % 2 == 0 && i > 0 && <br></br>}
</>
{i % 2 === 0 && i > 0 && <br />}
</React.Fragment>
))
: item.type}
</span>
@ -239,7 +241,8 @@ export default function ParameterComponent({
fileTypes={data.node.template[name].fileTypes}
suffixes={data.node.template[name].suffixes}
onFileChange={(t: string) => {
data.node.template[name].content = t;
data.node.template[name].file_path = t;
save();
}}
></InputFileComponent>
) : left === true && type === "int" ? (

View file

@ -1,7 +1,21 @@
import { classNames, nodeColors, nodeIcons, toTitleCase } from "../../utils";
import {
classNames,
nodeColors,
nodeIconsLucide,
toTitleCase,
} from "../../utils";
import ParameterComponent from "./components/parameterComponent";
import { typesContext } from "../../contexts/typesContext";
import { useContext, useState, useEffect, useRef } from "react";
import {
useContext,
useState,
useEffect,
useRef,
ForwardRefExoticComponent,
ComponentType,
SVGProps,
ReactNode,
} from "react";
import { NodeDataType } from "../../types/flow";
import { alertContext } from "../../contexts/alertContext";
import { PopUpContext } from "../../contexts/popUpContext";
@ -12,6 +26,7 @@ import NodeToolbarComponent from "../../pages/FlowPage/components/nodeToolbarCom
import ShadTooltip from "../../components/ShadTooltipComponent";
import { useSSE } from "../../contexts/SSEContext";
import { ReactElement } from "react-markdown/lib/react-markdown";
export default function GenericNode({
data,
@ -25,11 +40,12 @@ export default function GenericNode({
const { types, deleteNode } = useContext(typesContext);
const { closePopUp, openPopUp } = useContext(PopUpContext);
const Icon = nodeIcons[data.type] || nodeIcons[types[data.type]];
// any to avoid type conflict
const Icon: any =
nodeIconsLucide[data.type] || nodeIconsLucide[types[data.type]];
const [validationStatus, setValidationStatus] = useState(null);
// State for outline color
const { sseData } = useSSE();
const { sseData, isBuilding } = useSSE();
// useEffect(() => {
// if (reactFlowInstance) {
@ -88,8 +104,13 @@ export default function GenericNode({
}}
/>
<div className="ml-2 truncate">
<ShadTooltip delayDuration={1500} content={data.type}>
<div className="ml-2 truncate text-foreground">{data.type}</div>
<ShadTooltip
delayDuration={1500}
content={data.node.display_name}
>
<div className="ml-2 truncate text-gray-800">
{data.node.display_name}
</div>
</ShadTooltip>
</div>
</div>
@ -137,10 +158,10 @@ export default function GenericNode({
></div>
<div
className={classNames(
!validationStatus
? "w-4 h-4 rounded-full bg-status-yellow opacity-100"
: "w-4 h-4 rounded-full bg-ring opacity-0 hidden animate-spin",
"absolute w-4 hover:text-ring hover: transition-all ease-in-out duration-200"
!validationStatus || isBuilding
? "w-4 h-4 rounded-full bg-yellow-500 opacity-100"
: "w-4 h-4 rounded-full bg-gray-500 opacity-0 hidden animate-spin",
"absolute w-4 hover:text-gray-500 hover:dark:text-gray-300 transition-all ease-in-out duration-200"
)}
></div>
</div>

View file

@ -1,13 +1,8 @@
import {
XCircleIcon,
XMarkIcon,
InformationCircleIcon,
CheckCircleIcon,
} from "@heroicons/react/24/outline";
import { Link } from "react-router-dom";
import { Transition } from "@headlessui/react";
import { useState } from "react";
import { SingleAlertComponentType } from "../../../../types/alerts";
import { X, CheckCircle2, Info, XCircle } from "lucide-react";
export default function SingleAlert({
dropItem,
@ -34,8 +29,8 @@ export default function SingleAlert({
key={dropItem.id}
>
<div className="flex-shrink-0">
<XCircleIcon
className="h-5 w-5 text-red-400"
<XCircle
className="h-5 w-5 text-red-400 dark:text-red-50"
aria-hidden="true"
/>
</div>
@ -70,7 +65,7 @@ export default function SingleAlert({
className="inline-flex rounded-md bg-red-50 p-1.5 text-red-500"
>
<span className="sr-only">Dismiss</span>
<XMarkIcon className="h-5 w-5" aria-hidden="true" />
<X className="h-5 w-5" aria-hidden="true" />
</button>
</div>
</div>
@ -81,8 +76,8 @@ export default function SingleAlert({
key={dropItem.id}
>
<div className="flex-shrink-0">
<InformationCircleIcon
className="h-5 w-5 text-blue-400 "
<Info
className="h-5 w-5 text-blue-400 dark:text-blue-50"
aria-hidden="true"
/>
</div>
@ -116,7 +111,7 @@ export default function SingleAlert({
className="inline-flex rounded-md bg-blue-50 p-1.5 text-blue-500 "
>
<span className="sr-only">Dismiss</span>
<XMarkIcon className="h-5 w-5" aria-hidden="true" />
<X className="h-5 w-5" aria-hidden="true" />
</button>
</div>
</div>
@ -127,8 +122,8 @@ export default function SingleAlert({
key={dropItem.id}
>
<div className="flex-shrink-0">
<CheckCircleIcon
className="h-5 w-5 text-green-400 "
<CheckCircle2
className="h-5 w-5 text-green-400 dark:text-green-50"
aria-hidden="true"
/>
</div>
@ -150,7 +145,7 @@ export default function SingleAlert({
className="inline-flex rounded-md bg-green-50 p-1.5 text-green-500 "
>
<span className="sr-only">Dismiss</span>
<XMarkIcon className="h-5 w-5" aria-hidden="true" />
<X className="h-5 w-5" aria-hidden="true" />
</button>
</div>
</div>

View file

@ -1,11 +1,11 @@
import { useContext, useEffect, useRef } from "react";
import { alertContext } from "../../contexts/alertContext";
import { XMarkIcon } from "@heroicons/react/24/solid";
import { TrashIcon } from "@heroicons/react/24/outline";
import SingleAlert from "./components/singleAlertComponent";
import { AlertDropdownType } from "../../types/alerts";
import { PopUpContext } from "../../contexts/popUpContext";
import { useOnClickOutside } from "../hooks/useOnClickOutside";
import { X, Trash2 } from "lucide-react";
export default function AlertDropdown({}: AlertDropdownType) {
const { closePopUp } = useContext(PopUpContext);
const componentRef = useRef<HTMLDivElement>(null);
@ -36,13 +36,13 @@ export default function AlertDropdown({}: AlertDropdownType) {
setTimeout(clearNotificationList, 100);
}}
>
<TrashIcon className="w-[1.1rem] h-[1.1rem]" />
<Trash2 className="w-[1.1rem] h-[1.1rem]" />
</button>
<button
className="text-foreground hover:text-status-red"
onClick={closePopUp}
>
<XMarkIcon className="h-5 w-5" />
<X className="h-5 w-5" />
</button>
</div>
</div>

View file

@ -1,7 +1,7 @@
import { Transition } from "@headlessui/react";
import { XCircleIcon, XMarkIcon } from "@heroicons/react/24/outline";
import { useEffect, useState } from "react";
import { ErrorAlertType } from "../../types/alerts";
import { XCircle } from "lucide-react";
export default function ErrorAlert({
title,
@ -43,8 +43,8 @@ export default function ErrorAlert({
>
<div className="flex">
<div className="flex-shrink-0">
<XCircleIcon
className="h-5 w-5 text-error-background"
<XCircle
className="h-5 w-5 text-red-400 dark:text-red-50"
aria-hidden="true"
/>
</div>

View file

@ -1,8 +1,8 @@
import { Transition } from "@headlessui/react";
import { InformationCircleIcon, XMarkIcon } from "@heroicons/react/24/outline";
import { useEffect, useState } from "react";
import { Link } from "react-router-dom";
import { NoticeAlertType } from "../../types/alerts";
import { Info } from "lucide-react";
export default function NoticeAlert({
title,
@ -40,8 +40,8 @@ export default function NoticeAlert({
>
<div className="flex">
<div className="flex-shrink-0">
<InformationCircleIcon
className="h-5 w-5 text-medium-blue"
<Info
className="h-5 w-5 text-blue-400 dark:text-blue-50"
aria-hidden="true"
/>
</div>

View file

@ -1,7 +1,7 @@
import { Transition } from "@headlessui/react";
import { CheckCircleIcon, XMarkIcon } from "@heroicons/react/24/outline";
import { useEffect, useState } from "react";
import { SuccessAlertType } from "../../types/alerts";
import { CheckCircle2 } from "lucide-react";
export default function SuccessAlert({
title,
@ -38,8 +38,8 @@ export default function SuccessAlert({
>
<div className="flex">
<div className="flex-shrink-0">
<CheckCircleIcon
className="h-5 w-5 text-status-green"
<CheckCircle2
className="h-5 w-5 text-green-400 dark:text-green-50"
aria-hidden="true"
/>
</div>

View file

@ -1,5 +1,4 @@
import { Disclosure } from "@headlessui/react";
import { ChevronLeftIcon } from "@heroicons/react/24/outline";
import { useContext, useState } from "react";
import { Link } from "react-router-dom";
import { classNames } from "../../utils";

View file

@ -0,0 +1,6 @@
import { useContext, useEffect, useRef, useState } from "react";
import { RadialProgressType } from "../../types/components";
export default function LoadingSpinner({}) {
return <></>;
}

View file

@ -0,0 +1,21 @@
import { ReactElement, useContext, useEffect, useRef, useState } from "react";
import { ProgressBarType } from "../../types/components";
import { Progress } from "../../components/ui/progress";
import { progressContext } from "../../contexts/ProgressContext";
import { setInterval } from "timers/promises";
export default function ProgressBarComponent({
value,
children,
}: ProgressBarType) {
const ref = useRef(0);
const reff = useRef();
const { progress } = useContext(progressContext);
useEffect(() => {
ref.current = progress * 100;
console.log(progress);
}, [progress]);
return <Progress className="h-2.5" value={ref.current} />;
}

View file

@ -0,0 +1,19 @@
import { useContext, useEffect, useRef, useState } from "react";
import { RadialProgressType } from "../../types/components";
export default function RadialProgressComponent({
value,
color,
}: RadialProgressType) {
const style = {
"--value": value * 100,
"--size": "1.5rem",
"--thickness": "2px",
} as React.CSSProperties;
return (
<div className={"radial-progress " + color} style={style}>
<strong className="text-[8px]">{Math.trunc(value * 100)}%</strong>
</div>
);
}

View file

@ -29,13 +29,15 @@ export const CardComponent = ({
<Card className="group">
<CardHeader>
<CardTitle className="flex w-full items-center gap-4">
<span
className={
"rounded-full w-7 h-7 flex items-center justify-center text-2xl " +
gradients[parseInt(flow.id.slice(0, 12), 16) % gradients.length]
}
></span>
<span className="flex-1 w-full inline-block truncate-doubleline break-words">{flow.name}</span>
<span
className={
"rounded-full w-7 h-7 flex items-center justify-center text-2xl " +
gradients[parseInt(flow.id.slice(0, 12), 16) % gradients.length]
}
></span>
<span className="flex-1 w-full inline-block truncate-doubleline break-words">
{flow.name}
</span>
{onDelete && (
<button className="flex self-start" onClick={onDelete}>
<Trash2 className="w-4 h-4 text-primary opacity-0 group-hover:opacity-100 transition-all" />

View file

@ -1,4 +1,4 @@
import { useState, useContext } from "react";
import { useContext, useState } from "react";
import { Transition } from "@headlessui/react";
import { Zap } from "lucide-react";
import { validateNodes } from "../../../utils";
@ -10,6 +10,8 @@ import { alertContext } from "../../../contexts/alertContext";
import { postBuildInit } from "../../../controllers/API";
import ShadTooltip from "../../ShadTooltipComponent";
import RadialProgressComponent from "../../RadialProgress";
export default function BuildTrigger({
open,
flow,
@ -21,11 +23,12 @@ export default function BuildTrigger({
setIsBuilt: any;
isBuilt: boolean;
}) {
const [isBuilding, setIsBuilding] = useState(false);
const { updateSSEData } = useSSE();
const { updateSSEData, isBuilding, setIsBuilding, sseData } = useSSE();
const { reactFlowInstance } = useContext(typesContext);
const { setErrorData } = useContext(alertContext);
const { setErrorData, setSuccessData } = useContext(alertContext);
const [isIconTouched, setIsIconTouched] = useState(false);
const eventClick = isBuilding ? "pointer-events-none" : "";
const [progress, setProgress] = useState(0);
async function handleBuild(flow: FlowType) {
try {
@ -47,10 +50,12 @@ export default function BuildTrigger({
const allNodesValid = await streamNodeData(flow);
await enforceMinimumLoadingTime(startTime, minimumLoadingTime);
setIsBuilt(allNodesValid);
if(!allNodesValid) {
if (!allNodesValid) {
setErrorData({
title: "Oops! Looks like you missed something",
list: ["Check nodes and retry. Hover over 🔴 node for status."],
list: [
"Check components and retry. Hover over component status icon 🔴 to inspect.",
],
});
}
} catch (error) {
@ -59,12 +64,10 @@ export default function BuildTrigger({
setIsBuilding(false);
}
}
async function streamNodeData(flow: FlowType) {
// Step 1: Make a POST request to send the flow data and receive a unique session ID
const response = await postBuildInit(flow);
const { flowId } = response.data;
// Step 2: Use the session ID to establish an SSE connection using EventSource
let validationResults = [];
let finished = false;
@ -82,15 +85,25 @@ export default function BuildTrigger({
eventSource.close();
return;
} else if (parsedData.log) {
// If the event is a log, log it
setSuccessData({ title: parsedData.log });
} else {
// Otherwise, process the data
const isValid = processStreamResult(parsedData);
setProgress(parsedData.progress);
validationResults.push(isValid);
}
// Otherwise, process the data
const isValid = processStreamResult(parsedData);
validationResults.push(isValid);
};
eventSource.onerror = (error) => {
eventSource.onerror = (error: any) => {
console.error("EventSource failed:", error);
eventSource.close();
if (error.data) {
const parsedData = JSON.parse(error.data);
setErrorData({ title: parsedData.error });
setIsBuilding(false);
}
};
// Step 3: Wait for the stream to finish
while (!finished) {
@ -124,6 +137,14 @@ export default function BuildTrigger({
}
}
const handleMouseEnter = () => {
setIsIconTouched(true);
};
const handleMouseLeave = () => {
setIsIconTouched(false);
};
return (
<Transition
show={!open}
@ -136,30 +157,30 @@ export default function BuildTrigger({
leaveTo="translate-y-96"
>
<div className={`fixed right-4` + (isBuilt ? " bottom-20" : " bottom-4")}>
<ShadTooltip
delayDuration={500}
content="Build Flow"
side="left"
<div
className={`${eventClick} flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full shadow-md shadow-[#0000002a] hover:shadow-[#00000032] bg-[#E2E7EE] dark:border-gray-600 cursor-pointer`}
onClick={() => {
handleBuild(flow);
}}
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
>
<div
className="flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full shadow-md hover:shadow-sm shadow-btn-shadow hover:shadow-btn-shadow
bg-buildBackground cursor-pointer"
onClick={() => {
handleBuild(flow);
}}
>
<button>
<div className="flex gap-3 items-center">
{isBuilding ? (
// Render your loading animation here when isBuilding is true
<Loading strokeWidth={1.5} className="text-build" />
) : (
<Zap className="sh-6 w-6 fill-build stroke-1 stroke-build"/>
)}
</div>
</button>
</div>
</ShadTooltip>
<button>
<div className="flex gap-3 items-center">
{isBuilding && progress < 1 ? (
// Render your loading animation here when isBuilding is true
<RadialProgressComponent
color={"text-orange-400"}
value={progress}
></RadialProgressComponent>
) : isBuilding ? (
<Loading strokeWidth={1.5} style={{ color: "#fb923c" }} />
) : (
<Zap className="sh-6 w-6 fill-orange-400 stroke-1 stroke-orange-400" />
)}
</div>
</button>
</div>
</div>
</Transition>
);

View file

@ -1,13 +1,9 @@
import {
ChatBubbleLeftEllipsisIcon,
ChatBubbleOvalLeftEllipsisIcon,
PlusSmallIcon,
} from "@heroicons/react/24/outline";
import { useState } from "react";
import { ChatMessageType } from "../../../types/chat";
import { nodeColors } from "../../../utils";
import Convert from "ansi-to-html";
const convert = new Convert({ newline: true });
import { MessageCircle } from "lucide-react";
export default function ChatMessage({ chat }: { chat: ChatMessageType }) {
const [hidden, setHidden] = useState(true);
@ -24,7 +20,7 @@ export default function ChatMessage({ chat }: { chat: ChatMessageType }) {
onClick={() => setHidden((prev) => !prev)}
className="absolute top-2 right-2 cursor-pointer"
>
<ChatBubbleOvalLeftEllipsisIcon className="w-5 h-5 animate-bounce" />
<MessageCircle className="w-5 h-5 animate-bounce" />
</div>
)}
{chat.thought && chat.thought !== "" && !hidden && (

View file

@ -1,5 +1,6 @@
import { Transition } from "@headlessui/react";
import { MessagesSquare } from "lucide-react";
import { alertContext } from "../../../contexts/alertContext";
import { useContext } from "react";
import ShadTooltip from "../../ShadTooltipComponent";
@ -30,26 +31,21 @@ export default function ChatTrigger({ open, setOpen, isBuilt }) {
leaveTo="translate-y-96"
>
<div className="absolute bottom-4 right-3">
<ShadTooltip
delayDuration={500}
content="Chat Interface"
side="left"
<div
className="flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full shadow-md shadow-[#0000002a] hover:shadow-[#00000032]
bg-[#E2E7EE] dark:border-gray-600 cursor-pointer"
onClick={handleClick}
>
<div
className="border flex justify-center items-center py-1 px-3 w-12 h-12 rounded-full bg-almost-dark-blue cursor-pointer"
onClick={handleClick}
>
<button>
<div className="flex gap-3">
<MessagesSquare
className="h-6 w-6 text-medium-light-blue fill-medium-light-blue"
style={{ color: "white" }}
strokeWidth={1.5}
/>
</div>
</button>
</div>
</ShadTooltip>
<button>
<div className="flex gap-3">
<MessagesSquare
className="pth-6 w-6 fill-[#5c8be1] stroke-1 stroke-[#5c8be1]"
style={{ color: "white" }}
strokeWidth={1.5}
/>
</div>
</button>
</div>
</div>
</Transition>
);

View file

@ -16,7 +16,8 @@ export default function Chat({ flow }: ChatType) {
const handleKeyDown = (event: KeyboardEvent) => {
if (
(event.key === "K" || event.key === "k") &&
(event.metaKey || event.ctrlKey)
(event.metaKey || event.ctrlKey) &&
isBuilt
) {
event.preventDefault();
setOpen((oldState) => !oldState);
@ -26,7 +27,7 @@ export default function Chat({ flow }: ChatType) {
return () => {
document.removeEventListener("keydown", handleKeyDown);
};
}, []);
}, [isBuilt]);
useEffect(() => {
// Define an async function within the useEffect hook

View file

@ -1,10 +1,10 @@
import { ArrowTopRightOnSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import CodeAreaModal from "../../modals/codeAreaModal";
import TextAreaModal from "../../modals/textAreaModal";
import { TextAreaComponentType } from "../../types/components";
import { INPUT_STYLE } from "../../constants";
import { ExternalLink } from "lucide-react";
export default function CodeAreaComponent({
value,
@ -69,7 +69,7 @@ export default function CodeAreaComponent({
}}
>
{!editNode && (
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-ring" />
<ExternalLink className="w-6 h-6 hover:text-ring dark:text-gray-300 ml-3" />
)}
</button>
</div>

View file

@ -1,9 +1,9 @@
import { Listbox, Transition } from "@headlessui/react";
import { ChevronUpDownIcon, CheckIcon } from "@heroicons/react/24/outline";
import { Fragment, useState } from "react";
import { Fragment, useEffect, useState } from "react";
import { DropDownComponentType } from "../../types/components";
import { classNames } from "../../utils";
import { INPUT_STYLE } from "../../constants";
import { ChevronsUpDown, Check } from "lucide-react";
export default function Dropdown({
value,
@ -15,6 +15,9 @@ export default function Dropdown({
let [internalValue, setInternalValue] = useState(
value === "" || !value ? "Choose an option" : value
);
useEffect(() => {
setInternalValue(value === "" || !value ? "Choose an option" : value);
}, [value]);
return (
<>
@ -43,8 +46,8 @@ export default function Dropdown({
"pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2"
}
>
<ChevronUpDownIcon
className="h-5 w-5 text-ring"
<ChevronsUpDown
className="h-5 w-5 text-gray-400"
aria-hidden="true"
/>
</span>
@ -97,7 +100,7 @@ export default function Dropdown({
"absolute inset-y-0 right-0 flex items-center pr-4"
)}
>
<CheckIcon
<Check
className={
active
? "h-5 w-5 text-black"

View file

@ -53,7 +53,7 @@ export const MenuBar = ({ flows, tabId }) => {
</Link>
<div className="flex items-center font-medium text-sm rounded-md py-1 px-1.5 gap-0.5">
<DropdownMenu>
<DropdownMenuTrigger>
<DropdownMenuTrigger asChild>
<Button
className="gap-2 flex items-center max-w-[200px]"
variant="primary"
@ -64,11 +64,21 @@ export const MenuBar = ({ flows, tabId }) => {
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent className="w-44">
<DropdownMenuLabel>Edit</DropdownMenuLabel>
<DropdownMenuLabel>Options</DropdownMenuLabel>
<DropdownMenuItem
onClick={() => {
handleAddFlow();
}}
className="cursor-pointer"
>
<Plus className="w-4 h-4 mr-2" />
New
</DropdownMenuItem>
<DropdownMenuItem
onClick={() => {
openPopUp(<FlowSettingsModal />);
}}
className="cursor-pointer"
>
<Settings2 className="w-4 h-4 mr-2 " />
Settings
@ -77,6 +87,7 @@ export const MenuBar = ({ flows, tabId }) => {
onClick={() => {
undo();
}}
className="cursor-pointer"
>
<Undo className="w-4 h-4 mr-2 " />
Undo
@ -85,13 +96,14 @@ export const MenuBar = ({ flows, tabId }) => {
onClick={() => {
redo();
}}
className="cursor-pointer"
>
<Redo className="w-4 h-4 mr-2 " />
Redo
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuLabel>Projects</DropdownMenuLabel>
<DropdownMenuRadioGroup
{/* <DropdownMenuLabel>Projects</DropdownMenuLabel> */}
{/* <DropdownMenuRadioGroup className="max-h-full overflow-scroll"
value={tabId}
onValueChange={(value) => {
setTabId(value);
@ -99,22 +111,20 @@ export const MenuBar = ({ flows, tabId }) => {
>
{flows.map((flow, idx) => {
return (
<Link to={"/flow/" + flow.id} className="flex w-full items-center">
<DropdownMenuRadioItem value={flow.id} className="flex-1 w-full inline-block truncate break-words mr-2">
<Link
to={"/flow/" + flow.id}
className="flex w-full items-center"
>
<DropdownMenuRadioItem
value={flow.id}
className="flex-1 w-full inline-block truncate break-words mr-2"
>
{flow.name}
</DropdownMenuRadioItem>
</Link>
);
})}
</DropdownMenuRadioGroup>
<DropdownMenuItem
onClick={() => {
handleAddFlow();
}}
>
<Plus className="w-4 h-4 mr-2" />
New Project
</DropdownMenuItem>
</DropdownMenuRadioGroup> */}
</DropdownMenuContent>
</DropdownMenu>
</div>

View file

@ -1,6 +1,6 @@
import { SunIcon, MoonIcon, BellIcon, Home, Users2 } from "lucide-react";
import { useContext, useState, useEffect } from "react";
import { FaGithub } from "react-icons/fa";
import { BellIcon, Home, MoonIcon, SunIcon, Users2 } from "lucide-react";
import { useContext, useEffect, useState } from "react";
import { FaDiscord, FaGithub, FaTwitter } from "react-icons/fa";
import { Button } from "../ui/button";
import { TabsContext } from "../../contexts/tabsContext";
import AlertDropdown from "../../alerts/alertDropDown";
@ -10,6 +10,10 @@ import { PopUpContext } from "../../contexts/popUpContext";
import { typesContext } from "../../contexts/typesContext";
import MenuBar from "./components/menuBar";
import { Link, useLocation, useParams } from "react-router-dom";
import { USER_PROJECTS_HEADER } from "../../constants";
import { getRepoStars } from "../../controllers/API";
import { Separator } from "../ui/separator";
import { Bell } from "lucide-react";
export default function Header() {
const { flows, addFlow, tabId } = useContext(TabsContext);
@ -21,6 +25,16 @@ export default function Header() {
const { notificationCenter, setNotificationCenter, setErrorData } =
useContext(alertContext);
const location = useLocation();
const [stars, setStars] = useState(null);
useEffect(() => {
async function fetchStars() {
const starsCount = await getRepoStars("logspace-ai", "langflow");
setStars(starsCount);
}
fetchStars();
}, []);
return (
<div className="w-full h-12 flex justify-between items-center border-b bg-muted">
<div className="flex gap-2 justify-start items-center w-96">
@ -39,7 +53,7 @@ export default function Header() {
size="sm"
>
<Home className="w-4 h-4" />
<div className="flex-1">My Projects</div>
<div className="flex-1">{USER_PROJECTS_HEADER}</div>
</Button>
</Link>
<Link to="/community">
@ -56,24 +70,38 @@ export default function Header() {
</Link>
</div>
<div className="flex justify-end px-2 w-96">
<div className="ml-auto mr-2 flex gap-5">
<Button
asChild
variant="outline"
className="text-muted-foreground "
<div className="ml-auto mr-2 flex gap-5 items-center">
<a
href="https://github.com/logspace-ai/langflow"
target="_blank"
rel="noreferrer"
className="inline-flex shadow-sm items-center justify-center text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:opacity-50 disabled:pointer-events-none ring-offset-background text-gray-600 dark:text-gray-300 border border-input hover:bg-accent hover:text-accent-foreground h-9 px-3 pr-0 rounded-md"
>
<a
href="https://github.com/logspace-ai/langflow"
target="_blank"
rel="noreferrer"
className="flex"
>
<FaGithub className="h-5 w-5 mr-2" />
Join The Community
</a>
</Button>
<FaGithub className="h-5 w-5 mr-2" />
Star
<div className="ml-2 flex text-sm bg-background rounded-md rounded-l-none border px-2 h-9 -mr-px items-center justify-center">
{stars}
</div>
</a>
<a
href="https://twitter.com/logspace_ai"
target="_blank"
rel="noreferrer"
className="text-muted-foreground"
>
<FaTwitter className="h-5 w-5" />
</a>
<a
href="https://discord.gg/EqksyE2EX9"
target="_blank"
rel="noreferrer"
className="text-muted-foreground"
>
<FaDiscord className="h-5 w-5" />
</a>
{/* <Separator orientation="vertical" />
<button
className="text-gray-600 hover:text-gray-500"
className="text-gray-600 hover:text-gray-500 dark:text-gray-300 dark:hover:text-gray-200"
onClick={() => {
setDark(!dark);
}}
@ -107,14 +135,8 @@ export default function Header() {
{notificationCenter && (
<div className="absolute w-1.5 h-1.5 rounded-full bg-destructive right-[3px]"></div>
)}
<BellIcon className="h-5 w-5" aria-hidden="true" />
<Bell className="h-5 w-5" aria-hidden="true" />
</button>
{/* <button>
<img
src="https://github.com/shadcn.png"
className="rounded-full w-8"
/>
</button> */}
</div>
</div>
</div>

View file

@ -1,8 +1,10 @@
import { DocumentMagnifyingGlassIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { alertContext } from "../../contexts/alertContext";
import { FileComponentType } from "../../types/components";
import { TabsContext } from "../../contexts/tabsContext";
import { INPUT_STYLE } from "../../constants";
import { FileSearch2 } from "lucide-react";
import { uploadFile } from "../../controllers/API";
export default function InputFileComponent({
value,
@ -14,7 +16,9 @@ export default function InputFileComponent({
editNode = false,
}: FileComponentType) {
const [myValue, setMyValue] = useState(value);
const [loading, setLoading] = useState(false);
const { setErrorData } = useContext(alertContext);
const { tabId } = useContext(TabsContext);
useEffect(() => {
if (disabled) {
setMyValue("");
@ -23,12 +27,6 @@ export default function InputFileComponent({
}
}, [disabled, onChange]);
function attachFile(fileReadEvent: ProgressEvent<FileReader>) {
fileReadEvent.preventDefault();
const file = fileReadEvent.target.result;
onFileChange(file as string);
}
function checkFileType(fileName: string): boolean {
for (let index = 0; index < suffixes.length; index++) {
if (fileName.endsWith(suffixes[index])) {
@ -43,27 +41,54 @@ export default function InputFileComponent({
}, [value]);
const handleButtonClick = () => {
// Create a file input element
const input = document.createElement("input");
input.type = "file";
input.accept = suffixes.join(",");
input.style.display = "none";
input.multiple = false;
input.style.display = "none"; // Hidden from view
input.multiple = false; // Allow only one file selection
input.onchange = (e: Event) => {
setLoading(true);
// Get the selected file
const file = (e.target as HTMLInputElement).files?.[0];
const fileData = new FileReader();
fileData.onload = attachFile;
// Check if the file type is correct
if (file && checkFileType(file.name)) {
fileData.readAsDataURL(file);
setMyValue(file.name);
onChange(file.name);
// Upload the file
uploadFile(file, tabId)
.then((res) => res.data)
.then((data) => {
console.log("File uploaded successfully");
// Get the file name from the response
const { file_path } = data;
console.log("File name:", file_path);
// Update the state and callback with the name of the file
// sets the value to the user
setMyValue(file.name);
onChange(file.name);
// sets the value that goes to the backend
onFileChange(file_path);
setLoading(false);
})
.catch(() => {
console.error("Error occurred while uploading file");
setLoading(false);
});
} else {
// Show an error if the file type is not allowed
setErrorData({
title:
"Please select a valid file. Only files this files are allowed:",
"Please select a valid file. Only these file types are allowed:",
list: fileTypes,
});
setLoading(false);
}
};
// Trigger the file selection dialog
input.click();
};
@ -73,7 +98,7 @@ export default function InputFileComponent({
disabled ? "pointer-events-none cursor-not-allowed w-full" : "w-full"
}
>
<div className="w-full flex items-center gap-3">
<div className="w-full flex items-center gap-2">
<span
onClick={handleButtonClick}
className={
@ -88,8 +113,11 @@ export default function InputFileComponent({
{myValue !== "" ? myValue : "No file"}
</span>
<button onClick={handleButtonClick}>
{!editNode && (
<DocumentMagnifyingGlassIcon className="w-8 h-8 hover:text-ring" />
{!editNode && !loading && (
<FileSearch2 className="w-6 h-6 hover:text-ring" />
)}
{!editNode && loading && (
<span className="loading loading-spinner loading-sm pl-3 h-8 pointer-events-none"></span>
)}
</button>
</div>

View file

@ -1,10 +1,11 @@
import { PlusIcon, XMarkIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { InputListComponentType } from "../../types/components";
import { TabsContext } from "../../contexts/tabsContext";
import _ from "lodash";
import { INPUT_STYLE } from "../../constants";
import { X, Plus } from "lucide-react";
export default function InputListComponent({
value,
onChange,
@ -59,7 +60,7 @@ export default function InputListComponent({
onChange(inputList);
}}
>
<PlusIcon className={"w-4 h-4 hover:text-ring"} />
<Plus className={"w-4 h-4 hover:text-ring"} />
</button>
) : (
<button
@ -72,7 +73,7 @@ export default function InputListComponent({
onChange(inputList);
}}
>
<XMarkIcon className="w-4 h-4 hover:text-destructive" />
<X className="w-4 h-4 hover:text-red-600" />
</button>
)}
</div>

View file

@ -1,10 +1,10 @@
import { ArrowTopRightOnSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import { TextAreaComponentType } from "../../types/components";
import GenericModal from "../../modals/genericModal";
import { TypeModal } from "../../utils";
import { INPUT_STYLE } from "../../constants";
import { ExternalLink } from "lucide-react";
export default function PromptAreaComponent({
value,
@ -74,7 +74,7 @@ export default function PromptAreaComponent({
}}
>
{!editNode && (
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-ring " />
<ExternalLink className="w-6 h-6 hover:text-ring dark:text-gray-300" />
)}
</button>
</div>

View file

@ -1,10 +1,11 @@
import { ArrowTopRightOnSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useEffect, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import { TextAreaComponentType } from "../../types/components";
import GenericModal from "../../modals/genericModal";
import { TypeModal } from "../../utils";
import { INPUT_STYLE } from "../../constants";
import { ExternalLink } from "lucide-react";
export default function TextAreaComponent({
value,
onChange,
@ -76,7 +77,7 @@ export default function TextAreaComponent({
}}
>
{!editNode && (
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-ring " />
<ExternalLink className="w-6 h-6 hover:text-ring dark:text-gray-300" />
)}
</button>
</div>

View file

@ -47,7 +47,7 @@ const CardDescription = React.forwardRef<
HTMLParagraphElement,
React.HTMLAttributes<HTMLParagraphElement>
>(({ className, ...props }, ref) => (
<p
<div
ref={ref}
className={cn("text-sm text-muted-foreground", className)}
{...props}

View file

@ -0,0 +1,27 @@
"use client";
import * as React from "react";
import * as ProgressPrimitive from "@radix-ui/react-progress";
import { cn } from "../../utils";
const Progress = React.forwardRef<
React.ElementRef<typeof ProgressPrimitive.Root>,
React.ComponentPropsWithoutRef<typeof ProgressPrimitive.Root>
>(({ className, value, ...props }, ref) => (
<ProgressPrimitive.Root
ref={ref}
className={cn(
"relative h-4 w-full overflow-hidden rounded-full bg-secondary",
className
)}
{...props}
>
<ProgressPrimitive.Indicator
className="h-full w-full flex-1 bg-primary transition-all"
style={{ transform: `translateX(-${100 - (value || 0)}%)` }}
/>
</ProgressPrimitive.Root>
));
Progress.displayName = ProgressPrimitive.Root.displayName;
export { Progress };

View file

@ -1,10 +1,10 @@
"use client"
"use client";
import * as React from "react"
import * as TabsPrimitive from "@radix-ui/react-tabs"
import { cn } from "../../utils"
import * as React from "react";
import * as TabsPrimitive from "@radix-ui/react-tabs";
import { cn } from "../../utils";
const Tabs = TabsPrimitive.Root
const Tabs = TabsPrimitive.Root;
const TabsList = React.forwardRef<
React.ElementRef<typeof TabsPrimitive.List>,
@ -18,8 +18,8 @@ const TabsList = React.forwardRef<
)}
{...props}
/>
))
TabsList.displayName = TabsPrimitive.List.displayName
));
TabsList.displayName = TabsPrimitive.List.displayName;
const TabsTrigger = React.forwardRef<
React.ElementRef<typeof TabsPrimitive.Trigger>,
@ -33,8 +33,8 @@ const TabsTrigger = React.forwardRef<
)}
{...props}
/>
))
TabsTrigger.displayName = TabsPrimitive.Trigger.displayName
));
TabsTrigger.displayName = TabsPrimitive.Trigger.displayName;
const TabsContent = React.forwardRef<
React.ElementRef<typeof TabsPrimitive.Content>,
@ -48,7 +48,7 @@ const TabsContent = React.forwardRef<
)}
{...props}
/>
))
TabsContent.displayName = TabsPrimitive.Content.displayName
));
TabsContent.displayName = TabsPrimitive.Content.displayName;
export { Tabs, TabsList, TabsTrigger, TabsContent }
export { Tabs, TabsList, TabsTrigger, TabsContent };

View file

@ -66,7 +66,7 @@ export const getPythonApiCode = (flow: FlowType): string => {
BASE_API_URL = "${window.location.protocol}//${
window.location.host
}/ap1/v1/predict"
}/api/v1/process"
FLOW_ID = "${flowId}"
# You can tweak the flow by adding a tweaks dictionary
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
@ -83,7 +83,7 @@ def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
"""
api_url = f"{BASE_API_URL}/{flow_id}"
payload = {"message": message}
payload = {"inputs": {"input": message}}
if tweaks:
payload["tweaks"] = tweaks
@ -106,9 +106,9 @@ export const getCurlCode = (flow: FlowType): string => {
return `curl -X POST \\
${window.location.protocol}//${
window.location.host
}/api/v1/predict/${flowId} \\
}/api/v1/process/${flowId} \\
-H 'Content-Type: application/json' \\
-d '{"message": "Your message", "tweaks": ${JSON.stringify(
-d '{"inputs": {"input": message}, "tweaks": ${JSON.stringify(
tweaks,
null,
2
@ -121,11 +121,12 @@ export const getCurlCode = (flow: FlowType): string => {
*/
export const getPythonCode = (flow: FlowType): string => {
const flowName = flow.name;
const tweaks = buildTweaks(flow);
return `from langflow import load_flow_from_json
flow = load_flow_from_json("${flowName}.json")
# Now you can use it like any chain
flow("Hey, have you heard of LangFlow?")`;
TWEAKS = ${JSON.stringify(tweaks, null, 2)}
flow = load_flow_from_json("${flowName}.json", tweaks=TWEAKS)
# Now you can use it like any chain
flow("Hey, have you heard of LangFlow?")`;
};
/**
@ -170,3 +171,262 @@ export const NAV_DISPLAY_STYLE =
*/
export const BUTTON_DIV_STYLE =
" flex gap-2 ";
" focus:ring-1 focus:ring-offset-1 focus:ring-ring focus:outline-none ";
/**
* Default description for the flow
* @constant
*/
export const DESCRIPTIONS: string[] = [
"Chain the Words, Master Language!",
"Language Architect at Work!",
"Empowering Language Engineering.",
"Craft Language Connections Here.",
"Create, Connect, Converse.",
"Smart Chains, Smarter Conversations.",
"Bridging Prompts for Brilliance.",
"Language Models, Unleashed.",
"Your Hub for Text Generation.",
"Promptly Ingenious!",
"Building Linguistic Labyrinths.",
"LangFlow: Create, Chain, Communicate.",
"Connect the Dots, Craft Language.",
"Interactive Language Weaving.",
"Generate, Innovate, Communicate.",
"Conversation Catalyst Engine.",
"Language Chainlink Master.",
"Design Dialogues with LangFlow.",
"Nurture NLP Nodes Here.",
"Conversational Cartography Unlocked.",
"Design, Develop, Dialogize.",
];
/**
* Adjectives for the name of the flow
* @constant
*
*/
export const ADJECTIVES: string[] = [
"admiring",
"adoring",
"agitated",
"amazing",
"angry",
"awesome",
"backstabbing",
"berserk",
"big",
"boring",
"clever",
"cocky",
"compassionate",
"condescending",
"cranky",
"desperate",
"determined",
"distracted",
"dreamy",
"drunk",
"ecstatic",
"elated",
"elegant",
"evil",
"fervent",
"focused",
"furious",
"gigantic",
"gloomy",
"goofy",
"grave",
"happy",
"high",
"hopeful",
"hungry",
"insane",
"jolly",
"jovial",
"kickass",
"lonely",
"loving",
"mad",
"modest",
"naughty",
"nauseous",
"nostalgic",
"pedantic",
"pensive",
"prickly",
"reverent",
"romantic",
"sad",
"serene",
"sharp",
"sick",
"silly",
"sleepy",
"small",
"stoic",
"stupefied",
"suspicious",
"tender",
"thirsty",
"tiny",
"trusting",
];
/**
* Nouns for the name of the flow
* @constant
*
*/
export const NOUNS: string[] = [
"albattani",
"allen",
"almeida",
"archimedes",
"ardinghelli",
"aryabhata",
"austin",
"babbage",
"banach",
"bardeen",
"bartik",
"bassi",
"bell",
"bhabha",
"bhaskara",
"blackwell",
"bohr",
"booth",
"borg",
"bose",
"boyd",
"brahmagupta",
"brattain",
"brown",
"carson",
"chandrasekhar",
"colden",
"cori",
"cray",
"curie",
"darwin",
"davinci",
"dijkstra",
"dubinsky",
"easley",
"einstein",
"elion",
"engelbart",
"euclid",
"euler",
"fermat",
"fermi",
"feynman",
"franklin",
"galileo",
"gates",
"goldberg",
"goldstine",
"goldwasser",
"golick",
"goodall",
"hamilton",
"hawking",
"heisenberg",
"heyrovsky",
"hodgkin",
"hoover",
"hopper",
"hugle",
"hypatia",
"jang",
"jennings",
"jepsen",
"joliot",
"jones",
"kalam",
"kare",
"keller",
"khorana",
"kilby",
"kirch",
"knuth",
"kowalevski",
"lalande",
"lamarr",
"leakey",
"leavitt",
"lichterman",
"liskov",
"lovelace",
"lumiere",
"mahavira",
"mayer",
"mccarthy",
"mcclintock",
"mclean",
"mcnulty",
"meitner",
"meninsky",
"mestorf",
"minsky",
"mirzakhani",
"morse",
"murdock",
"newton",
"nobel",
"noether",
"northcutt",
"noyce",
"panini",
"pare",
"pasteur",
"payne",
"perlman",
"pike",
"poincare",
"poitras",
"ptolemy",
"raman",
"ramanujan",
"ride",
"ritchie",
"roentgen",
"rosalind",
"saha",
"sammet",
"shaw",
"shirley",
"shockley",
"sinoussi",
"snyder",
"spence",
"stallman",
"stonebraker",
"swanson",
"swartz",
"swirles",
"tesla",
"thompson",
"torvalds",
"turing",
"varahamihira",
"visvesvaraya",
"volhard",
"wescoff",
"williams",
"wilson",
"wing",
"wozniak",
"wright",
"yalow",
"yonath",
];
/**
* Header text for user projects
* @constant
*
*/
export const USER_PROJECTS_HEADER = "My Collection";

View file

@ -9,6 +9,8 @@ import {
const initialValue = {
updateSSEData: ({}) => {},
sseData: {},
isBuilding: false,
setIsBuilding: (isBuilding: boolean) => {},
};
const SSEContext = createContext(initialValue);
@ -19,6 +21,7 @@ export function useSSE() {
export function SSEProvider({ children }) {
const [sseData, setSSEData] = useState({});
const [isBuilding, setIsBuilding] = useState(false);
const updateSSEData = useCallback((newData: any) => {
setSSEData((prevData) => ({
@ -28,7 +31,9 @@ export function SSEProvider({ children }) {
}, []);
return (
<SSEContext.Provider value={{ sseData, updateSSEData }}>
<SSEContext.Provider
value={{ sseData, updateSSEData, isBuilding, setIsBuilding }}
>
{children}
</SSEContext.Provider>
);

View file

@ -80,7 +80,7 @@ export function AlertProvider({ children }: { children: ReactNode }) {
function setErrorData(newState: { title: string; list?: Array<string> }) {
setErrorDataState(newState);
setErrorOpen(true);
if (newState.title) {
if (newState.title && newState.title !== "") {
setNotificationCenter(true);
pushNotificationList({
type: "error",
@ -97,7 +97,7 @@ export function AlertProvider({ children }: { children: ReactNode }) {
function setNoticeData(newState: { title: string; link?: string }) {
setNoticeDataState(newState);
setNoticeOpen(true);
if (newState.title) {
if (newState.title && newState.title !== "") {
// Add new notice to notification center
setNotificationCenter(true);
pushNotificationList({
@ -117,7 +117,7 @@ export function AlertProvider({ children }: { children: ReactNode }) {
setSuccessOpen(true); // open the success alert
// If the new state has a "title" property, add a new success notification to the list
if (newState.title) {
if (newState.title && newState.title !== "") {
setNotificationCenter(true); // show the notification center
pushNotificationList({
// add the new notification to the list

View file

@ -8,10 +8,15 @@ import {
} from "react";
import { FlowType, NodeType } from "../types/flow";
import { TabsContextType, TabsState } from "../types/tabs";
import { updateIds, updateTemplate } from "../utils";
import {
updateIds,
updateTemplate,
getRandomDescription,
getRandomName,
} from "../utils";
import { alertContext } from "./alertContext";
import { typesContext } from "./typesContext";
import { APITemplateType } from "../types/api";
import { APIClassType, APITemplateType } from "../types/api";
import ShortUniqueId from "short-unique-id";
import { addEdge } from "reactflow";
import {
@ -40,7 +45,7 @@ const TabsContextInitialValue: TabsContextType = {
uploadFlows: () => {},
uploadFlow: () => {},
hardReset: () => {},
saveFlow: async (flow:FlowType) => {},
saveFlow: async (flow: FlowType) => {},
disableCopyPaste: false,
setDisableCopyPaste: (state: boolean) => {},
lastCopiedSelection: null,
@ -187,20 +192,26 @@ export function TabsProvider({ children }: { children: ReactNode }) {
}
function processFlowEdges(flow) {
if(!flow.data || !flow.data.edges) return;
flow.data.edges.forEach((edge) => {
edge.className = "";
edge.style = { stroke: "#555555" };
});
}
function updateDisplay_name(node:NodeType,template:APIClassType) {
node.data.node.display_name = template["display_name"]?template["display_name"]:node.data.type;
}
function processFlowNodes(flow) {
flow.data.nodes.forEach((node) => {
if(!flow.data || !flow.data.nodes) return;
flow.data.nodes.forEach((node:NodeType) => {
const template = templates[node.data.type];
if (!template) {
setErrorData({ title: `Unknown node type: ${node.data.type}` });
return;
}
if (Object.keys(template["template"]).length > 0) {
updateDisplay_name(node,template);
updateNodeBaseClasses(node, template);
updateNodeEdges(flow, node, template);
updateNodeDescription(node, template);
@ -209,11 +220,11 @@ export function TabsProvider({ children }: { children: ReactNode }) {
});
}
function updateNodeBaseClasses(node, template) {
function updateNodeBaseClasses(node:NodeType,template:APIClassType) {
node.data.node.base_classes = template["base_classes"];
}
function updateNodeEdges(flow, node, template) {
function updateNodeEdges(flow:FlowType, node:NodeType,template:APIClassType) {
flow.data.edges.forEach((edge) => {
if (edge.source === node.id) {
edge.sourceHandle = edge.sourceHandle
@ -225,11 +236,11 @@ export function TabsProvider({ children }: { children: ReactNode }) {
});
}
function updateNodeDescription(node, template) {
function updateNodeDescription(node: NodeType, template: APIClassType) {
  // Replace the stored description with the one from the current template
  // (unconditional overwrite, even when the template has none).
  node.data.node.description = template.description;
}
function updateNodeTemplate(node, template) {
function updateNodeTemplate(node:NodeType,template:APIClassType) {
node.data.node.template = updateTemplate(
template["template"] as unknown as APITemplateType,
node.data.node.template as APITemplateType
@ -346,7 +357,6 @@ export function TabsProvider({ children }: { children: ReactNode }) {
*/
function removeFlow(id: string) {
const index = flows.findIndex((flow) => flow.id === id);
console.log(index);
if (index >= 0) {
deleteFlowFromDatabase(id).then(() => {
setFlows(flows.filter((flow) => flow.id !== id));
@ -454,11 +464,13 @@ export function TabsProvider({ children }: { children: ReactNode }) {
if (newProject) {
let flowData = extractDataFromFlow(flow);
if (flowData.description == "") {
flowData.description = "This is a new flow.";
flowData.description = getRandomDescription();
}
// Create a new flow with a default name if no flow is provided.
const newFlow = createNewFlow(flowData, flow);
processFlowEdges(newFlow);
processFlowNodes(newFlow);
try {
const { id } = await saveFlowToDatabase(newFlow);
@ -536,7 +548,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
const createNewFlow = (flowData, flow) => ({
description: flowData.description,
name: flow?.name ?? "New Flow",
name: flow?.name ?? getRandomName(),
data: flowData.data,
id: "",
});
@ -565,10 +577,10 @@ export function TabsProvider({ children }: { children: ReactNode }) {
}
async function saveFlow(newFlow: FlowType) {
try{
try {
// updates flow in db
const updatedFlow = await updateFlowInDatabase(newFlow);
if (updatedFlow){
if (updatedFlow) {
// updates flow in state
setFlows((prevState) => {
const newFlows = [...prevState];
@ -590,8 +602,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
};
});
}
}
catch(err){
} catch (err) {
setErrorData(err);
}
}

View file

@ -51,16 +51,21 @@ export function TypesProvider({ children }: { children: ReactNode }) {
);
// Set the types by reducing over the keys of the result data and updating the accumulator.
setTypes(
Object.keys(result.data).reduce((acc, curr) => {
Object.keys(result.data[curr]).forEach((c: keyof APIKindType) => {
acc[c] = curr;
// Add the base classes to the accumulator as well.
result.data[curr][c].base_classes?.forEach((b) => {
acc[b] = curr;
});
});
return acc;
}, {})
// Reverse the keys so the tool world does not overlap
Object.keys(result.data)
.reverse()
.reduce((acc, curr) => {
Object.keys(result.data[curr]).forEach(
(c: keyof APIKindType) => {
acc[c] = curr;
// Add the base classes to the accumulator as well.
result.data[curr][c].base_classes?.forEach((b) => {
acc[b] = curr;
});
}
);
return acc;
}, {})
);
}
// Clear the interval if successful.

View file

@ -66,8 +66,6 @@ export function UndoRedoProvider({ children }) {
const takeSnapshot = useCallback(() => {
// push the current graph to the past state
console.log(past);
console.log(tabIndex);
setPast((old) => {
let newPast = cloneDeep(old);
newPast[tabIndex] = old[tabIndex].slice(

View file

@ -3,6 +3,7 @@ import {
PromptTypeAPI,
errorsTypeAPI,
InitTypeAPI,
UploadFileTypeAPI,
} from "./../../types/api/index";
import { APIObjectType, sendAllProps } from "../../types/api/index";
import axios, { AxiosResponse } from "axios";
@ -18,6 +19,20 @@ export async function getAll(): Promise<AxiosResponse<APIObjectType>> {
return await axios.get(`/api/v1/all`);
}
const GITHUB_API_URL = "https://api.github.com";
/**
 * Fetches the GitHub star count for a repository.
 *
 * @param owner - Repository owner (user or organization login).
 * @param repo - Repository name.
 * @returns The `stargazers_count` reported by the GitHub REST API, or
 *          `null` when the request fails (network error, rate limit, 404, ...).
 */
export async function getRepoStars(
  owner: string,
  repo: string
): Promise<number | null> {
  try {
    const response = await axios.get(
      `${GITHUB_API_URL}/repos/${owner}/${repo}`
    );
    return response.data.stargazers_count;
  } catch (error) {
    // Best-effort: log and report "unknown" rather than propagate a
    // failure for a purely cosmetic star counter.
    console.error("Error fetching repository data:", error);
    return null;
  }
}
/**
* Sends data to the API for prediction.
*
@ -34,13 +49,6 @@ export async function postValidateCode(
return await axios.post("/api/v1/validate/code", { code });
}
export async function postValidateNode(
nodeId: string,
data: any
): Promise<AxiosResponse<string>> {
return await axios.post(`/api/v1/validate/node/${nodeId}`, { data });
}
/**
* Checks the prompt for the code block by sending it to an API endpoint.
*
@ -305,3 +313,21 @@ export async function postBuildInit(
): Promise<AxiosResponse<InitTypeAPI>> {
return await axios.post(`/api/v1/build/init`, flow);
}
// fetch(`/upload/${id}`, {
// method: "POST",
// body: formData,
// });
/**
 * Uploads a file to the server as multipart form data.
 *
 * @param {File} file - The file to upload.
 * @param {string} id - The ID of the flow the file belongs to.
 * @returns The axios response from the upload endpoint.
 */
export async function uploadFile(
  file: File,
  id: string
): Promise<AxiosResponse<UploadFileTypeAPI>> {
  const endpoint = `/api/v1/upload/${id}`;
  // Wrap the file in the multipart payload the endpoint expects.
  const payload = new FormData();
  payload.append("file", file);
  return await axios.post(endpoint, payload);
}

View file

@ -1,9 +1,9 @@
import React, { forwardRef } from "react";
import { ReactComponent as MidjorneySVG } from "./Midjourney_Emblem.svg";
import { ReactComponent as MidjourneySVG } from "./Midjourney_Emblem.svg";
export const MidjorneyIcon = forwardRef<
export const MidjourneyIcon = forwardRef<
SVGSVGElement,
React.PropsWithChildren<{}>
>((props, ref) => {
return <MidjorneySVG ref={ref} {...props} />;
return <MidjourneySVG ref={ref} {...props} />;
});

View file

@ -0,0 +1,9 @@
import React, { forwardRef } from "react";
import { ReactComponent as MongoDBSVG } from "./mongodb-icon.svg";
export const MongoDBIcon = forwardRef<
SVGSVGElement,
React.PropsWithChildren<{}>
>((props, ref) => {
return <MongoDBSVG ref={ref} {...props} />;
});

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="0 0 32 32"><path d="M15.9.087l.854 1.604c.192.296.4.558.645.802.715.715 1.394 1.464 2.004 2.266 1.447 1.9 2.423 4.01 3.12 6.292.418 1.394.645 2.824.662 4.27.07 4.323-1.412 8.035-4.4 11.12-.488.488-1.01.94-1.57 1.342-.296 0-.436-.227-.558-.436-.227-.383-.366-.82-.436-1.255-.105-.523-.174-1.046-.14-1.586v-.244C16.057 24.21 15.796.21 15.9.087z" fill="#599636"/><path d="M15.9.034c-.035-.07-.07-.017-.105.017.017.35-.105.662-.296.96-.21.296-.488.523-.767.767-1.55 1.342-2.77 2.963-3.747 4.776-1.3 2.44-1.97 5.055-2.16 7.808-.087.993.314 4.497.627 5.508.854 2.684 2.388 4.933 4.375 6.885.488.47 1.01.906 1.55 1.325.157 0 .174-.14.21-.244a4.78 4.78 0 0 0 .157-.68l.35-2.614L15.9.034z" fill="#6cac48"/><path d="M16.754 28.845c.035-.4.227-.732.436-1.063-.21-.087-.366-.26-.488-.453-.105-.174-.192-.383-.26-.575-.244-.732-.296-1.5-.366-2.248v-.453c-.087.07-.105.662-.105.75a17.37 17.37 0 0 1-.314 2.353c-.052.314-.087.627-.28.906 0 .035 0 .07.017.122.314.924.4 1.865.453 2.824v.35c0 .418-.017.33.33.47.14.052.296.07.436.174.105 0 .122-.087.122-.157l-.052-.575v-1.604c-.017-.28.035-.558.07-.82z" fill="#c2bfbf"/></svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View file

@ -0,0 +1,9 @@
import React, { forwardRef } from "react";
import { ReactComponent as PineconeSVG } from "./pinecone_logo.svg";
export const PineconeIcon = forwardRef<
SVGSVGElement,
React.PropsWithChildren<{}>
>((props, ref) => {
return <PineconeSVG ref={ref} {...props} />;
});

View file

@ -0,0 +1,21 @@
<svg width="32" height="35" viewBox="0 0 32 35" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.8555 34.2962C14.9325 34.2962 15.8055 33.4451 15.8055 32.3954C15.8055 31.3456 14.9325 30.4946 13.8555 30.4946C12.7786 30.4946 11.9055 31.3456 11.9055 32.3954C11.9055 33.4451 12.7786 34.2962 13.8555 34.2962Z" fill="black"/>
<path d="M18.4138 7.19675L19.2512 2.66005" stroke="black" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M22.2656 5.5855L19.3466 2.11099L15.3748 4.37292" stroke="black" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M14.9202 26.5528L15.7337 22.0169" stroke="black" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M18.7729 24.9304L15.83 21.4671L11.8701 23.741" stroke="black" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M16.6077 17.1996L17.4212 12.6633" stroke="black" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M20.4587 15.58L17.5277 12.128L13.5679 14.3904" stroke="black" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M8.32871 26.1554L4.75171 28.5815" stroke="black" stroke-width="2.01017" stroke-linecap="square"/>
<path d="M8.54383 30.0865L4.3208 28.8738L4.63185 24.5944" stroke="black" stroke-width="2.01017" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M21.3213 28.4299L23.8096 31.9282" stroke="black" stroke-width="2.01017" stroke-linecap="square"/>
<path d="M19.718 32.045L24.1085 32.3365L25.3527 28.2438" stroke="black" stroke-width="2.01017" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M25.3999 21.3291L29.7784 22.0996" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M26.9072 25.072L30.3048 22.1919L28.1634 18.3557" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M24.1196 12.8615L28.0197 10.763" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M24.3357 8.83965L28.4869 10.5188L27.7093 14.8216" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M6.91639 18.1572L2.52588 17.4101" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M4.17731 21.1645L2 17.328L5.36167 14.436" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M11.0799 10.6129L8.14893 7.34769" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M12.2897 6.77496L7.80349 6.96156L7.01392 11.2649" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 2.5 KiB

View file

@ -0,0 +1,9 @@
import React, { forwardRef } from "react";
import { ReactComponent as SupabaseSvg } from "./supabase-icon.svg";
export const SupabaseIcon = forwardRef<
SVGSVGElement,
React.PropsWithChildren<{}>
>((props, ref) => {
return <SupabaseSvg ref={ref} {...props} />;
});

View file

@ -0,0 +1,99 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="64"
height="64"
viewBox="0 0 64 64"
version="1.1"
id="svg20"
sodipodi:docname="supabase-icon.svg"
style="fill:none"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)">
<metadata
id="metadata24">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1687"
inkscape:window-height="849"
id="namedview22"
showgrid="false"
inkscape:zoom="2.0884956"
inkscape:cx="54.5"
inkscape:cy="56.5"
inkscape:window-x="70"
inkscape:window-y="0"
inkscape:window-maximized="0"
inkscape:current-layer="svg20" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path2"
style="fill:url(#paint0_linear);stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path4"
style="fill:url(#paint1_linear);fill-opacity:0.2;stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 26.89694,1.0634102 c 1.634986,-2.05918508 4.950125,-0.93090008 4.989521,1.698149 L 32.138899,41.214003 H 6.607076 c -4.6832501,0 -7.29518376,-5.409032 -4.3830007,-9.07673 z"
id="path6"
inkscape:connector-curvature="0"
style="fill:#3ecf8e;stroke-width:0.57177335" />
<defs
id="defs18">
<linearGradient
id="paint0_linear"
x1="53.973801"
y1="54.973999"
x2="94.163498"
y2="71.829498"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
stop-color="#249361"
id="stop8" />
<stop
offset="1"
stop-color="#3ECF8E"
id="stop10" />
</linearGradient>
<linearGradient
id="paint1_linear"
x1="36.1558"
y1="30.577999"
x2="54.484402"
y2="65.080597"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
id="stop13" />
<stop
offset="1"
stop-opacity="0"
id="stop15" />
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 3.2 KiB

View file

@ -71,35 +71,35 @@
--almost-light-blue: #dbeafe; tailwind blue 100 */
}
.dark {
--background: 224 71% 4%; /* hsl(224 71% 4%) */
--foreground: 213 31% 91%; /* hsl(213 31% 91%) */
.dark {
--background: 224 71% 4%; /* hsl(224 71% 4%) */
--foreground: 213 31% 91%; /* hsl(213 31% 91%) */
--muted: 223 47% 11%; /* hsl(223 47% 11%) */
--muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */
--muted: 223 47% 11%; /* hsl(223 47% 11%) */
--muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */
--popover: 224 71% 4%; /* hsl(224 71% 4%) */
--popover-foreground: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
--popover: 224 71% 4%; /* hsl(224 71% 4%) */
--popover-foreground: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
--card: 224 71% 4%; /* hsl(224 71% 4%) */
--card-foreground: 213 31% 91%; /* hsl(213 31% 91%) */
--card: 224 71% 4%; /* hsl(224 71% 4%) */
--card-foreground: 213 31% 91%; /* hsl(213 31% 91%) */
--border: 216 34% 17%; /* hsl(216 34% 17%) */
--input: 216 34% 17%; /* hsl(216 34% 17%) */
--border: 216 34% 17%; /* hsl(216 34% 17%) */
--input: 216 34% 17%; /* hsl(216 34% 17%) */
--primary: 210 40% 98%; /* hsl(210 40% 98%) */
--primary-foreground: 222.2 47.4% 1.2%; /* hsl(222 47% 1%) */
--primary: 210 40% 98%; /* hsl(210 40% 98%) */
--primary-foreground: 222.2 47.4% 1.2%; /* hsl(222 47% 1%) */
--secondary: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
--secondary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--secondary: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
--secondary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--accent: 216 34% 17%; /* hsl(216 34% 17%) */
--accent-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--accent: 216 34% 17%; /* hsl(216 34% 17%) */
--accent-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--destructive: 0 63% 31%; /* hsl(0 63% 31%) */
--destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--destructive: 0 63% 31%; /* hsl(0 63% 31%) */
--destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--ring: 216 34% 17%; /* hsl(216 34% 17%) */
--ring: 216 34% 17%; /* hsl(216 34% 17%) */
--radius: 0.5rem;
@ -118,7 +118,7 @@
--card-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
--border: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */
--input: 214.3 21.8% 91.4%; /* hsl(214 32% 91%) */
--primary: 222.2 27.0% 11.2%; /* hsl(222 27% 18%) */
--primary: 222.2 27% 11.2%; /* hsl(222 27% 18%) */
--primary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--secondary: 210 40% 96.1%; /* hsl(210 40% 96%) */
--secondary-foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
@ -222,4 +222,4 @@ code {
The cursor: default; property value restores the browser's default cursor style for the targeted element. By applying this style, the element will no longer have a custom cursor appearance such as "grab" or any other custom cursor defined elsewhere in the application. Instead, it will revert to the default cursor style determined by the browser, typically an arrow-shaped cursor. */
.react-flow__pane {
cursor: default;
}
}

View file

@ -1,4 +1,3 @@
import { CodeBracketSquareIcon } from "@heroicons/react/24/outline";
import { useContext, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import "ace-builds/src-noconflict/mode-python";
@ -26,7 +25,7 @@ import {
TabsList,
TabsTrigger,
} from "../../components/ui/tabs";
import { Check, Clipboard } from "lucide-react";
import { Check, Clipboard, Code2 } from "lucide-react";
export default function ApiModal({ flow }: { flow: FlowType }) {
const [open, setOpen] = useState(true);
@ -88,8 +87,8 @@ export default function ApiModal({ flow }: { flow: FlowType }) {
<DialogHeader>
<DialogTitle className="flex items-center">
<span className="pr-2">Code</span>
<CodeBracketSquareIcon
className="h-6 w-6 text-foreground pl-1"
<Code2
className="h-6 w-6 text-gray-800 pl-1 dark:text-white"
aria-hidden="true"
/>
</DialogTitle>
@ -104,7 +103,7 @@ export default function ApiModal({ flow }: { flow: FlowType }) {
<div className="flex items-center justify-between px-2">
<TabsList>
{tabs.map((tab, index) => (
<TabsTrigger value={index.toString()}>{tab.name}</TabsTrigger>
<TabsTrigger key={index} value={index.toString()}>{tab.name}</TabsTrigger>
))}
</TabsList>
<div className="float-right">

View file

@ -12,7 +12,6 @@ import {
TableRow,
} from "../../components/ui/table";
import ToggleShadComponent from "../../components/toggleShadComponent";
import { VariableIcon } from "@heroicons/react/24/outline";
import InputListComponent from "../../components/inputListComponent";
import TextAreaComponent from "../../components/textAreaComponent";
import InputComponent from "../../components/inputComponent";
@ -33,6 +32,7 @@ import {
} from "../../components/ui/dialog";
import { Button } from "../../components/ui/button";
import { Badge } from "../../components/ui/badge";
import { Variable } from "lucide-react";
export default function EditNodeModal({ data }: { data: NodeDataType }) {
const [open, setOpen] = useState(true);
@ -79,8 +79,8 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
}
return (
<Dialog open={true} onOpenChange={setModalOpen}>
<DialogTrigger></DialogTrigger>
<Dialog open={true} onOpenChange={setModalOpen} >
<DialogTrigger asChild></DialogTrigger>
<DialogContent className="lg:max-w-[700px] ">
<DialogHeader>
<DialogTitle className="flex items-center">
@ -90,10 +90,8 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
<DialogDescription>
{data.node?.description}
<div className="flex pt-4">
<VariableIcon className="w-5 h-5 pe-1 text-foreground stroke-2">
&nbsp;
</VariableIcon>
<span className="text-sm font-semibold text-foreground">
<Variable className="w-5 h-5 pe-1 text-gray-700 stroke-2 dark:text-slate-200"></Variable>
<span className="text-sm font-semibold text-gray-800 dark:text-white">
Parameters
</span>
</div>

View file

@ -101,6 +101,7 @@ export default function ModalField({
data.node.template[name].value = t;
setEnabled(t);
}}
size="small"
/>
</div>
) : type === "float" ? (

View file

@ -1,5 +1,4 @@
import { Dialog, Transition } from "@headlessui/react";
import { XMarkIcon } from "@heroicons/react/24/outline";
import { Fragment, useContext, useRef, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import { NodeDataType } from "../../types/flow";
@ -13,6 +12,7 @@ import {
} from "../../utils";
import { typesContext } from "../../contexts/typesContext";
import ModalField from "./components/ModalField";
import { X } from "lucide-react";
export default function NodeModal({ data }: { data: NodeDataType }) {
const [open, setOpen] = useState(true);
@ -27,7 +27,8 @@ export default function NodeModal({ data }: { data: NodeDataType }) {
}, 300);
}
}
const Icon = nodeIcons[types[data.type]];
// any to avoid type conflict
const Icon: any = nodeIcons[types[data.type]];
return (
<Transition.Root show={open} appear={true} as={Fragment}>
<Dialog
@ -69,7 +70,7 @@ export default function NodeModal({ data }: { data: NodeDataType }) {
}}
>
<span className="sr-only">Close</span>
<XMarkIcon className="h-6 w-6" aria-hidden="true" />
<X className="h-6 w-6" aria-hidden="true" />
</button>
</div>
<div className="h-full w-full flex flex-col justify-center items-center">

View file

@ -1,8 +1,9 @@
import { LockClosedIcon, PaperAirplaneIcon } from "@heroicons/react/24/outline";
import { classNames } from "../../../utils";
import { useContext, useEffect, useRef, useState } from "react";
import { TabsContext } from "../../../contexts/tabsContext";
import { INPUT_STYLE } from "../../../constants";
import { Lock, Send } from "lucide-react";
export default function ChatInput({
lockChat,
chatValue,
@ -10,6 +11,12 @@ export default function ChatInput({
setChatValue,
inputRef,
}) {
useEffect(() => {
if (!lockChat && inputRef.current) {
inputRef.current.focus();
}
}, [lockChat, inputRef]);
useEffect(() => {
if (inputRef.current) {
inputRef.current.style.height = "inherit"; // Reset the height
@ -54,13 +61,13 @@ export default function ChatInput({
<div className="absolute bottom-0.5 right-3">
<button disabled={lockChat} onClick={() => sendMessage()}>
{lockChat ? (
<LockClosedIcon
className="h-5 w-5 text-ring animate-pulse"
<Lock
className="h-5 w-5 text-gray-500 dark:hover:text-gray-300 animate-pulse"
aria-hidden="true"
/>
) : (
<PaperAirplaneIcon
className="h-5 w-5 text-ring hover:text-muted-foreground"
<Send
className="h-5 w-5 text-gray-500 hover:text-gray-600 dark:hover:text-gray-300"
aria-hidden="true"
/>
)}

View file

@ -1,10 +1,8 @@
import { ChatBubbleOvalLeftEllipsisIcon } from "@heroicons/react/24/outline";
import { useEffect, useRef, useState } from "react";
import { ChatMessageType } from "../../../types/chat";
import { classNames } from "../../../utils";
import AiIcon from "../../../assets/Gooey Ring-5s-271px.svg";
import AiIconStill from "../../../assets/froze-flow.png";
import { UserIcon } from "@heroicons/react/24/solid";
import FileCard from "../fileComponent";
import ReactMarkdown from "react-markdown";
import rehypeMathjax from "rehype-mathjax";
@ -12,6 +10,7 @@ import remarkGfm from "remark-gfm";
import remarkMath from "remark-math";
import { CodeBlock } from "./codeBlock";
import Convert from "ansi-to-html";
import { User2, MessageCircle } from "lucide-react";
export default function ChatMessage({
chat,
@ -62,7 +61,7 @@ export default function ChatMessage({
</div>
)}
{chat.isSend && (
<UserIcon className="w-6 h-6 -mb-1 text-foreground " />
<User2 className="w-6 h-6 -mb-1 text-gray-800 dark:text-gray-200" />
)}
</div>
{!chat.isSend ? (
@ -73,7 +72,7 @@ export default function ChatMessage({
onClick={() => setHidden((prev) => !prev)}
className="absolute -top-1 -left-2 cursor-pointer"
>
<ChatBubbleOvalLeftEllipsisIcon className="w-5 h-5 animate-bounce" />
<MessageCircle className="w-5 h-5 animate-bounce dark:text-white" />
</div>
)}
{chat.thought && chat.thought !== "" && !hidden && (
@ -152,7 +151,7 @@ export default function ChatMessage({
</div>
) : (
<div className="w-full flex items-center">
<div className="text-start inline-block px-3 text-sm text-muted-foreground">
<div className="text-start inline-block px-3 text-gray-600 dark:text-white">
<span
className="text-muted-foreground "
dangerouslySetInnerHTML={{

View file

@ -1,6 +1,6 @@
import { CloudArrowDownIcon, DocumentIcon } from "@heroicons/react/24/outline";
import * as base64js from "base64-js";
import { useState } from "react";
import { DownloadCloud, File } from "lucide-react";
export default function FileCard({ fileName, content, fileType }) {
const handleDownload = () => {
@ -43,7 +43,7 @@ export default function FileCard({ fileName, content, fileType }) {
className="text-ring py-1 px-2 "
onClick={handleDownload}
>
<CloudArrowDownIcon className="hover:scale-110 w-5 h-5 text-current"></CloudArrowDownIcon>
<DownloadCloud className="hover:scale-110 w-5 h-5 text-current" />
</button>
</div>
)}
@ -65,14 +65,14 @@ export default function FileCard({ fileName, content, fileType }) {
className="w-8 h-8"
/>
) : (
<DocumentIcon className="w-8 h-8" />
<File className="w-8 h-8" />
)}
<div className="flex flex-col items-start">
{" "}
<div className="truncate text-sm text-current">{fileName}</div>
<div className="truncate text-xs text-ring">{fileType}</div>
</div>
<CloudArrowDownIcon className="w-6 h-6 text-current ml-auto" />
<DownloadCloud className="w-6 h-6 text-current ml-auto" />
</div>
</button>
);

View file

@ -1,13 +1,11 @@
import { Dialog, Transition } from "@headlessui/react";
import { ChatBubbleOvalLeftEllipsisIcon } from "@heroicons/react/24/outline";
import { Fragment, useContext, useEffect, useRef, useState } from "react";
import { FlowType } from "../../types/flow";
import { alertContext } from "../../contexts/alertContext";
import { validateNodes } from "../../utils";
import { typesContext } from "../../contexts/typesContext";
import ChatMessage from "./chatMessage";
import { FaEraser } from "react-icons/fa";
import { HiX } from "react-icons/hi";
import { X, MessagesSquare, Eraser } from "lucide-react";
import { sendAllProps } from "../../types/api";
import { ChatMessageType } from "../../types/chat";
import ChatInput from "./chatInput";
@ -352,13 +350,13 @@ export default function ChatModal({
onClick={() => clearChat()}
className="absolute top-2 right-10 hover:text-status-red text-muted-foreground z-30"
>
<FaEraser className="w-4 h-4" />
<Eraser className="w-4 h-4" />
</button>
<button
onClick={() => setModalOpen(false)}
className="absolute top-1.5 right-2 hover:text-status-red text-muted-foreground z-30"
>
<HiX className="w-5 h-5" />
<X className="w-5 h-5" />
</button>
</div>
<div
@ -387,7 +385,7 @@ export default function ChatModal({
<span className="text-base text-ring">
Start a conversation and click the agents thoughts{" "}
<span>
<ChatBubbleOvalLeftEllipsisIcon className="w-6 h-6 inline animate-bounce " />
<MessagesSquare className="w-5 h-5 inline animate-bounce mx-1 " />
</span>{" "}
to inspect the chaining process.
</span>

View file

@ -1,4 +1,3 @@
import { XMarkIcon, CommandLineIcon } from "@heroicons/react/24/outline";
import { Fragment, useContext, useRef, useState } from "react";
import { PopUpContext } from "../../contexts/popUpContext";
import AceEditor from "react-ace";
@ -22,6 +21,7 @@ import {
} from "../../components/ui/dialog";
import { Button } from "../../components/ui/button";
import { CODE_PROMPT_DIALOG_SUBTITLE } from "../../constants";
import { TerminalSquare } from "lucide-react";
export default function CodeAreaModal({
value,
@ -51,8 +51,8 @@ export default function CodeAreaModal({
<DialogHeader>
<DialogTitle className="flex items-center">
<span className="pr-2">Edit Code</span>
<CommandLineIcon
className="h-6 w-6 text-foreground pl-1"
<TerminalSquare
className="h-6 w-6 text-gray-800 pl-1 dark:text-white"
aria-hidden="true"
/>
</DialogTitle>

Some files were not shown because too many files have changed in this diff Show more