Merge branch 'zustand/io/migration' of github.com:logspace-ai/langflow into zustand/io/migration
commit afe0d42dd0

52 changed files with 163 additions and 83 deletions
@@ -9,9 +9,6 @@ class TextInput(CustomComponent):
     description = "Used to pass text input to the next component."
 
     field_config = {
-        "code": {
-            "show": False,
-        },
         "value": {"display_name": "Value"},
     }
src/backend/langflow/components/io/TextOutput.py (new file, 19 lines)

@@ -0,0 +1,19 @@
+from typing import Optional
+
+from langflow import CustomComponent
+from langflow.field_typing import Text
+
+
+class TextOutput(CustomComponent):
+    display_name = "Text Output"
+    description = "Used to pass text output to the next component."
+
+    field_config = {
+        "value": {"display_name": "Value"},
+    }
+
+    def build(self, value: Optional[str] = "") -> Text:
+        self.status = value
+        if not value:
+            value = ""
+        return value
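
A quick sanity check of the new component's build contract: self.status mirrors the input and any falsy value comes back as the empty string. This is a hypothetical standalone driver, not part of the commit; it assumes TextOutput can be instantiated with no arguments:

    from langflow.components.io.TextOutput import TextOutput

    component = TextOutput()
    assert component.build(value="hello") == "hello"
    # None and "" are both normalized to the empty string.
    assert component.build(value=None) == ""
    assert component.build() == ""
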
@@ -1,13 +1,13 @@
 from typing import Optional
 
 from langchain_community.chat_models.bedrock import BedrockChat
-from langflow.field_typing import Text
 
 from langflow import CustomComponent
+from langflow.field_typing import Text
 
 
 class AmazonBedrockComponent(CustomComponent):
-    display_name: str = "Amazon Bedrock model"
+    display_name: str = "Amazon Bedrock Model"
     description: str = "Generate text using LLM model from Amazon Bedrock."
 
     def build_config(self):
@@ -2,14 +2,13 @@ from typing import Optional
 
 from langchain_community.chat_models.anthropic import ChatAnthropic
 from pydantic.v1 import SecretStr
 
+from langflow import CustomComponent
 from langflow.field_typing import Text
 
 
-from langflow import CustomComponent
-
 
 class AnthropicLLM(CustomComponent):
-    display_name: str = "Anthropic model"
+    display_name: str = "AnthropicModel"
     description: str = "Generate text using Anthropic Chat&Completion large language models."
 
     def build_config(self):
@@ -67,7 +66,7 @@ class AnthropicLLM(CustomComponent):
         try:
             output = ChatAnthropic(
                 model_name=model,
-                anthropic_api_key=SecretStr(anthropic_api_key) if anthropic_api_key else None,
+                anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),
                 max_tokens_to_sample=max_tokens,  # type: ignore
                 temperature=temperature,
                 anthropic_api_url=api_endpoint,
@@ -7,7 +7,7 @@ from langflow import CustomComponent
 
 
 class AzureChatOpenAIComponent(CustomComponent):
-    display_name: str = "AzureOpenAI model"
+    display_name: str = "AzureOpenAIModel"
     description: str = "Generate text using LLM model from Azure OpenAI."
     documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai"
     beta = False
@@ -1,13 +1,13 @@
 from typing import Dict, Optional
 
 from langchain_community.llms.ctransformers import CTransformers
-from langflow.field_typing import Text
 
 from langflow import CustomComponent
+from langflow.field_typing import Text
 
 
 class CTransformersComponent(CustomComponent):
-    display_name = "CTransformers model"
+    display_name = "CTransformersModel"
     description = "Generate text using CTransformers LLM models"
     documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers"
@@ -31,7 +31,14 @@ class CTransformersComponent(CustomComponent):
         "inputs": {"display_name": "Input"},
     }
 
-    def build(self, model: str, model_file: str, inputs: str, model_type: str, config: Optional[Dict] = None) -> Text:
+    def build(
+        self,
+        model: str,
+        model_file: str,
+        inputs: str,
+        model_type: str,
+        config: Optional[Dict] = None,
+    ) -> Text:
         output = CTransformers(model=model, model_file=model_file, model_type=model_type, config=config)
         message = output.invoke(inputs)
         result = message.content if hasattr(message, "content") else message
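
The `message.content if hasattr(message, "content") else message` line recurs across these model components: chat models return a message object carrying .content, while plain LLMs return a bare string. A minimal sketch of that normalization, using a hypothetical stand-in message class rather than langchain's real one:

    class StubChatMessage:
        # Stand-in for a chat model's response object.
        def __init__(self, content: str):
            self.content = content

    def normalize(message) -> str:
        # Unwrap chat responses; pass plain strings through untouched.
        return message.content if hasattr(message, "content") else message

    assert normalize(StubChatMessage("hi")) == "hi"
    assert normalize("hi") == "hi"
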
@@ -1,18 +1,33 @@
 from langchain_community.chat_models.cohere import ChatCohere
 
 from langflow import CustomComponent
 from langflow.field_typing import Text
 
 
 class CohereComponent(CustomComponent):
-    display_name = "Cohere model"
+    display_name = "CohereModel"
     description = "Generate text using Cohere large language models."
     documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere"
 
     def build_config(self):
         return {
-            "cohere_api_key": {"display_name": "Cohere API Key", "type": "password", "password": True},
-            "max_tokens": {"display_name": "Max Tokens", "default": 256, "type": "int", "show": True},
-            "temperature": {"display_name": "Temperature", "default": 0.75, "type": "float", "show": True},
+            "cohere_api_key": {
+                "display_name": "Cohere API Key",
+                "type": "password",
+                "password": True,
+            },
+            "max_tokens": {
+                "display_name": "Max Tokens",
+                "default": 256,
+                "type": "int",
+                "show": True,
+            },
+            "temperature": {
+                "display_name": "Temperature",
+                "default": 0.75,
+                "type": "float",
+                "show": True,
+            },
             "inputs": {"display_name": "Input"},
         }
 
@@ -23,8 +38,13 @@ class CohereComponent(CustomComponent):
         max_tokens: int = 256,
         temperature: float = 0.75,
     ) -> Text:
-        output = ChatCohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature)
+        output = ChatCohere(
+            cohere_api_key=cohere_api_key,
+            max_tokens=max_tokens,
+            temperature=temperature,
+        )
         message = output.invoke(inputs)
         result = message.content if hasattr(message, "content") else message
-        return result
+        self.status = result
+        return result
@@ -1,14 +1,14 @@
 from typing import Optional
 
 from langchain_google_genai import ChatGoogleGenerativeAI  # type: ignore
-from langflow import CustomComponent
-from langflow.field_typing import RangeSpec
 from pydantic.v1.types import SecretStr
-from langflow.field_typing import Text
+
+from langflow import CustomComponent
+from langflow.field_typing import RangeSpec, Text
 
 
 class GoogleGenerativeAIComponent(CustomComponent):
-    display_name: str = "Google Generative AI model"
+    display_name: str = "Google Generative AIModel"
     description: str = "Generate text using Google Generative AI to generate text."
     documentation: str = "http://docs.langflow.org/components/custom"
@@ -1,11 +1,13 @@
-from typing import Optional, List, Dict, Any
-from langflow import CustomComponent
+from typing import Any, Dict, List, Optional
 
 from langchain_community.llms.llamacpp import LlamaCpp
 
+from langflow import CustomComponent
+from langflow.field_typing import Text
 
 
 class LlamaCppComponent(CustomComponent):
-    display_name = "LlamaCpp model"
+    display_name = "LlamaCppModel"
     description = "Generate text using llama.cpp model."
     documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp"
@@ -17,7 +19,10 @@ class LlamaCppComponent(CustomComponent):
             "echo": {"display_name": "Echo", "advanced": True},
             "f16_kv": {"display_name": "F16 KV", "advanced": True},
             "grammar_path": {"display_name": "Grammar Path", "advanced": True},
-            "last_n_tokens_size": {"display_name": "Last N Tokens Size", "advanced": True},
+            "last_n_tokens_size": {
+                "display_name": "Last N Tokens Size",
+                "advanced": True,
+            },
             "logits_all": {"display_name": "Logits All", "advanced": True},
             "logprobs": {"display_name": "Logprobs", "advanced": True},
             "lora_base": {"display_name": "Lora Base", "advanced": True},
@@ -134,3 +139,5 @@ class LlamaCppComponent(CustomComponent):
         result = message.content if hasattr(message, "content") else message
-        self.status = result
-        return result
+        self.status = result
+        return result
@@ -12,7 +12,7 @@ from langflow.field_typing import Text
 
 
 class ChatOllamaComponent(CustomComponent):
-    display_name = "ChatOllama model"
+    display_name = "ChatOllamaModel"
     description = "Generate text using Local LLM for chat with Ollama."
 
     def build_config(self) -> dict:
@@ -7,7 +7,7 @@ from langflow.field_typing import Text
 
 
 class ChatVertexAIComponent(CustomComponent):
-    display_name = "ChatVertexAI model"
+    display_name = "ChatVertexAIModel"
     description = "Generate text using Vertex AI Chat large language models API."
 
     def build_config(self):
@@ -13,6 +13,7 @@ from langflow.graph.vertex.types import (
     ChatVertex,
     FileToolVertex,
     LLMVertex,
+    RoutingVertex,
     ToolkitVertex,
 )
 from langflow.interface.tools.constants import FILE_TOOLS
@@ -402,10 +403,12 @@ class Graph:
         node_name = node_id.split("-")[0]
         if node_name in ["ChatOutput", "ChatInput"]:
             return ChatVertex
-        if node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
-            return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name]
+        elif node_name in ["ShouldRunNext"]:
+            return RoutingVertex
+        elif node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
+            return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_base_type]
+        elif node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
+            return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name]
 
         if node_type in FILE_TOOLS:
             return FileToolVertex
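
The rewritten lookup gives exact chat names top priority, then the ShouldRunNext routing sentinel, then the vertex map keyed by base type before node name. A standalone sketch of that precedence, with a hypothetical map and string stand-ins rather than langflow's real vertex registry:

    VERTEX_TYPE_MAP = {"TextInput": "IOVertex", "LLMChain": "ChainVertex"}

    def resolve(node_name: str, node_base_type: str) -> str:
        if node_name in ["ChatOutput", "ChatInput"]:
            return "ChatVertex"
        elif node_name in ["ShouldRunNext"]:
            return "RoutingVertex"
        elif node_base_type in VERTEX_TYPE_MAP:
            # The node's base type wins over its own name.
            return VERTEX_TYPE_MAP[node_base_type]
        elif node_name in VERTEX_TYPE_MAP:
            return VERTEX_TYPE_MAP[node_name]
        return "Vertex"

    assert resolve("ShouldRunNext", "chains") == "RoutingVertex"
    assert resolve("TextInput", "models") == "IOVertex"
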
@@ -67,7 +67,7 @@ class LLMVertex(StatelessVertex):
     class_built_object = None
 
     def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
-        super().__init__(data, graph=graph, base_type="llms", params=params)
+        super().__init__(data, graph=graph, base_type="models", params=params)
         self.steps: List[Callable] = [self._custom_build]
 
     async def _custom_build(self, *args, **kwargs):
@@ -35,7 +35,7 @@ def import_by_type(_type: str, name: str) -> Any:
     func_dict = {
         "agents": import_agent,
         "prompts": import_prompt,
-        "llms": {"llm": import_llm, "chat": import_chat_llm},
+        "models": {"llm": import_llm, "chat": import_chat_llm},
         "tools": import_tool,
         "chains": import_chain,
         "toolkits": import_toolkit,
@@ -50,7 +50,7 @@ def import_by_type(_type: str, name: str) -> Any:
         "retrievers": import_retriever,
         "custom_components": import_custom_component,
     }
-    if _type == "llms":
+    if _type == "models":
         key = "chat" if "chat" in name.lower() else "llm"
         loaded_func = func_dict[_type][key]  # type: ignore
     else:
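
Because the func_dict entry for "models" is itself a dict, the rename has to land in both places: the table key and the _type check that picks the chat or llm loader out of it. A minimal sketch of the two-level dispatch, with hypothetical stand-in loader functions:

    def import_llm(name: str) -> str:
        return f"llm:{name}"

    def import_chat_llm(name: str) -> str:
        return f"chat:{name}"

    func_dict = {"models": {"llm": import_llm, "chat": import_chat_llm}}

    def import_by_type(_type: str, name: str) -> str:
        if _type == "models":
            # Chat models share the base type but need a different loader.
            key = "chat" if "chat" in name.lower() else "llm"
            return func_dict[_type][key](name)
        raise ValueError(f"unknown type: {_type}")

    assert import_by_type("models", "ChatOpenAI") == "chat:ChatOpenAI"
    assert import_by_type("models", "OpenAI") == "llm:OpenAI"
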
@@ -19,7 +19,11 @@ from langflow.interface.custom.utils import get_function
 from langflow.interface.custom_lists import CUSTOM_NODES
 from langflow.interface.importing.utils import import_by_type
 from langflow.interface.initialize.llm import initialize_vertexai
-from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type, handle_partial_variables
+from langflow.interface.initialize.utils import (
+    handle_format_kwargs,
+    handle_node_type,
+    handle_partial_variables,
+)
 from langflow.interface.initialize.vector_store import vecstore_initializer
 from langflow.interface.output_parsers.base import output_parser_creator
 from langflow.interface.retrievers.base import retriever_creator
@@ -105,7 +109,7 @@ async def instantiate_based_on_type(class_object, base_type, node_type, params,
         return instantiate_chains(node_type, class_object, params)
     elif base_type == "output_parsers":
         return instantiate_output_parser(node_type, class_object, params)
-    elif base_type == "llms":
+    elif base_type == "models":
         return instantiate_llm(node_type, class_object, params)
     elif base_type == "retrievers":
         return instantiate_retriever(node_type, class_object, params)
@@ -1,16 +1,16 @@
 from typing import Dict, List, Optional, Type
 
+from loguru import logger
+
 from langflow.interface.base import LangChainTypeCreator
 from langflow.interface.custom_lists import llm_type_to_cls_dict
 from langflow.services.deps import get_settings_service
-
 from langflow.template.frontend_node.llms import LLMFrontendNode
-from loguru import logger
 from langflow.utils.util import build_template_from_class
 
 
 class LLMCreator(LangChainTypeCreator):
-    type_name: str = "llms"
+    type_name: str = "models"
 
     @property
     def frontend_node_class(self) -> Type[LLMFrontendNode]:
@@ -11,7 +11,11 @@ from starlette.websockets import WebSocket
 
 from langflow.services.database.models.api_key.model import ApiKey
 from langflow.services.database.models.api_key.crud import check_key
-from langflow.services.database.models.user.crud import get_user_by_id, get_user_by_username, update_user_last_login_at
+from langflow.services.database.models.user.crud import (
+    get_user_by_id,
+    get_user_by_username,
+    update_user_last_login_at,
+)
 from langflow.services.database.models.user.model import User
 from langflow.services.deps import get_session, get_settings_service
@@ -323,7 +327,7 @@ def add_padding(s):
 
 def get_fernet(settings_service=Depends(get_settings_service)):
     SECRET_KEY = settings_service.auth_settings.SECRET_KEY
-    # It's important that your secret key is 32 url-safe base64-encoded bytes
+    # It's important that your secret key is 32 url-safe base64-encoded byte
     padded_secret_key = add_padding(SECRET_KEY)
     fernet = Fernet(padded_secret_key)
     return fernet
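
Fernet keys must be 32 bytes, url-safe base64-encoded, which is why the secret is run through add_padding first: base64 text must be a multiple of 4 characters long, and stored keys often have their trailing "=" stripped. A self-contained sketch of that requirement — the add_padding here is a hypothetical re-implementation, included only so the example runs:

    import base64
    from cryptography.fernet import Fernet

    def add_padding(s: str) -> str:
        # Restore the "=" padding base64 needs (length must be a multiple of 4).
        return s + "=" * (-len(s) % 4)

    raw_key = base64.urlsafe_b64encode(b"\x00" * 32).decode().rstrip("=")
    fernet = Fernet(add_padding(raw_key))
    token = fernet.encrypt(b"super-secret-api-key")
    assert fernet.decrypt(token) == b"super-secret-api-key"
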
@@ -324,7 +324,9 @@ export default function ParameterComponent({
       ) : (
         title
       )}
-      <span className={(info === '' ? '' : 'ml-1 ') + " text-status-red"}>{required ? " *" : ""}</span>
+      <span className={(info === "" ? "" : "ml-1 ") + " text-status-red"}>
+        {required ? " *" : ""}
+      </span>
       <div className="">
         {info !== "" && (
           <ShadTooltip content={infoHtml.current}>
@@ -37,6 +37,7 @@ export default function GenericNode({
   const flowPool = useFlowStore((state) => state.flowPool);
   const buildFlow = useFlowStore((state) => state.buildFlow);
   const setNode = useFlowStore((state) => state.setNode);
+  const getBuildStatus = useFlowStore((state) => state.getBuildStatus);
   const name = nodeIconsLucide[data.type] ? data.type : types[data.type];
   const [inputName, setInputName] = useState(false);
   const [nodeName, setNodeName] = useState(data.node!.display_name);
@@ -170,7 +171,6 @@ export default function GenericNode({
     validationStatus: validationStatusType | null
   ) => {
     const isValid = validationStatus && validationStatus.valid;
-
     if (isValid) {
       return "green-status";
     } else if (!isValid && buildStatus === BuildStatus.INACTIVE) {
@@ -179,10 +179,12 @@ export default function GenericNode({
       return "green-status";
     } else if (!isValid && buildStatus === BuildStatus.BUILT) {
       return "red-status";
-    } else if (!validationStatus) {
-      return "yellow-status";
-    } else {
+    } else if (!validationStatus && buildStatus === BuildStatus.TO_BUILD) {
+      return "green-status";
+    } else if (buildStatus === BuildStatus.BUILDING) {
+      return "status-build-animation";
+    } else {
       return "green-status";
     }
   };
@@ -205,6 +207,7 @@ export default function GenericNode({
     validationStatus: validationStatusType | null
   ) => {
     let isInvalid = validationStatus && !validationStatus.valid;
+
     if (buildStatus === BuildStatus.INACTIVE && isInvalid) {
       // INACTIVE should have its own class
       return "inactive-status";
@@ -224,11 +227,17 @@ export default function GenericNode({
     buildStatus: BuildStatus | undefined,
     validationStatus: validationStatusType | null
   ) => {
+    const specificClassFromBuildStatus = getSpecificClassFromBuildStatus(
+      buildStatus,
+      validationStatus
+    );
+    const baseBorderClass = getBaseBorderClass(selected);
+    const nodeSizeClass = getNodeSizeClass(showNode);
     return classNames(
-      getBaseBorderClass(selected),
-      getNodeSizeClass(showNode),
+      baseBorderClass,
+      nodeSizeClass,
       "generic-node-div",
-      getSpecificClassFromBuildStatus(buildStatus, validationStatus)
+      specificClassFromBuildStatus
     );
   };
@@ -265,7 +274,7 @@ export default function GenericNode({
         className={getNodeBorderClassName(
           selected,
           showNode,
-          data?.build_status,
+          getBuildStatus(data.id),
           validationStatus
         )}
       >
@@ -476,22 +485,26 @@ export default function GenericNode({
             variant="outline"
             className={"h-9 px-1.5"}
             onClick={() => {
-              if (data?.build_status === BuildStatus.BUILDING || isBuilding)
+              if (
+                getBuildStatus(data.id) === BuildStatus.BUILDING ||
+                isBuilding
+              )
                 return;
+
               buildFlow(data.id);
             }}
           >
             <div>
               <Tooltip
                 title={
-                  data?.build_status === BuildStatus.BUILDING ? (
+                  getBuildStatus(data.id) === BuildStatus.BUILDING ? (
                     <span>Building...</span>
                   ) : !validationStatus ? (
                     <span className="flex">
                       Build{" "}
                       <IconComponent
                         name="Play"
-                        className=" h-5 stroke-build-trigger stroke-2"
+                        className=" h-5 stroke-status-green stroke-2"
                       />{" "}
                       flow to validate status.
                     </span>
@@ -510,7 +523,7 @@ export default function GenericNode({
         >
           <div className="generic-node-status-position flex items-center justify-center">
             {renderIconPlayOrPauseComponents(
-              data?.build_status,
+              getBuildStatus(data.id),
               validationStatus
             )}
           </div>
@@ -394,7 +394,10 @@ export default function NodeToolbarComponent({
               />{" "}
               <span className="">Delete</span>{" "}
               <span>
-                <IconComponent name="Delete" className="text-red-300 absolute right-2 top-2 h-4 w-4"></IconComponent>
+                <IconComponent
+                  name="Delete"
+                  className="absolute right-2 top-2 h-4 w-4 text-red-300"
+                ></IconComponent>
               </span>
             </div>
           </SelectItem>
@@ -416,7 +416,6 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
         useFlowStore.getState().updateBuildStatus(idList, BuildStatus.BUILDING);
       },
     });
-    get().revertAllVerticesToBuild();
   },
   getFlow: () => {
     return {
@@ -425,26 +424,11 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
       viewport: get().reactFlowInstance?.getViewport()!,
     };
   },
-  updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => {
-    nodeIdList.forEach((id) => {
-      const nodeToUpdate = get().nodes.find((node) => node.id === id);
-      if (nodeToUpdate) {
-        nodeToUpdate.data.build_status = status;
-        get().setNodes(get().nodes);
-      }
-    });
-  },
   updateVerticesBuild: (vertices: string[]) => {
     set({ verticesBuild: vertices });
   },
   verticesBuild: [],
-  revertAllVerticesToBuild: () => {
-    // set all vertices to TO_BUILD
-    const verticesIds = get()
-      .nodes.filter((node) => node.data.build_status === BuildStatus.BUILDING)
-      .map((node) => node.id);
-    get().updateBuildStatus(verticesIds, BuildStatus.TO_BUILD);
-  },
-
   removeFromVerticesBuild: (vertices: string[]) => {
     set({
       verticesBuild: get().verticesBuild.filter(
@@ -452,6 +436,25 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
       ),
     });
   },
+  updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => {
+    let newBuildStatus = cloneDeep(get().buildStatus);
+    // check if nodeIdList is an array
+    if (!Array.isArray(nodeIdList)) {
+      nodeIdList = [nodeIdList];
+    }
+    nodeIdList.forEach((id) => {
+      newBuildStatus[id] = status;
+    });
+    set({ buildStatus: newBuildStatus });
+  },
+  buildStatus: {},
+  getBuildStatus: (nodeId: string) => {
+    // if the node is not in the buildStatus object, set it to TO_BUILD
+    if (!get().buildStatus[nodeId]) {
+      get().buildStatus[nodeId] = BuildStatus.TO_BUILD;
+    }
+    return get().buildStatus[nodeId];
+  },
 }));
 
 export default useFlowStore;
@@ -296,7 +296,7 @@
   @apply generic-node-status text-status-green;
 }
 .gray-status {
-  @apply generic-node-status text-muted-foreground;
+  @apply generic-node-status text-status-gray;
 }
 
 .red-status {
@@ -60,6 +60,7 @@
   --chat-send: #059669;
   --status-green: #4ade80;
   --status-blue: #2563eb;
+  --status-gray: #6b7280;
   --connection: #555;
 }
 
@@ -1,5 +1,4 @@
 import { Edge, Node, Viewport } from "reactflow";
-import { BuildStatus } from "../../constants/enums";
 import { FlowType } from "../flow";
 //kind and class are just representative names to represent the actual structure of the object received by the API
 export type APIDataType = { [key: string]: APIKindType };
@@ -37,7 +36,6 @@ export type APIClassType = {
     | CustomFieldsType
     | boolean
     | undefined;
-  build_status?: BuildStatus;
 };
 
 export type TemplateVariableType = {
@@ -1,5 +1,4 @@
 import { ReactFlowJsonObject, XYPosition } from "reactflow";
-import { BuildStatus } from "../../constants/enums";
 import { APIClassType } from "../api/index";
 
 export type FlowType = {
@@ -29,7 +28,6 @@ export type NodeDataType = {
   node?: APIClassType;
   id: string;
   output_types?: string[];
-  build_status?: BuildStatus;
 };
 // FlowStyleType is the type of the style object that is used to style the
 // Flow card with an emoji and a color.
@@ -86,9 +86,10 @@ export type FlowStoreType = {
   unselectAll: () => void;
   buildFlow: (nodeId?: string) => Promise<void>;
   getFlow: () => { nodes: Node[]; edges: Edge[]; viewport: Viewport };
-  updateBuildStatus: (nodeId: string[], status: BuildStatus) => void;
   updateVerticesBuild: (vertices: string[]) => void;
   removeFromVerticesBuild: (vertices: string[]) => void;
-  revertAllVerticesToBuild: () => void;
   verticesBuild: string[];
+  updateBuildStatus: (nodeId: string[], status: BuildStatus) => void;
+  buildStatus: { [key: string]: BuildStatus };
+  getBuildStatus: (nodeId: string) => BuildStatus;
 };
@@ -25,6 +25,7 @@ import {
   Compass,
   Copy,
   Cpu,
+  Delete,
   Download,
   DownloadCloud,
   Edit,
|
|||
X,
|
||||
XCircle,
|
||||
Zap,
|
||||
Delete,
|
||||
} from "lucide-react";
|
||||
import { FaApple, FaGithub } from "react-icons/fa";
|
||||
import { AWSIcon } from "../icons/AWS";
|
||||
|
|
@@ -195,7 +195,7 @@ export const gradients = [
 export const nodeColors: { [char: string]: string } = {
   prompts: "#4367BF",
-  llms: "#6344BE",
+  models: "#AA2411",
   model_specs: "#6344BE",
   chains: "#FE7500",
   Document: "#7AAE42",
   list: "#9AAE42",
@@ -224,7 +224,7 @@ export const nodeColors: { [char: string]: string } = {
 export const nodeNames: { [char: string]: string } = {
   prompts: "Prompts",
-  llms: "Model Settings",
+  models: "Language Models",
   model_specs: "Model Specs",
   chains: "Chains",
   agents: "Agents",
   tools: "Tools",
@@ -296,7 +296,7 @@ export const nodeIconsLucide: iconsType = {
   chains: Link,
   memories: Cpu,
-  llms: Lightbulb,
+  models: Bot,
   model_specs: Lightbulb,
   prompts: TerminalSquare,
   tools: Wrench,
   advanced: Laptop2,
@@ -80,6 +80,7 @@ module.exports = {
         "status-green": "var(--status-green)",
         "status-red": "var(--status-red)",
         "status-yellow": "var(--status-yellow)",
+        "status-gray": "var(--status-gray)",
         "success-background": "var(--success-background)",
         "success-foreground": "var(--success-foreground)",
         "beta-background": "var(--beta-background)",