Merge remote-tracking branch 'origin/zustand/io/migration' into cz/fixTestsIo

This commit is contained in:
anovazzi1 2024-02-28 18:43:12 -03:00
commit 2666a224ef
33 changed files with 105 additions and 2 deletions

View file

@@ -16,7 +16,8 @@ from langflow.field_typing.range_spec import RangeSpec
class ConversationalAgent(CustomComponent):
display_name: str = "OpenAI Conversational Agent"
description: str = "Conversational Agent that can use OpenAI's function calling API"
icon = "OpenAI"
def build_config(self):
openai_function_models = [
"gpt-4-turbo-preview",

View file

@@ -9,6 +9,7 @@ class HuggingFaceEmbeddingsComponent(CustomComponent):
documentation = (
"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers"
)
icon="HuggingFace"
def build_config(self):
return {

View file

@@ -9,6 +9,8 @@ class HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent):
display_name = "HuggingFaceInferenceAPIEmbeddings"
description = "HuggingFace sentence_transformers embedding models, API version."
documentation = "https://github.com/huggingface/text-embeddings-inference"
icon="HuggingFace"
def build_config(self):
return {

View file

@@ -10,6 +10,8 @@ from langflow import CustomComponent
class AmazonBedrockComponent(CustomComponent):
display_name: str = "Amazon Bedrock"
description: str = "LLM model from Amazon Bedrock."
icon = "Amazon"
def build_config(self):
return {

View file

@@ -10,6 +10,7 @@ from langflow import CustomComponent
class AnthropicLLM(CustomComponent):
display_name: str = "AnthropicLLM"
description: str = "Anthropic Chat&Completion large language models."
icon ="Anthropic"
def build_config(self):
return {

View file

@@ -10,6 +10,8 @@ from langflow.field_typing import BaseLanguageModel, NestedDict
class AnthropicComponent(CustomComponent):
display_name = "Anthropic"
description = "Anthropic large language models."
icon ="Anthropic"
def build_config(self):
return {

View file

@@ -9,6 +9,7 @@ class ChatAnthropicComponent(CustomComponent):
display_name = "ChatAnthropic"
description = "`Anthropic` chat large language models."
documentation = "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic"
icon ="Anthropic"
def build_config(self):
return {

View file

@@ -9,6 +9,7 @@ from langflow.field_typing import BaseLanguageModel, NestedDict
class ChatOpenAIComponent(CustomComponent):
display_name = "ChatOpenAI"
description = "`OpenAI` Chat large language models API."
icon = "OpenAI"
def build_config(self):
return {

View file

@@ -10,6 +10,8 @@ from langflow.field_typing import BaseLanguageModel
class ChatVertexAIComponent(CustomComponent):
display_name = "ChatVertexAI"
description = "`Vertex AI` Chat large language models API."
icon="VertexAI"
def build_config(self):
return {

View file

@@ -7,6 +7,7 @@ class CohereComponent(CustomComponent):
display_name = "Cohere"
description = "Cohere large language models."
documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere"
icon = "Cohere"
def build_config(self):
return {

View file

@@ -10,6 +10,7 @@ class GoogleGenerativeAIComponent(CustomComponent):
display_name: str = "Google Generative AI"
description: str = "A component that uses Google Generative AI to generate text."
documentation: str = "http://docs.langflow.org/components/custom"
icon = "Google"
def build_config(self):
return {

View file

@@ -8,6 +8,8 @@ from langflow import CustomComponent
class HuggingFaceEndpointsComponent(CustomComponent):
display_name: str = "Hugging Face Inference API"
description: str = "LLM model from Hugging Face Inference API."
icon="HuggingFace"
def build_config(self):
return {

View file

@@ -7,6 +7,7 @@ from langchain_community.llms.vertexai import VertexAI
class VertexAIComponent(CustomComponent):
display_name = "VertexAI"
description = "Google Vertex AI large language models"
icon="VertexAI"
def build_config(self):
return {

View file

@@ -9,7 +9,7 @@ from langflow.field_typing import Text
class AmazonBedrockComponent(LCModelComponent):
display_name: str = "Amazon Bedrock Model"
description: str = "Generate text using LLM model from Amazon Bedrock."
icon = "AmazonBedrock"
icon = "Amazon"
def build_config(self):
return {

View file

@@ -11,6 +11,7 @@ class GoogleGenerativeAIComponent(LCModelComponent):
display_name: str = "Google Generative AIModel"
description: str = "Generate text using Google Generative AI to generate text."
icon = "GoogleGenerativeAI"
icon = "Google"
def build_config(self):
return {

View file

@@ -9,6 +9,8 @@ from langflow.field_typing import Text
class ChatVertexAIComponent(LCModelComponent):
display_name = "ChatVertexAIModel"
description = "Generate text using Vertex AI Chat large language models API."
icon="VertexAI"
def build_config(self):
return {

View file

@@ -9,6 +9,7 @@ from langflow import CustomComponent
class AmazonKendraRetrieverComponent(CustomComponent):
display_name: str = "Amazon Kendra Retriever"
description: str = "Retriever that uses the Amazon Kendra API."
icon = "Amazon"
def build_config(self):
return {

View file

@@ -17,6 +17,8 @@ class VectaraSelfQueryRetriverComponent(CustomComponent):
description: str = "Implementation of Vectara Self Query Retriever"
documentation = "https://python.langchain.com/docs/integrations/retrievers/self_query/vectara_self_query"
beta = True
icon="Vectara"
field_config = {
"code": {"show": True},

View file

@@ -11,6 +11,7 @@ class MongoDBAtlasComponent(CustomComponent):
description = (
"Construct a `MongoDB Atlas Vector Search` vector store from raw documents."
)
icon="MongoDB"
def build_config(self):
return {

View file

@@ -10,6 +10,7 @@ from langflow.field_typing import Document, Embeddings, NestedDict
class QdrantComponent(CustomComponent):
display_name = "Qdrant"
description = "Construct Qdrant wrapper from a list of texts."
icon="Qdrant"
def build_config(self):
return {

View file

@@ -9,6 +9,8 @@ from langflow.schema import Record
class QdrantSearchComponent(QdrantComponent, LCVectorStoreComponent):
display_name = "Qdrant"
description = "Construct Qdrant wrapper from a list of texts."
icon="Qdrant"
def build_config(self):
return {

View file

@@ -11,6 +11,7 @@ from langflow.schema import Record
class SupabaseSearchComponent(LCVectorStoreComponent):
display_name = "Supabase Search"
description = "Search a Supabase Vector Store for similar documents."
icon="Supabase"
def build_config(self):
return {

View file

@@ -18,6 +18,7 @@ class VectaraComponent(CustomComponent):
"https://python.langchain.com/docs/integrations/vectorstores/vectara"
)
beta = True
icon="Vectara"
field_config = {
"vectara_customer_id": {
"display_name": "Vectara Customer ID",

View file

@@ -14,6 +14,8 @@ class VectaraSearchComponent(VectaraComponent, LCVectorStoreComponent):
"https://python.langchain.com/docs/integrations/vectorstores/vectara"
)
beta = True
icon="Vectara"
field_config = {
"search_type": {
"display_name": "Search Type",

View file

@@ -14,6 +14,8 @@ class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreCompo
"https://python.langchain.com/docs/integrations/vectorstores/weaviate"
)
beta = True
icon="Weaviate"
field_config = {
"search_type": {
"display_name": "Search Type",

View file

@@ -21,6 +21,7 @@ import { useDarkStore } from "./stores/darkStore";
import useFlowsManagerStore from "./stores/flowsManagerStore";
import { useStoreStore } from "./stores/storeStore";
import { useTypesStore } from "./stores/typesStore";
import useFlowStore from "./stores/flowStore";
export default function App() {
const removeFromTempNotificationList = useAlertStore(
@@ -44,6 +45,25 @@ export default function App() {
const refreshStars = useDarkStore((state) => state.refreshStars);
const checkHasStore = useStoreStore((state) => state.checkHasStore);
const handleModalWShortcut = useFlowStore(state => state.handleModalWShortcut);
const nodes = useFlowStore(state => state.nodes);
useEffect(() => {
const onKeyDown = (event: KeyboardEvent) => {
const selectedNode = nodes.filter((obj) => obj.selected);
if ((event.ctrlKey || event.metaKey) && event.shiftKey && event.key === "C" && selectedNode.length > 0) {
event.preventDefault();
handleModalWShortcut("code");
}
};
document.addEventListener("keydown", onKeyDown);
return () => {
document.removeEventListener("keydown", onKeyDown);
};
}, [handleModalWShortcut, nodes]);
useEffect(() => {
refreshStars();
refreshVersion();

View file

@@ -269,6 +269,7 @@ export default function GenericNode({
showNode={showNode}
openAdvancedModal={false}
onCloseAdvancedModal={() => {}}
selected={selected}
></NodeToolbarComponent>
</NodeToolbar>
<div

View file

@@ -47,6 +47,27 @@ export default function CodeAreaModal({
}
}, []);
const handleModalWShortcut = useFlowStore((state) => state.handleModalWShortcut)
const openCodeModalWShortcut = useFlowStore(state => state.openCodeModalWShortcut);
const nodes = useFlowStore(state => state.nodes);
useEffect(() => {
const onKeyDown = (event: KeyboardEvent) => {
const selectedNode = nodes.filter((obj) => obj.selected);
if ((event.ctrlKey || event.metaKey) && event.shiftKey && event.key === "C" && selectedNode.length > 0) {
event.preventDefault();
setOpen(openCodeModalWShortcut)
}
}
document.addEventListener("keydown", onKeyDown);
return () => {
document.removeEventListener("keydown", onKeyDown);
}
}, []);
useEffect(() => {
if (openModal) setOpen(true);
}, [openModal]);

View file

@@ -36,6 +36,7 @@ export default function NodeToolbarComponent({
numberOfHandles,
showNode,
name = "code",
selected,
onCloseAdvancedModal,
}: nodeToolbarPropsType): JSX.Element {
const nodeLength = Object.keys(data.node!.template).filter(
@@ -89,6 +90,13 @@
}, [showModalAdvanced]);
const updateNodeInternals = useUpdateNodeInternals();
const openCodeModalWShortcut = useFlowStore(state => state.openCodeModalWShortcut);
const handleModalWShortcut = useFlowStore(state => state.handleModalWShortcut);
useEffect(() => {
setOpenModal(openCodeModalWShortcut)
}, [openCodeModalWShortcut, handleModalWShortcut])
const setLastCopiedSelection = useFlowStore(
(state) => state.setLastCopiedSelection
);

View file

@@ -56,6 +56,16 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
flowPool: {},
inputs: [],
outputs: [],
openCodeModalWShortcut: false,
handleModalWShortcut: ((modal) => {
switch (modal) {
case "code":
set((state) => ({
openCodeModalWShortcut: !state.openCodeModalWShortcut,
}));
break
}
}),
setFlowPool: (flowPool) => {
set({ flowPool });
},

View file

@@ -485,6 +485,7 @@ export type nodeToolbarPropsType = {
name?: string;
openAdvancedModal?: boolean;
onCloseAdvancedModal?: (close: boolean) => void;
selected: boolean;
};
export type parsedDataType = {

View file

@@ -37,6 +37,8 @@ export type FlowPoolType = {
};
export type FlowStoreType = {
openCodeModalWShortcut: boolean;
handleModalWShortcut: (modal: string) => void
flowPool: FlowPoolType;
inputs: Array<{ type: string; id: string }>;
outputs: Array<{ type: string; id: string }>;

View file

@@ -258,6 +258,7 @@ export const nodeIconsLucide: iconsType = {
Chroma: ChromaIcon,
AirbyteJSONLoader: AirbyteIcon,
AmazonBedrockEmbeddings: AWSIcon,
Amazon: AWSIcon,
Anthropic: AnthropicIcon,
ChatAnthropic: AnthropicIcon,
BingSearchAPIWrapper: BingIcon,
@@ -270,13 +271,17 @@ export const nodeIconsLucide: iconsType = {
GoogleSearchAPIWrapper: GoogleIcon,
GoogleSearchResults: GoogleIcon,
GoogleSearchRun: GoogleIcon,
Google: GoogleIcon,
HNLoader: HackerNewsIcon,
HuggingFaceHub: HuggingFaceIcon,
HuggingFace: HuggingFaceIcon,
HuggingFaceEmbeddings: HuggingFaceIcon,
IFixitLoader: IFixIcon,
Meta: MetaIcon,
Midjorney: MidjourneyIcon,
MongoDBAtlasVectorSearch: MongoDBIcon,
MongoDB:MongoDBIcon,
MongoDBChatMessageHistory: MongoDBIcon,
NotionDirectoryLoader: NotionIcon,
ChatOpenAI: OpenAiIcon,
AzureChatOpenAI: OpenAiIcon,
@@ -289,6 +294,7 @@ export const nodeIconsLucide: iconsType = {
Searx: SearxIcon,
SlackDirectoryLoader: SvgSlackIcon,
SupabaseVectorStore: SupabaseIcon,
Supabase: SupabaseIcon,
VertexAI: VertexAIIcon,
ChatVertexAI: VertexAIIcon,
VertexAIEmbeddings: VertexAIIcon,