From b1a552fa9ed7d4c4eabb90642f4b81f24775f676 Mon Sep 17 00:00:00 2001 From: Lucas Oliveira <62335616+lucaseduoli@users.noreply.github.com> Date: Thu, 21 Nov 2024 22:01:37 -0300 Subject: [PATCH] fix: refactor get all to fix types not being fetched before checking for outdated components (#4762) * Added use get types to fetch types from backend using tanstack * Updated typesStore to use new set types * Updated project to not use getTypes anymore * deleted unused getTypes * add tests * [autofix.ci] apply automated fixes * fix tests --------- Co-authored-by: cristhianzl Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Gabriel Luiz Freitas Almeida --- .../components/FlowMenu/index.tsx | 6 +- .../src/controllers/API/helpers/constants.ts | 1 + .../API/queries/flows/use-get-types.ts | 37 + .../API/queries/folders/use-get-folders.ts | 5 +- src/frontend/src/pages/AppInitPage/index.tsx | 18 +- src/frontend/src/pages/FlowPage/index.tsx | 2 - .../pages/MainPage/components/list/index.tsx | 1 + .../pages/GlobalVariablesPage/index.tsx | 12 +- src/frontend/src/pages/ViewPage/index.tsx | 2 - src/frontend/src/stores/typesStore.ts | 39 +- src/frontend/src/types/zustand/types/index.ts | 1 - src/frontend/tests/assets/outdated_flow.json | 1081 +++++++++++++++++ .../features/outdated-message.spec.ts | 58 + 13 files changed, 1203 insertions(+), 60 deletions(-) create mode 100644 src/frontend/src/controllers/API/queries/flows/use-get-types.ts create mode 100644 src/frontend/tests/assets/outdated_flow.json create mode 100644 src/frontend/tests/extended/features/outdated-message.spec.ts diff --git a/src/frontend/src/components/appHeaderComponent/components/FlowMenu/index.tsx b/src/frontend/src/components/appHeaderComponent/components/FlowMenu/index.tsx index 8f508d472..09ee6adf3 100644 --- a/src/frontend/src/components/appHeaderComponent/components/FlowMenu/index.tsx +++ b/src/frontend/src/components/appHeaderComponent/components/FlowMenu/index.tsx @@ -28,8 +28,8 @@ import useAlertStore from "@/stores/alertStore"; import useFlowsManagerStore from "@/stores/flowsManagerStore"; import useFlowStore from "@/stores/flowStore"; import { useShortcutsStore } from "@/stores/shortcuts"; -import { useTypesStore } from "@/stores/typesStore"; import { cn } from "@/utils/utils"; +import { useQueryClient } from "@tanstack/react-query"; export const MenuBar = ({}: {}): JSX.Element => { const shortcuts = useShortcutsStore((state) => state.shortcuts); @@ -44,8 +44,8 @@ export const MenuBar = ({}: {}): JSX.Element => { const uploadFlow = useUploadFlow(); const navigate = useCustomNavigate(); const isBuilding = useFlowStore((state) => state.isBuilding); - const getTypes = useTypesStore((state) => state.getTypes); const saveFlow = useSaveFlow(); + const queryClient = useQueryClient(); const autoSaving = useFlowsManagerStore((state) => state.autoSaving); const currentFlow = useFlowStore((state) => state.currentFlow); const currentSavedFlow = useFlowsManagerStore((state) => state.currentFlow); @@ -75,7 +75,7 @@ export const MenuBar = ({}: {}): JSX.Element => { } function handleReloadComponents() { - getTypes(true).then(() => { + queryClient.prefetchQuery({ queryKey: ["useGetTypes"] }).then(() => { setSuccessData({ title: "Components reloaded successfully" }); }); } diff --git a/src/frontend/src/controllers/API/helpers/constants.ts b/src/frontend/src/controllers/API/helpers/constants.ts index 6a63eac70..2d8809cc1 100644 --- a/src/frontend/src/controllers/API/helpers/constants.ts +++ 
b/src/frontend/src/controllers/API/helpers/constants.ts @@ -22,6 +22,7 @@ export const URLs = { CONFIG: `config`, STARTER_PROJECTS: `starter-projects`, SIDEBAR_CATEGORIES: `sidebar_categories`, + ALL: `all`, } as const; export function getURL(key: keyof typeof URLs, params: any = {}) { diff --git a/src/frontend/src/controllers/API/queries/flows/use-get-types.ts b/src/frontend/src/controllers/API/queries/flows/use-get-types.ts new file mode 100644 index 000000000..300975ca9 --- /dev/null +++ b/src/frontend/src/controllers/API/queries/flows/use-get-types.ts @@ -0,0 +1,37 @@ +import useFlowsManagerStore from "@/stores/flowsManagerStore"; +import { useTypesStore } from "@/stores/typesStore"; +import { APIObjectType, useQueryFunctionType } from "../../../../types/api"; +import { api } from "../../api"; +import { getURL } from "../../helpers/constants"; +import { UseRequestProcessor } from "../../services/request-processor"; + +export const useGetTypes: useQueryFunctionType = (options) => { + const { query } = UseRequestProcessor(); + const setLoading = useFlowsManagerStore((state) => state.setIsLoading); + const setTypes = useTypesStore((state) => state.setTypes); + + const getTypesFn = async () => { + try { + const response = await api.get<APIObjectType>( + `${getURL("ALL")}?force_refresh=true`, + ); + const data = response?.data; + setTypes(data); + return data; + } catch (error) { + // Surface the failure instead of silently swallowing it. + console.error("An error has occurred while fetching types."); + console.log(error); + // Clear the global loading flag before rethrowing. + setLoading(false); + throw error; + } + }; + + const queryResult = query(["useGetTypes"], getTypesFn, { + refetchOnWindowFocus: false, + ...options, + }); + + return queryResult; +}; diff --git a/src/frontend/src/controllers/API/queries/folders/use-get-folders.ts b/src/frontend/src/controllers/API/queries/folders/use-get-folders.ts index 00f00b184..0c554c302 100644 --- a/src/frontend/src/controllers/API/queries/folders/use-get-folders.ts +++ b/src/frontend/src/controllers/API/queries/folders/use-get-folders.ts @@ -1,4 +1,4 @@ -import { DEFAULT_FOLDER, STARTER_FOLDER_NAME } from "@/constants/constants"; +import { DEFAULT_FOLDER } from "@/constants/constants"; import { FolderType } from "@/pages/MainPage/entities"; import useAuthStore from "@/stores/authStore"; import { useFolderStore } from "@/stores/foldersStore"; @@ -29,10 +29,9 @@ export const useGetFoldersQuery: useQueryFunctionType< const myCollectionId = data?.find((f) => f.name === DEFAULT_FOLDER)?.id; setMyCollectionId(myCollectionId); setFolders(data); - const { getTypes, types } = useTypesStore.getState(); + const { types } = useTypesStore.getState(); await refreshFlows({ get_all: true, header_flows: true }); - if (!types || Object.keys(types).length === 0) await getTypes(); return data; }; diff --git a/src/frontend/src/pages/AppInitPage/index.tsx b/src/frontend/src/pages/AppInitPage/index.tsx index 8901b561a..2eb8b2976 100644 --- a/src/frontend/src/pages/AppInitPage/index.tsx +++ b/src/frontend/src/pages/AppInitPage/index.tsx @@ -1,6 +1,7 @@ import { useGetAutoLogin } from "@/controllers/API/queries/auth"; import { useGetConfig } from "@/controllers/API/queries/config/use-get-config"; import { useGetBasicExamplesQuery } from "@/controllers/API/queries/flows/use-get-basic-examples"; +import { useGetTypes } from "@/controllers/API/queries/flows/use-get-types"; import { useGetFoldersQuery } from "@/controllers/API/queries/folders/use-get-folders"; import { useGetTagsQuery } from "@/controllers/API/queries/store"; import { useGetGlobalVariables } from
"@/controllers/API/queries/variables"; @@ -23,17 +24,16 @@ export function AppInitPage() { const { isFetched } = useGetAutoLogin({ enabled: isLoaded }); useGetVersionQuery({ enabled: isFetched }); useGetConfig({ enabled: isFetched }); - useGetGlobalVariables({ enabled: isFetched }); - useGetBasicExamplesQuery({ enabled: isFetched }); - useGetTagsQuery({ enabled: isFetched }); - const { refetch: refetchFolders } = useGetFoldersQuery({ - enabled: isFetched, - }); + const { isFetched: typesLoaded } = useGetTypes({ enabled: isFetched }); + useGetGlobalVariables({ enabled: typesLoaded }); + useGetBasicExamplesQuery({ enabled: typesLoaded }); + useGetTagsQuery({ enabled: typesLoaded }); + + useGetFoldersQuery({ enabled: typesLoaded }); useEffect(() => { if (isFetched) { refreshStars(); - refetchFolders(); } }, [isFetched]); @@ -49,11 +49,11 @@ export function AppInitPage() { //need parent component with width and height <> {isLoaded ? ( - (isLoading || !isFetched) && + (isLoading || !isFetched || !typesLoaded) && ) : ( )} - {isFetched && } + {isFetched && typesLoaded && } ); } diff --git a/src/frontend/src/pages/FlowPage/index.tsx b/src/frontend/src/pages/FlowPage/index.tsx index 45655a2d3..c0d480687 100644 --- a/src/frontend/src/pages/FlowPage/index.tsx +++ b/src/frontend/src/pages/FlowPage/index.tsx @@ -42,7 +42,6 @@ export default function FlowPage({ view }: { view?: boolean }): JSX.Element { const { mutateAsync: refreshFlows } = useGetRefreshFlows(); const setIsLoading = useFlowsManagerStore((state) => state.setIsLoading); - const getTypes = useTypesStore((state) => state.getTypes); const types = useTypesStore((state) => state.types); const updatedAt = currentSavedFlow?.updated_at; @@ -118,7 +117,6 @@ export default function FlowPage({ view }: { view?: boolean }): JSX.Element { } else if (!flows) { setIsLoading(true); await refreshFlows({ get_all: true, header_flows: true }); - if (!types || Object.keys(types).length === 0) await getTypes(); setIsLoading(false); } }; diff --git a/src/frontend/src/pages/MainPage/components/list/index.tsx b/src/frontend/src/pages/MainPage/components/list/index.tsx index 9b4f078d0..a1ab295da 100644 --- a/src/frontend/src/pages/MainPage/components/list/index.tsx +++ b/src/frontend/src/pages/MainPage/components/list/index.tsx @@ -114,6 +114,7 @@ const ListComponent = ({ flowData }: { flowData: FlowType }) => { className={`my-2 flex flex-row bg-background ${ isComponent ? "cursor-default" : "cursor-pointer" } group justify-between rounded-lg border border-border p-4 hover:border-placeholder-foreground hover:shadow-sm`} + data-testid="list-card" > {/* left side */}
state.setErrorData); const [openModal, setOpenModal] = useState(false); const initialData = useRef(undefined); - const getTypes = useTypesStore((state) => state.getTypes); const BadgeRenderer = (props) => { return props.value !== "" ? (
@@ -40,11 +35,6 @@ export default function GlobalVariablesPage() { ); }; - useEffect(() => { - //get the components to build the Aplly To Fields dropdown - getTypes(true); - }, []); - const DropdownEditor = ({ options, value, onValueChange }) => { return ( diff --git a/src/frontend/src/pages/ViewPage/index.tsx b/src/frontend/src/pages/ViewPage/index.tsx index 6184d9252..7e58eb1f8 100644 --- a/src/frontend/src/pages/ViewPage/index.tsx +++ b/src/frontend/src/pages/ViewPage/index.tsx @@ -16,7 +16,6 @@ export default function ViewPage() { const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId); const { mutateAsync: refreshFlows } = useGetRefreshFlows(); const setIsLoading = useFlowsManagerStore((state) => state.setIsLoading); - const getTypes = useTypesStore((state) => state.getTypes); const types = useTypesStore((state) => state.types); // Set flow tab id @@ -34,7 +33,6 @@ export default function ViewPage() { } else if (!flows) { setIsLoading(true); await refreshFlows({ get_all: true, header_flows: true }); - if (!types || Object.keys(types).length === 0) await getTypes(); setIsLoading(false); } }; diff --git a/src/frontend/src/stores/typesStore.ts b/src/frontend/src/stores/typesStore.ts index 2aedc0d65..93b2afb20 100644 --- a/src/frontend/src/stores/typesStore.ts +++ b/src/frontend/src/stores/typesStore.ts @@ -1,5 +1,4 @@ import { create } from "zustand"; -import { getAll } from "../controllers/API"; import { APIDataType } from "../types/api"; import { TypesStoreType } from "../types/zustand/types"; import { @@ -7,7 +6,6 @@ import { templatesGenerator, typesGenerator, } from "../utils/reactflowUtils"; -import useFlowsManagerStore from "./flowsManagerStore"; export const useTypesStore = create((set, get) => ({ ComponentFields: new Set(), @@ -20,33 +18,16 @@ export const useTypesStore = create((set, get) => ({ types: {}, templates: {}, data: {}, - getTypes: (force_refresh: boolean = true) => { - return new Promise(async (resolve, reject) => { - const setLoading = useFlowsManagerStore.getState().setIsLoading; - getAll(force_refresh) - .then((response) => { - const data = response?.data; - set((old) => ({ - types: typesGenerator(data), - data: { ...old.data, ...data }, - ComponentFields: extractFieldsFromComponenents({ - ...old.data, - ...data, - }), - templates: templatesGenerator(data), - })); - resolve(); - }) - .catch((error) => { - console.error("An error has occurred while fetching types."); - console.log(error); - setLoading(false); - reject(); - }); - }); - }, - setTypes: (newState: {}) => { - set({ types: newState }); + setTypes: (data: APIDataType) => { + set((old) => ({ + types: typesGenerator(data), + data: { ...old.data, ...data }, + ComponentFields: extractFieldsFromComponenents({ + ...old.data, + ...data, + }), + templates: templatesGenerator(data), + })); }, setTemplates: (newState: {}) => { set({ templates: newState }); diff --git a/src/frontend/src/types/zustand/types/index.ts b/src/frontend/src/types/zustand/types/index.ts index 1f2fa1ed2..8692b039c 100644 --- a/src/frontend/src/types/zustand/types/index.ts +++ b/src/frontend/src/types/zustand/types/index.ts @@ -7,7 +7,6 @@ export type TypesStoreType = { setTemplates: (newState: {}) => void; data: APIDataType; setData: (newState: {}) => void; - getTypes: (force_refresh?: boolean) => Promise; ComponentFields: Set; setComponentFields: (fields: Set) => void; addComponentField: (field: string) => void; diff --git a/src/frontend/tests/assets/outdated_flow.json b/src/frontend/tests/assets/outdated_flow.json new 
file mode 100644 index 000000000..66ca450da --- /dev/null +++ b/src/frontend/tests/assets/outdated_flow.json @@ -0,0 +1,1081 @@ +{ + "id": "cb35184a-3446-4074-9ec7-8a935e980114", + "data": { + "edges": [ + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-KovKB", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "user_message", + "id": "Prompt-Xz9bN", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-KovKB{œdataTypeœ:œChatInputœ,œidœ:œChatInput-KovKBœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-Xz9bN{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-KovKB", + "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-KovKBœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", + "target": "Prompt-Xz9bN", + "targetHandle": "{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-Xz9bN", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-pqHDB", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-Xz9bN{œdataTypeœ:œPromptœ,œidœ:œPrompt-Xz9bNœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-pqHDB{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-pqHDBœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-Xz9bN", + "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-Xz9bNœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", + "target": "OpenAIModel-pqHDB", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-pqHDBœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-pqHDB", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-NasE4", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-pqHDB{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-pqHDBœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-NasE4{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-NasE4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-pqHDB", + "sourceHandle": "{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-pqHDBœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", + "target": "ChatOutput-NasE4", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-NasE4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "Memory", + "id": "Memory-x4ENQ", + "name": "messages_text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "context", + "id": "Prompt-Xz9bN", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-Memory-x4ENQ{œdataTypeœ:œMemoryœ,œidœ:œMemory-x4ENQœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-Xz9bN{œfieldNameœ:œcontextœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Memory-x4ENQ", + "sourceHandle": "{œdataTypeœ:œMemoryœ,œidœ:œMemory-x4ENQœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}", + "target": "Prompt-Xz9bN", + "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}" + } + ], + "nodes": [ + { + "data": { + 
"description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-Xz9bN", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { "template": ["context", "user_message"] }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "context": { + "advanced": false, + "display_name": "context", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + 
"multiline": true, + "name": "context", + "password": false, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "prompt", + "value": "{context}\n\nUser: {user_message}\nAI: " + }, + "user_message": { + "advanced": false, + "display_name": "user_message", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "user_message", + "password": false, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + } + } + }, + "type": "Prompt" + }, + "dragging": false, + "height": 517, + "id": "Prompt-Xz9bN", + "position": { "x": 1880.8227904110583, "y": 625.8049209882275 }, + "positionAbsolute": { "x": 1880.8227904110583, "y": 625.8049209882275 }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-KovKB", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "store_message", + "sender", + "sender_name", + "session_id", + "files" + ], + "frozen": false, + "icon": "ChatInput", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n 
name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + }, + "files": { + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "advanced": true, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 309, + "id": "ChatInput-KovKB", + "position": { "x": 1275.9262193671882, "y": 836.1228056896347 }, + "positionAbsolute": { "x": 1275.9262193671882, "y": 836.1228056896347 }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-pqHDB", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "stream", + "system_message", + "seed" + ], + "frozen": false, + "icon": "OpenAI", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = 
LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + }, + "input_value": { + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "advanced": true, + 
"display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "advanced": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o" + }, + "openai_api_base": { + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_schema": { + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "advanced": true, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "advanced": true, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + } + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 623, + "id": "OpenAIModel-pqHDB", + "position": { "x": 2468.968379487559, "y": 560.0689522326683 }, + "positionAbsolute": { "x": 2468.968379487559, "y": 560.0689522326683 }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-NasE4", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "store_message", + "sender", + "sender_name", + "session_id", + "data_template" + ], + "frozen": false, + "icon": "ChatOutput", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n 
name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + }, + "data_template": { + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "advanced": true, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + } + }, + "type": "ChatOutput" + }, + "height": 385, + "id": "ChatOutput-NasE4", + "position": { "x": 3083.1710516244116, "y": 701.521688846004 }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Retrieves stored chat messages from Langflow tables or an external memory.", + "display_name": "Chat Memory", + "id": "Memory-x4ENQ", + "node": { + "base_classes": ["BaseChatMemory", "Data", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Retrieves stored chat messages from Langflow tables or an external memory.", + "display_name": "Chat Memory", + "documentation": "", + "edited": false, + "field_order": [ + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template" + ], + "frozen": false, + "icon": "message-square-more", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Messages (Data)", + "method": "retrieve_messages", + "name": "messages", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Messages (Text)", + "method": "retrieve_messages_as_text", + "name": "messages_text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Memory", + "method": "build_lc_memory", + "name": "lc_memory", + "selected": "BaseChatMemory", + "types": ["BaseChatMemory"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain.memory import ConversationBufferMemory\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import BaseChatMemory\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import LCBuiltinChatMemory, get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n" + }, + "memory": { + "advanced": false, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "n_messages": { + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "order": { + "advanced": true, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "sender": { + "advanced": true, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "template": { + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + } + } + }, + "type": "Memory" + }, + "dragging": false, + "height": 387, + "id": "Memory-x4ENQ", + "position": { "x": 1301.98330242754, "y": 422.33865605652574 }, + "positionAbsolute": { "x": 1301.98330242754, "y": 422.33865605652574 }, + "selected": false, + "type": "genericNode", + "width": 384 + } + ], + "viewport": { + "x": -377.45799796990354, + "y": 18.161555190942522, + "zoom": 0.45494095964690673 + } + }, + "description": "This project can be used as a starting point for building a Chat experience with user specific memory. 
You can set a different Session ID to start a new message history.", + "name": "Memory Chatbot (1)", + "last_tested_version": "1.0.15", + "endpoint_name": null, + "is_component": false +} diff --git a/src/frontend/tests/extended/features/outdated-message.spec.ts b/src/frontend/tests/extended/features/outdated-message.spec.ts new file mode 100644 index 000000000..e88eaf113 --- /dev/null +++ b/src/frontend/tests/extended/features/outdated-message.spec.ts @@ -0,0 +1,58 @@ +import { test } from "@playwright/test"; +import { readFileSync } from "fs"; + +test("user must be able to see outdated message on error", async ({ page }) => { + await page.goto("/"); + + let modalCount = 0; + try { + const modalTitleElement = await page?.getByTestId("modal-title"); + if (modalTitleElement) { + modalCount = await modalTitleElement.count(); + } + } catch (error) { + modalCount = 0; + } + + while (modalCount === 0) { + await page.getByText("New Flow", { exact: true }).click(); + await page.waitForTimeout(3000); + modalCount = await page.getByTestId("modal-title")?.count(); + } + await page.locator("span").filter({ hasText: "Close" }).first().click(); + + await page.locator("span").filter({ hasText: "My Collection" }).isVisible(); + // Read the outdated flow JSON from disk. + const jsonContent = readFileSync("tests/assets/outdated_flow.json", "utf-8"); + + // Create the DataTransfer and File + const dataTransfer = await page.evaluateHandle((data) => { + const dt = new DataTransfer(); + // Wrap the JSON string in a File object + const file = new File([data], "outdated_flow.json", { + type: "application/json", + }); + dt.items.add(file); + return dt; + }, jsonContent); + + // Dispatch the drop event onto the cards wrapper + await page.getByTestId("cards-wrapper").dispatchEvent("drop", { + dataTransfer, + }); + + await page.waitForTimeout(3000); + + await page.getByTestId("list-card").first().click(); + + await page + .getByTestId("popover-anchor-input-api_key") + .fill("this is a test to crash"); + + await page.getByTestId("button_run_chat output").click(); + + await page.waitForSelector("text=there are outdated components in the flow", { + timeout: 30000, + state: "visible", + });
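
Reviewer note, not part of the patch: a minimal TypeScript sketch of the consumption pattern the diff establishes. useGetTypes registers a TanStack Query under the ["useGetTypes"] key and pushes the response into the types store via setTypes, so components can gate on the query's isFetched flag (as AppInitPage does) and re-run the fetch by key (as MenuBar's handleReloadComponents does) without the removed getTypes store action. The hook name useReloadComponents below is hypothetical.

import { useQueryClient } from "@tanstack/react-query";
import { useGetTypes } from "@/controllers/API/queries/flows/use-get-types";
import { useTypesStore } from "@/stores/typesStore";

// Hypothetical consumer: waits for the initial types fetch and exposes a
// reload that re-runs the same query, which calls setTypes again on success.
export function useReloadComponents() {
  const queryClient = useQueryClient();
  const { isFetched: typesLoaded } = useGetTypes({ enabled: true });
  const templates = useTypesStore((state) => state.templates);

  const reload = async () => {
    // Prefetching by key re-invokes getTypesFn, mirroring handleReloadComponents.
    await queryClient.prefetchQuery({ queryKey: ["useGetTypes"] });
  };

  return {
    typesLoaded,
    templateCount: Object.keys(templates).length,
    reload,
  };
}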