From 3406575c670827142e3217499d4e726cff42f3dd Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 10 Jul 2024 09:13:19 -0300 Subject: [PATCH] feat: add Maritalk model component (#2595) * feat: add Maritalk icon * feat: add Maritalk model component * fix: update icon size * feat: optimize MaritalkModelComponent initialization This commit optimizes the initialization of the MaritalkModelComponent class in the Maritalk.py file. It updates the default value for the 'temperature' parameter to be within the range of 0 to 1, using the RangeSpec class. This ensures that the temperature value is valid and improves the overall functionality of the component. * style: format maritalk svg * feat: optimize MaritalkModelComponent initialization This commit optimizes the initialization of the MaritalkModelComponent class in the Maritalk.py file. It updates the default value for the 'temperature' parameter to be within the range of 0 to 1, using the RangeSpec class. This ensures that the temperature value is valid and improves the overall functionality of the component. * feat: update 'stream' parameter to be advanced in MaritalkModelComponent This commit updates the 'stream' parameter in the MaritalkModelComponent class to be an advanced option. By setting the 'advanced' attribute to True, the 'stream' parameter will only be visible to advanced users. This change improves the usability of the component by hiding this option from regular users who do not need it. 
from langchain_community.chat_models import ChatMaritalk

from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.field_typing.range_spec import RangeSpec
from langflow.inputs import BoolInput, DropdownInput, FloatInput, IntInput, MessageInput, SecretStrInput, StrInput


class MaritalkModelComponent(LCModelComponent):
    """Langflow model component that generates text with Maritalk (sabia) chat models.

    Exposes the Maritalk model name, API key, temperature, max tokens, streaming
    flag, and system message as component inputs, and builds a ``ChatMaritalk``
    instance from them.
    """

    display_name = "Maritalk"
    description = "Generates text using Maritalk LLMs."
    icon = "Maritalk"
    name = "Maritalk"
    inputs = [
        MessageInput(name="input_value", display_name="Input"),
        IntInput(
            name="max_tokens",
            display_name="Max Tokens",
            advanced=True,
            value=512,
            info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
        ),
        DropdownInput(
            name="model_name",
            display_name="Model Name",
            advanced=False,
            options=["sabia-2-small", "sabia-2-medium"],
            # Fix: the default must be a single option string, not a list —
            # a list value matches none of the dropdown options.
            value="sabia-2-small",
        ),
        SecretStrInput(
            name="api_key",
            display_name="Maritalk API Key",
            # Fix: the original info text wrongly said "OpenAI model".
            info="The Maritalk API Key to use for the Maritalk model.",
            advanced=False,
        ),
        # Temperature is constrained to the valid [0, 1] range for Maritalk.
        FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)),
        BoolInput(name="stream", display_name="Stream", info=STREAM_INFO_TEXT, value=False, advanced=True),
        StrInput(
            name="system_message",
            display_name="System Message",
            info="System message to pass to the model.",
            advanced=True,
        ),
    ]

    def build_model(self) -> LanguageModel:  # type: ignore[type-var]
        """Build and return a ``ChatMaritalk`` chat model from the component inputs.

        Returns:
            LanguageModel: a configured ``ChatMaritalk`` instance.
        """
        model_name: str = self.model_name
        return ChatMaritalk(
            max_tokens=self.max_tokens,
            model=model_name,
            api_key=self.api_key,
            # NOTE(review): `or 0.1` coerces a falsy temperature (0 or None) to
            # 0.1, even though 0 is within the declared RangeSpec — preserved
            # from the original; confirm whether temperature=0 should be honored.
            temperature=self.temperature or 0.1,
        )
forwardRef } from "react"; +import SvgMaritalkIcon from "./MaritalkIcon"; + +export const MaritalkIcon = forwardRef< + SVGSVGElement, + React.PropsWithChildren<{}> +>((props, ref) => { + return ; +}); diff --git a/src/frontend/src/icons/Maritalk/maritalk-icon.svg b/src/frontend/src/icons/Maritalk/maritalk-icon.svg new file mode 100644 index 000000000..b3121e439 --- /dev/null +++ b/src/frontend/src/icons/Maritalk/maritalk-icon.svg @@ -0,0 +1,139 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index adbdd2d0c..198f9eeb0 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -184,6 +184,7 @@ import { GroqIcon } from "../icons/Groq"; import { HuggingFaceIcon } from "../icons/HuggingFace"; import { IFixIcon } from "../icons/IFixIt"; import { LangChainIcon } from "../icons/LangChain"; +import { MaritalkIcon } from "../icons/Maritalk"; import { MetaIcon } from "../icons/Meta"; import { MidjourneyIcon } from "../icons/Midjorney"; import { MongoDBIcon } from "../icons/MongoDB"; @@ -339,6 +340,7 @@ export const nodeIconsLucide: iconsType = { OllamaEmbeddings: OllamaIcon, ChatOllamaModel: OllamaIcon, FAISS: MetaIcon, + Maritalk: MaritalkIcon, FaissSearch: MetaIcon, LangChain: LangChainIcon, AzureOpenAiModel: AzureIcon,