From a2406fac72262f2192575b965e802515bd6016c5 Mon Sep 17 00:00:00 2001
From: Sai Kolasani <92129478+saikolasani@users.noreply.github.com>
Date: Mon, 19 Aug 2024 14:28:28 -0700
Subject: [PATCH] feat: Add Perplexity Models Component (#3351)
---
.../components/models/PerplexityModel.py | 83 ++++++++++++++++++
.../langflow/components/models/__init__.py | 2 +
src/frontend/package-lock.json | 1 +
.../src/icons/Perplexity/Perplexity.jsx | 21 +++++
src/frontend/src/icons/Perplexity/index.tsx | 9 ++
.../src/icons/Perplexity/perplexity.svg | Bin 0 -> 2116 bytes
src/frontend/src/utils/styleUtils.ts | 2 +
7 files changed, 118 insertions(+)
create mode 100644 src/backend/base/langflow/components/models/PerplexityModel.py
create mode 100644 src/frontend/src/icons/Perplexity/Perplexity.jsx
create mode 100644 src/frontend/src/icons/Perplexity/index.tsx
create mode 100644 src/frontend/src/icons/Perplexity/perplexity.svg
diff --git a/src/backend/base/langflow/components/models/PerplexityModel.py b/src/backend/base/langflow/components/models/PerplexityModel.py
new file mode 100644
index 000000000..7265db9b4
--- /dev/null
+++ b/src/backend/base/langflow/components/models/PerplexityModel.py
@@ -0,0 +1,89 @@
+from langchain_community.chat_models import ChatPerplexity
+from pydantic.v1 import SecretStr
+
+from langflow.base.models.model import LCModelComponent
+from langflow.field_typing import LanguageModel
+from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput
+
+
+class PerplexityComponent(LCModelComponent):
+    """Langflow component that builds a Perplexity chat model from user inputs."""
+
+    display_name = "Perplexity"
+    description = "Generate text using Perplexity LLMs."
+    documentation = "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/"
+    icon = "Perplexity"
+    name = "PerplexityModel"
+
+    inputs = LCModelComponent._base_inputs + [
+        DropdownInput(
+            name="model_name",
+            display_name="Model Name",
+            advanced=False,
+            options=[
+                "llama-3.1-sonar-small-128k-online",
+                "llama-3.1-sonar-large-128k-online",
+                "llama-3.1-sonar-huge-128k-online",
+                "llama-3.1-sonar-small-128k-chat",
+                "llama-3.1-sonar-large-128k-chat",
+                "llama-3.1-8b-instruct",
+                "llama-3.1-70b-instruct",
+            ],
+            value="llama-3.1-sonar-small-128k-online",
+        ),
+        IntInput(
+            name="max_output_tokens",
+            display_name="Max Output Tokens",
+            info="The maximum number of tokens to generate.",
+        ),
+        SecretStrInput(
+            name="api_key",
+            display_name="Perplexity API Key",
+            info="The Perplexity API Key to use for the Perplexity model.",
+            advanced=False,
+        ),
+        FloatInput(name="temperature", display_name="Temperature", value=0.75),
+        FloatInput(
+            name="top_p",
+            display_name="Top P",
+            info="The maximum cumulative probability of tokens to consider when sampling.",
+            advanced=True,
+        ),
+        IntInput(
+            name="n",
+            display_name="N",
+            info="Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
+            advanced=True,
+        ),
+        IntInput(
+            name="top_k",
+            display_name="Top K",
+            info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
+            advanced=True,
+        ),
+    ]
+
+    def build_model(self) -> LanguageModel:  # type: ignore[type-var]
+        """Instantiate and return a configured ChatPerplexity model.
+
+        Returns:
+            LanguageModel: the ChatPerplexity instance built from the inputs.
+        """
+        # Unwrap the secret; an unset key is passed through as None so
+        # ChatPerplexity can resolve it from the PPLX_API_KEY env var.
+        api_key = SecretStr(self.api_key).get_secret_value() if self.api_key else None
+
+        # Zero/unset numeric inputs are normalized to the model defaults below.
+        return ChatPerplexity(
+            model=self.model_name,
+            temperature=self.temperature or 0.75,
+            pplx_api_key=api_key,
+            top_k=self.top_k or None,
+            top_p=self.top_p or None,
+            n=self.n or 1,
+            # ChatPerplexity's keyword is `max_tokens`; passing
+            # `max_output_tokens` would be forwarded verbatim to the API
+            # (via model_kwargs), which does not recognize it.
+            max_tokens=self.max_output_tokens or None,
+        )
diff --git a/src/backend/base/langflow/components/models/__init__.py b/src/backend/base/langflow/components/models/__init__.py
index 3e708d5a8..08385063c 100644
--- a/src/backend/base/langflow/components/models/__init__.py
+++ b/src/backend/base/langflow/components/models/__init__.py
@@ -9,6 +9,7 @@ from .HuggingFaceModel import HuggingFaceEndpointsComponent
from .OllamaModel import ChatOllamaComponent
from .OpenAIModel import OpenAIModelComponent
+from .PerplexityModel import PerplexityComponent
from .VertexAiModel import ChatVertexAIComponent
__all__ = [
"AIMLModelComponent",
@@ -22,5 +23,6 @@ __all__ = [
"ChatOllamaComponent",
"OpenAIModelComponent",
"ChatVertexAIComponent",
+ "PerplexityComponent",
"base",
]
diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json
index db0e15228..f150e6b42 100644
--- a/src/frontend/package-lock.json
+++ b/src/frontend/package-lock.json
@@ -1079,6 +1079,7 @@
},
"node_modules/@clack/prompts/node_modules/is-unicode-supported": {
"version": "1.3.0",
+ "extraneous": true,
"inBundle": true,
"license": "MIT",
"engines": {
diff --git a/src/frontend/src/icons/Perplexity/Perplexity.jsx b/src/frontend/src/icons/Perplexity/Perplexity.jsx
new file mode 100644
index 000000000..5258f03d7
--- /dev/null
+++ b/src/frontend/src/icons/Perplexity/Perplexity.jsx
@@ -0,0 +1,21 @@
+const SvgPerplexity = (props) => (
+
+);
+
+export default SvgPerplexity;
diff --git a/src/frontend/src/icons/Perplexity/index.tsx b/src/frontend/src/icons/Perplexity/index.tsx
new file mode 100644
index 000000000..d6103437c
--- /dev/null
+++ b/src/frontend/src/icons/Perplexity/index.tsx
@@ -0,0 +1,9 @@
+import React, { forwardRef } from "react";
+import PerplexitySVG from "./perplexity";
+
+export const PerplexityIcon = forwardRef<
+ SVGSVGElement,
+ React.PropsWithChildren<{}>
+>((props, ref) => {
+  return <PerplexitySVG ref={ref} {...props} />;
+});
diff --git a/src/frontend/src/icons/Perplexity/perplexity.svg b/src/frontend/src/icons/Perplexity/perplexity.svg
new file mode 100644
index 0000000000000000000000000000000000000000..307f257c0124ad92a93df4fef260beaf4e717370
GIT binary patch
literal 2116
zcmbuAOHUg?5QO`uAab_Z_ah=4f)wQvDYh
zsjjZB8UOluZX4U#t)1A5-P_6@H2z?xwzRFTM|*E)y1x50)xET-t?fxG&aAM^ud_YI
zsZcgrd98H|n<#1_yt(dAcBi*nN50i~ty~M?^L=JtJ#wb^jecuIg0xk{i|+r6bEjO_
z%Kc|XYw_JG^Syqriu-2ITCvh-Yd@UJjow$f;PR~J!Yf%wIVxPM#<|DMeAVMW<95PZ
zNX?I0`{G!cD&|*PE?s$M)=TTgDj{Fkxj0^n&r)-V4Sjm2_)l*?belWg7k$fx0z$5?
zfxZ-Sr{~mKU6ozxS9_M;$Hxo`MP^!4JD=b+L@iW*p_QHDD$h1k{E0H+l3L$eDN!pX
za2m}G*O!_dkB{d+AI)DLp$%0E_Egj=A5~Xg^@-T@dJhh*U5zjcsSvs+mQigQ!I}AE
zo|&9-sy*o
z-CB<3cHV)*?5kWpQ8XRu72j!fN^>X<
zE_1nNCmNBt)id|C;DTOx4S~hl!t*qmZFJ$Q#E|+x%Dt=9S88@xL9@qVHEu?aPWP!|
zSsl@_mryZy;Sn;|?uUpk6Q3zf67GLN`oFS#yQy2JYr=kjb
zcd975wc4|aeT=^B0QP?69QmFUV@D7l5#ga2;Rl=$$eC8-1MpFh$SaL9F~m`wu0wt7
z*jDOfZ`Fa4_HEP`u0>zsLOa|;)x?f{68i)MT-9qfoeklO;fP&xhy