feat: Add DeepSeek Model component (#5443)
* feat: Add DeepSeek component * Adiciona DeepseekModelComponent ao __init__.py * [autofix.ci] apply automated fixes * Add DeepSeek icon and update component files * Add DeepSeek icon and update all related files * [autofix.ci] apply automated fixes * Change DeepSeek icon color to blue (#080c34) * [autofix.ci] apply automated fixes * Revert DeepSeek icon color to black * [autofix.ci] apply automated fixes * remove: name parameter from DeepSeekModel to avoid backwards compatibility issues * test: add unit tests for DeepSeek model component with mocks * fix: correct formatting issues in DeepSeek model tests * feat: update DeepSeek icon color to #4c6cfc * fix: make DeepSeek API key field required * fix: format SecretStrInput to comply with line length limit in DeepSeekModelComponent * feat: add dark mode support for component icon * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Edwin Jose <edwin.jose@datastax.com>
This commit is contained in:
parent
b3fd941bf6
commit
656f9f30db
7 changed files with 339 additions and 0 deletions
|
|
@ -4,6 +4,7 @@ from .anthropic import AnthropicModelComponent
|
|||
from .azure_openai import AzureChatOpenAIComponent
|
||||
from .baidu_qianfan_chat import QianfanChatEndpointComponent
|
||||
from .cohere import CohereComponent
|
||||
from .deepseek import DeepSeekModelComponent
|
||||
from .google_generative_ai import GoogleGenerativeAIComponent
|
||||
from .groq import GroqModel
|
||||
from .huggingface import HuggingFaceEndpointsComponent
|
||||
|
|
@ -26,6 +27,7 @@ __all__ = [
|
|||
"ChatOllamaComponent",
|
||||
"ChatVertexAIComponent",
|
||||
"CohereComponent",
|
||||
"DeepSeekModelComponent",
|
||||
"GoogleGenerativeAIComponent",
|
||||
"GroqModel",
|
||||
"HuggingFaceEndpointsComponent",
|
||||
|
|
|
|||
135
src/backend/base/langflow/components/models/deepseek.py
Normal file
135
src/backend/base/langflow/components/models/deepseek.py
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
import requests
|
||||
from pydantic.v1 import SecretStr
|
||||
from typing_extensions import override
|
||||
|
||||
from langflow.base.models.model import LCModelComponent
|
||||
from langflow.field_typing import LanguageModel
|
||||
from langflow.field_typing.range_spec import RangeSpec
|
||||
from langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput
|
||||
|
||||
DEEPSEEK_MODELS = ["deepseek-chat"]
|
||||
|
||||
|
||||
class DeepSeekModelComponent(LCModelComponent):
    """Langflow model component for DeepSeek's OpenAI-compatible chat API.

    Builds a ``langchain_openai.ChatOpenAI`` client pointed at the DeepSeek
    base URL, and can refresh the model-name dropdown by querying the
    provider's ``/models`` endpoint.
    """

    display_name = "DeepSeek"
    description = "Generate text using DeepSeek LLMs."
    icon = "DeepSeek"

    inputs = [
        *LCModelComponent._base_inputs,
        IntInput(
            name="max_tokens",
            display_name="Max Tokens",
            advanced=True,
            info="Maximum number of tokens to generate. Set to 0 for unlimited.",
            range_spec=RangeSpec(min=0, max=128000),
        ),
        DictInput(
            name="model_kwargs",
            display_name="Model Kwargs",
            advanced=True,
            info="Additional keyword arguments to pass to the model.",
        ),
        BoolInput(
            name="json_mode",
            display_name="JSON Mode",
            advanced=True,
            info="If True, it will output JSON regardless of passing a schema.",
        ),
        DropdownInput(
            name="model_name",
            display_name="Model Name",
            info="DeepSeek model to use",
            options=DEEPSEEK_MODELS,
            value="deepseek-chat",
            refresh_button=True,
        ),
        StrInput(
            name="api_base",
            display_name="DeepSeek API Base",
            advanced=True,
            info="Base URL for API requests. Defaults to https://api.deepseek.com",
            value="https://api.deepseek.com",
        ),
        SecretStrInput(
            name="api_key",
            display_name="DeepSeek API Key",
            info="The DeepSeek API Key",
            advanced=False,
            required=True,
        ),
        SliderInput(
            name="temperature",
            display_name="Temperature",
            info="Controls randomness in responses",
            value=1.0,
            range_spec=RangeSpec(min=0, max=2, step=0.01),
        ),
        IntInput(
            name="seed",
            display_name="Seed",
            info="The seed controls the reproducibility of the job.",
            advanced=True,
            value=1,
        ),
    ]

    def get_models(self) -> list[str]:
        """Return the model ids available for the configured API key.

        Falls back to the static ``DEEPSEEK_MODELS`` list when no key is set
        or when the HTTP request fails; request errors are surfaced via
        ``self.status`` instead of raising.
        """
        if not self.api_key:
            return DEEPSEEK_MODELS

        url = f"{self.api_base}/models"
        headers = {"Authorization": f"Bearer {self.api_key}", "Accept": "application/json"}

        try:
            response = requests.get(url, headers=headers, timeout=10)
            response.raise_for_status()
            model_list = response.json()
            return [model["id"] for model in model_list.get("data", [])]
        except requests.RequestException as e:
            self.status = f"Error fetching models: {e}"
            return DEEPSEEK_MODELS

    @override
    def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
        """Refresh the model-name dropdown options when a relevant field changes."""
        if field_name in {"api_key", "api_base", "model_name"}:
            models = self.get_models()
            build_config["model_name"]["options"] = models
        return build_config

    def build_model(self) -> LanguageModel:
        """Instantiate a ``ChatOpenAI`` client configured for DeepSeek.

        Returns:
            The configured chat model, optionally bound to JSON-object output
            when ``json_mode`` is enabled.

        Raises:
            ImportError: if ``langchain-openai`` is not installed.
        """
        try:
            from langchain_openai import ChatOpenAI
        except ImportError as e:
            msg = "langchain-openai not installed. Please install with `pip install langchain-openai`"
            raise ImportError(msg) from e

        api_key = SecretStr(self.api_key).get_secret_value() if self.api_key else None
        output = ChatOpenAI(
            model=self.model_name,
            temperature=self.temperature if self.temperature is not None else 0.1,
            max_tokens=self.max_tokens or None,  # 0 means "unlimited" -> omit the cap
            model_kwargs=self.model_kwargs or {},
            base_url=self.api_base,
            api_key=api_key,
            streaming=self.stream if hasattr(self, "stream") else False,
            seed=self.seed,
        )

        if self.json_mode:
            output = output.bind(response_format={"type": "json_object"})

        return output

    def _get_exception_message(self, e: Exception):
        """Extract a human-readable message from a DeepSeek API exception.

        Returns ``None`` when the exception carries no usable message or the
        ``openai`` package is unavailable.
        """
        try:
            from openai import BadRequestError
        except ImportError:
            return None
        # BadRequestError.body may be None (or a non-dict) depending on the
        # response; guard before calling .get() to avoid an AttributeError.
        if isinstance(e, BadRequestError) and isinstance(e.body, dict):
            message = e.body.get("message")
            if message:
                return message
        return None
|
||||
112
src/backend/tests/unit/components/models/test_deepseek.py
Normal file
112
src/backend/tests/unit/components/models/test_deepseek.py
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from langflow.components.models import DeepSeekModelComponent
|
||||
from langflow.custom import Component
|
||||
from langflow.custom.utils import build_custom_component_template
|
||||
|
||||
|
||||
def test_deepseek_initialization():
    """The component's display metadata matches the expected values."""
    comp = DeepSeekModelComponent()
    expected = {
        "display_name": "DeepSeek",
        "description": "Generate text using DeepSeek LLMs.",
        "icon": "DeepSeek",
    }
    for attr, value in expected.items():
        assert getattr(comp, attr) == value
|
||||
|
||||
|
||||
def test_deepseek_template():
    """The generated frontend template exposes every expected input field."""
    deepseek = DeepSeekModelComponent()
    component = Component(_code=deepseek._code)
    frontend_node, _ = build_custom_component_template(component)

    # Basic structure of the frontend node.
    assert isinstance(frontend_node, dict)
    assert "template" in frontend_node

    # Collect the names of all dict-shaped template entries.
    template_entries = frontend_node["template"].values()
    input_names = {entry["name"] for entry in template_entries if isinstance(entry, dict)}

    expected_inputs = (
        "max_tokens",
        "model_kwargs",
        "json_mode",
        "model_name",
        "api_base",
        "api_key",
        "temperature",
        "seed",
    )
    for expected in expected_inputs:
        assert expected in input_names
|
||||
|
||||
|
||||
# NOTE: pytest marks (including `parametrize`) have no effect on fixtures and
# are rejected by modern pytest, so the previously attached
# `@pytest.mark.parametrize(("temperature", "max_tokens"), ...)` decorator was
# removed — its parameters were never consumed by any test anyway.
@pytest.fixture
def mock_chat_openai(mocker):
    """Patch `langchain_openai.ChatOpenAI` and return the patched mock class."""
    return mocker.patch("langchain_openai.ChatOpenAI")
|
||||
|
||||
|
||||
def test_deepseek_build_model(mock_chat_openai):
    """build_model forwards the configured values to ChatOpenAI."""
    component = DeepSeekModelComponent()
    component.api_key = "test-key"
    component.temperature = 0.7
    component.max_tokens = 100

    # The patched ChatOpenAI class returns a sentinel instance.
    chat_instance = MagicMock()
    mock_chat_openai.return_value = chat_instance

    built = component.build_model()

    # All configured (and defaulted) parameters must reach the constructor.
    mock_chat_openai.assert_called_once_with(
        model="deepseek-chat",
        temperature=0.7,
        max_tokens=100,
        model_kwargs={},
        base_url="https://api.deepseek.com",
        api_key="test-key",
        streaming=False,
        seed=1,
    )
    assert built is chat_instance
|
||||
|
||||
|
||||
def test_deepseek_get_models(mocker):
    """get_models queries the /models endpoint and returns the model ids."""
    component = DeepSeekModelComponent()
    component.api_key = "test-key"

    # Stub out the HTTP layer with a canned JSON payload.
    fake_response = MagicMock()
    fake_response.json.return_value = {"data": [{"id": "deepseek-chat"}, {"id": "deepseek-coder"}]}
    patched_get = mocker.patch("requests.get", return_value=fake_response)

    assert component.get_models() == ["deepseek-chat", "deepseek-coder"]

    # The request must target the default base URL with auth headers.
    patched_get.assert_called_once_with(
        "https://api.deepseek.com/models",
        headers={"Authorization": "Bearer test-key", "Accept": "application/json"},
        timeout=10,
    )
|
||||
|
||||
|
||||
def test_deepseek_error_handling(mock_chat_openai):
    """Exceptions raised while constructing ChatOpenAI propagate out of build_model."""
    mock_chat_openai.side_effect = Exception("Invalid API key")

    component = DeepSeekModelComponent()
    component.api_key = "invalid-key"

    with pytest.raises(Exception, match="Invalid API key"):
        component.build_model()
|
||||
44
src/frontend/src/icons/DeepSeek/DeepSeekIcon.jsx
Normal file
44
src/frontend/src/icons/DeepSeek/DeepSeekIcon.jsx
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
import { stringToBool } from "@/utils/utils";
|
||||
|
||||
// Inline DeepSeek logo as a React SVG component.
// The outline is auto-traced (potrace output) and rendered as a single filled
// <path>; do not hand-edit the path data. The fill color switches with the
// theme via the `isdark` prop, which arrives as a *string* and is converted
// with stringToBool (see icons/DeepSeek/index.tsx for the caller).
const DeepSeekSVG = (props) => (
  <svg
    version="1.0"
    xmlns="http://www.w3.org/2000/svg"
    width="225.000000pt"
    height="225.000000pt"
    viewBox="0 0 225.000000 225.000000"
    preserveAspectRatio="xMidYMid meet"
    {...props}
  >
    {/* potrace emits a flipped coordinate system; the transform un-flips it */}
    <g
      transform="translate(0.000000,225.000000) scale(0.100000,-0.100000)"
      fill={stringToBool(props.isdark) ? "#1f3a94" : "#4c6cfc"}
      stroke="none"
    >
      <path
        d="M1527 1893 c-18 -39 -22 -66 -22 -143 0 -84 3 -102 29 -158 20 -43
47 -80 88 -118 58 -53 59 -56 53 -93 -3 -22 -13 -47 -20 -58 -25 -33 -110 23
-266 174 -76 72 -162 148 -192 168 -90 60 -101 115 -35 177 35 32 40 41 28 49
-27 18 -169 3 -250 -27 -71 -25 -86 -27 -255 -28 -169 -1 -185 -3 -258 -29
-170 -59 -286 -166 -362 -332 -49 -106 -60 -164 -59 -305 1 -94 7 -145 23
-205 76 -283 304 -521 591 -617 92 -31 100 -32 265 -32 133 0 184 3 235 18 84
23 174 66 233 111 l47 36 47 -17 c29 -9 85 -16 138 -17 143 -3 195 15 195 68
0 19 -11 29 -53 48 -30 13 -71 30 -93 37 -69 22 -69 26 -6 99 32 36 73 88 91
114 84 123 156 333 168 488 l6 76 56 13 c75 17 124 43 182 97 60 56 95 125
111 220 10 58 9 74 -1 87 -19 23 -34 20 -67 -15 -41 -44 -99 -68 -164 -69 -57
0 -125 -25 -149 -53 -19 -23 -28 -21 -35 7 -10 39 -55 85 -110 113 -70 35 -93
58 -111 114 -22 66 -48 67 -78 2z m-1161 -544 c190 -33 365 -129 501 -273 38
-39 113 -136 168 -215 105 -151 216 -278 284 -326 23 -15 41 -32 41 -38 0 -14
-146 -2 -199 17 -24 9 -83 45 -130 80 -139 104 -217 149 -259 150 -33 1 -37
-2 -40 -26 -2 -14 8 -49 22 -77 30 -58 26 -76 -18 -91 -100 -33 -312 102 -429
272 -44 64 -103 193 -127 278 -28 100 -37 221 -19 237 28 23 116 28 205 12z
m943 -79 c50 -25 130 -115 166 -186 25 -50 27 -57 13 -71 -9 -8 -36 -18 -61
-20 -36 -4 -53 -1 -81 17 -47 28 -56 47 -56 114 0 61 -24 96 -66 96 -35 0 -54
10 -54 29 0 44 73 55 139 21z m-79 -105 c10 -12 10 -18 0 -30 -25 -30 -61 -7
-46 30 3 8 12 15 19 15 8 0 20 -7 27 -15z"
      />
    </g>
  </svg>
);

export default DeepSeekSVG;
|
||||
33
src/frontend/src/icons/DeepSeek/deepseek.svg
Normal file
33
src/frontend/src/icons/DeepSeek/deepseek.svg
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
|
||||
width="225.000000pt" height="225.000000pt" viewBox="0 0 225.000000 225.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
<metadata>
|
||||
Created by potrace 1.10, written by Peter Selinger 2001-2011
|
||||
</metadata>
|
||||
<g transform="translate(0.000000,225.000000) scale(0.100000,-0.100000)"
|
||||
fill="#000000" stroke="none">
|
||||
<path d="M1527 1893 c-18 -39 -22 -66 -22 -143 0 -84 3 -102 29 -158 20 -43
|
||||
47 -80 88 -118 58 -53 59 -56 53 -93 -3 -22 -13 -47 -20 -58 -25 -33 -110 23
|
||||
-266 174 -76 72 -162 148 -192 168 -90 60 -101 115 -35 177 35 32 40 41 28 49
|
||||
-27 18 -169 3 -250 -27 -71 -25 -86 -27 -255 -28 -169 -1 -185 -3 -258 -29
|
||||
-170 -59 -286 -166 -362 -332 -49 -106 -60 -164 -59 -305 1 -94 7 -145 23
|
||||
-205 76 -283 304 -521 591 -617 92 -31 100 -32 265 -32 133 0 184 3 235 18 84
|
||||
23 174 66 233 111 l47 36 47 -17 c29 -9 85 -16 138 -17 143 -3 195 15 195 68
|
||||
0 19 -11 29 -53 48 -30 13 -71 30 -93 37 -69 22 -69 26 -6 99 32 36 73 88 91
|
||||
114 84 123 156 333 168 488 l6 76 56 13 c75 17 124 43 182 97 60 56 95 125
|
||||
111 220 10 58 9 74 -1 87 -19 23 -34 20 -67 -15 -41 -44 -99 -68 -164 -69 -57
|
||||
0 -125 -25 -149 -53 -19 -23 -28 -21 -35 7 -10 39 -55 85 -110 113 -70 35 -93
|
||||
58 -111 114 -22 66 -48 67 -78 2z m-1161 -544 c190 -33 365 -129 501 -273 38
|
||||
-39 113 -136 168 -215 105 -151 216 -278 284 -326 23 -15 41 -32 41 -38 0 -14
|
||||
-146 -2 -199 17 -24 9 -83 45 -130 80 -139 104 -217 149 -259 150 -33 1 -37
|
||||
-2 -40 -26 -2 -14 8 -49 22 -77 30 -58 26 -76 -18 -91 -100 -33 -312 102 -429
|
||||
272 -44 64 -103 193 -127 278 -28 100 -37 221 -19 237 28 23 116 28 205 12z
|
||||
m943 -79 c50 -25 130 -115 166 -186 25 -50 27 -57 13 -71 -9 -8 -36 -18 -61
|
||||
-20 -36 -4 -53 -1 -81 17 -47 28 -56 47 -56 114 0 61 -24 96 -66 96 -35 0 -54
|
||||
10 -54 29 0 44 73 55 139 21z m-79 -105 c10 -12 10 -18 0 -30 -25 -30 -61 -7
|
||||
-46 30 3 8 12 15 19 15 8 0 20 -7 27 -15z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.9 KiB |
11
src/frontend/src/icons/DeepSeek/index.tsx
Normal file
11
src/frontend/src/icons/DeepSeek/index.tsx
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
import { useDarkStore } from "@/stores/darkStore";
|
||||
import React, { forwardRef } from "react";
|
||||
import DeepSeekSVG from "./DeepSeekIcon";
|
||||
|
||||
// Theme-aware wrapper around the raw DeepSeek SVG component.
// NOTE(review): `ref` is forwarded to DeepSeekSVG, but DeepSeekSVG is a plain
// function component (not wrapped in forwardRef), so React will not attach
// the ref to the underlying <svg> — confirm whether ref support is needed and,
// if so, wrap DeepSeekSVG in forwardRef as well.
export const DeepSeekIcon = forwardRef<
  SVGSVGElement,
  React.PropsWithChildren<{}>
>((props, ref) => {
  // The dark flag is stringified because DeepSeekSVG parses it back with
  // stringToBool when choosing the fill color.
  const isdark = useDarkStore((state) => state.dark).toString();
  return <DeepSeekSVG ref={ref} isdark={isdark} {...props} />;
});
|
||||
|
|
@ -252,6 +252,7 @@ import { ComposioIcon } from "../icons/Composio";
|
|||
import { ConfluenceIcon } from "../icons/Confluence";
|
||||
import { CouchbaseIcon } from "../icons/Couchbase";
|
||||
import { CrewAiIcon } from "../icons/CrewAI";
|
||||
import { DeepSeekIcon } from "../icons/DeepSeek";
|
||||
import { ElasticsearchIcon } from "../icons/ElasticsearchStore";
|
||||
import { EvernoteIcon } from "../icons/Evernote";
|
||||
import { FBIcon } from "../icons/FacebookMessenger";
|
||||
|
|
@ -670,6 +671,7 @@ export const nodeIconsLucide: iconsType = {
|
|||
AzureChatOpenAI: OpenAiIcon,
|
||||
OpenAI: OpenAiIcon,
|
||||
OpenRouter: OpenRouterIcon,
|
||||
DeepSeek: DeepSeekIcon,
|
||||
OpenAIEmbeddings: OpenAiIcon,
|
||||
Pinecone: PineconeIcon,
|
||||
Qdrant: QDrantIcon,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue