diff --git a/src/backend/base/langflow/components/models/__init__.py b/src/backend/base/langflow/components/models/__init__.py
index b757bd2c6..ecd9da217 100644
--- a/src/backend/base/langflow/components/models/__init__.py
+++ b/src/backend/base/langflow/components/models/__init__.py
@@ -4,6 +4,7 @@ from .anthropic import AnthropicModelComponent
from .azure_openai import AzureChatOpenAIComponent
from .baidu_qianfan_chat import QianfanChatEndpointComponent
from .cohere import CohereComponent
+from .deepseek import DeepSeekModelComponent
from .google_generative_ai import GoogleGenerativeAIComponent
from .groq import GroqModel
from .huggingface import HuggingFaceEndpointsComponent
@@ -26,6 +27,7 @@ __all__ = [
"ChatOllamaComponent",
"ChatVertexAIComponent",
"CohereComponent",
+ "DeepSeekModelComponent",
"GoogleGenerativeAIComponent",
"GroqModel",
"HuggingFaceEndpointsComponent",
diff --git a/src/backend/base/langflow/components/models/deepseek.py b/src/backend/base/langflow/components/models/deepseek.py
new file mode 100644
index 000000000..c01cf5756
--- /dev/null
+++ b/src/backend/base/langflow/components/models/deepseek.py
@@ -0,0 +1,135 @@
+import requests
+from pydantic.v1 import SecretStr
+from typing_extensions import override
+
+from langflow.base.models.model import LCModelComponent
+from langflow.field_typing import LanguageModel
+from langflow.field_typing.range_spec import RangeSpec
+from langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput
+
+DEEPSEEK_MODELS = ["deepseek-chat"]
+
+
+class DeepSeekModelComponent(LCModelComponent):
+ display_name = "DeepSeek"
+ description = "Generate text using DeepSeek LLMs."
+ icon = "DeepSeek"
+
+ inputs = [
+ *LCModelComponent._base_inputs,
+ IntInput(
+ name="max_tokens",
+ display_name="Max Tokens",
+ advanced=True,
+ info="Maximum number of tokens to generate. Set to 0 for unlimited.",
+ range_spec=RangeSpec(min=0, max=128000),
+ ),
+ DictInput(
+ name="model_kwargs",
+ display_name="Model Kwargs",
+ advanced=True,
+ info="Additional keyword arguments to pass to the model.",
+ ),
+ BoolInput(
+ name="json_mode",
+ display_name="JSON Mode",
+ advanced=True,
+ info="If True, it will output JSON regardless of passing a schema.",
+ ),
+ DropdownInput(
+ name="model_name",
+ display_name="Model Name",
+ info="DeepSeek model to use",
+ options=DEEPSEEK_MODELS,
+ value="deepseek-chat",
+ refresh_button=True,
+ ),
+ StrInput(
+ name="api_base",
+ display_name="DeepSeek API Base",
+ advanced=True,
+ info="Base URL for API requests. Defaults to https://api.deepseek.com",
+ value="https://api.deepseek.com",
+ ),
+ SecretStrInput(
+ name="api_key",
+ display_name="DeepSeek API Key",
+ info="The DeepSeek API Key",
+ advanced=False,
+ required=True,
+ ),
+ SliderInput(
+ name="temperature",
+ display_name="Temperature",
+ info="Controls randomness in responses",
+ value=1.0,
+ range_spec=RangeSpec(min=0, max=2, step=0.01),
+ ),
+ IntInput(
+ name="seed",
+ display_name="Seed",
+ info="The seed controls the reproducibility of the job.",
+ advanced=True,
+ value=1,
+ ),
+ ]
+
+ def get_models(self) -> list[str]:
+ if not self.api_key:
+ return DEEPSEEK_MODELS
+
+ url = f"{self.api_base}/models"
+ headers = {"Authorization": f"Bearer {self.api_key}", "Accept": "application/json"}
+
+ try:
+ response = requests.get(url, headers=headers, timeout=10)
+ response.raise_for_status()
+ model_list = response.json()
+ return [model["id"] for model in model_list.get("data", [])]
+ except requests.RequestException as e:
+ self.status = f"Error fetching models: {e}"
+ return DEEPSEEK_MODELS
+
+ @override
+ def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
+ if field_name in {"api_key", "api_base", "model_name"}:
+ models = self.get_models()
+ build_config["model_name"]["options"] = models
+ return build_config
+
+ def build_model(self) -> LanguageModel:
+ try:
+ from langchain_openai import ChatOpenAI
+ except ImportError as e:
+ msg = "langchain-openai not installed. Please install with `pip install langchain-openai`"
+ raise ImportError(msg) from e
+
+ api_key = SecretStr(self.api_key).get_secret_value() if self.api_key else None
+ output = ChatOpenAI(
+ model=self.model_name,
+ temperature=self.temperature if self.temperature is not None else 0.1,
+ max_tokens=self.max_tokens or None,
+ model_kwargs=self.model_kwargs or {},
+ base_url=self.api_base,
+ api_key=api_key,
+ streaming=self.stream if hasattr(self, "stream") else False,
+ seed=self.seed,
+ )
+
+ if self.json_mode:
+ output = output.bind(response_format={"type": "json_object"})
+
+ return output
+
+ def _get_exception_message(self, e: Exception):
+ """Get message from DeepSeek API exception."""
+ try:
+ from openai import BadRequestError
+
+        if isinstance(e, BadRequestError):
+            # `body` is optional on OpenAI SDK errors; guard against None.
+            message = (e.body or {}).get("message")
+            if message:
+                return message
+        except ImportError:
+            pass
diff --git a/src/backend/tests/unit/components/models/test_deepseek.py b/src/backend/tests/unit/components/models/test_deepseek.py
new file mode 100644
index 000000000..0d80f98d4
--- /dev/null
+++ b/src/backend/tests/unit/components/models/test_deepseek.py
@@ -0,0 +1,112 @@
+from unittest.mock import MagicMock
+
+import pytest
+from langflow.components.models import DeepSeekModelComponent
+from langflow.custom import Component
+from langflow.custom.utils import build_custom_component_template
+
+
+def test_deepseek_initialization():
+ component = DeepSeekModelComponent()
+ assert component.display_name == "DeepSeek"
+ assert component.description == "Generate text using DeepSeek LLMs."
+ assert component.icon == "DeepSeek"
+
+
+def test_deepseek_template():
+ deepseek = DeepSeekModelComponent()
+ component = Component(_code=deepseek._code)
+ frontend_node, _ = build_custom_component_template(component)
+
+ # Verify basic structure
+ assert isinstance(frontend_node, dict)
+
+ # Verify inputs
+ assert "template" in frontend_node
+ input_names = [input_["name"] for input_ in frontend_node["template"].values() if isinstance(input_, dict)]
+
+ expected_inputs = [
+ "max_tokens",
+ "model_kwargs",
+ "json_mode",
+ "model_name",
+ "api_base",
+ "api_key",
+ "temperature",
+ "seed",
+ ]
+
+ for input_name in expected_inputs:
+ assert input_name in input_names
+
+
+@pytest.fixture
+def mock_chat_openai(mocker):
+    return mocker.patch("langchain_openai.ChatOpenAI")
+
+
+@pytest.mark.parametrize(
+    ("temperature", "max_tokens"),
+    [
+        (0.5, 100),
+        (1.0, 500),
+        (1.5, 1000),
+    ],
+)
+def test_deepseek_build_model(mock_chat_openai, temperature, max_tokens):
+    component = DeepSeekModelComponent()
+    component.temperature = temperature
+    component.max_tokens = max_tokens
+    component.api_key = "test-key"
+
+    # Mock the ChatOpenAI instance
+    mock_instance = MagicMock()
+    mock_chat_openai.return_value = mock_instance
+
+    model = component.build_model()
+
+    # Verify ChatOpenAI was called with the parametrized values
+    mock_chat_openai.assert_called_once_with(
+        max_tokens=max_tokens,
+        model_kwargs={},
+        model="deepseek-chat",
+        base_url="https://api.deepseek.com",
+        api_key="test-key",
+        temperature=temperature,
+        seed=1,
+        streaming=False,
+    )
+    assert model == mock_instance
+
+
+def test_deepseek_get_models(mocker):
+ component = DeepSeekModelComponent()
+
+ # Mock requests.get
+ mock_get = mocker.patch("requests.get")
+ mock_response = MagicMock()
+ mock_response.json.return_value = {"data": [{"id": "deepseek-chat"}, {"id": "deepseek-coder"}]}
+ mock_get.return_value = mock_response
+
+ # Test with API key
+ component.api_key = "test-key"
+ models = component.get_models()
+ assert models == ["deepseek-chat", "deepseek-coder"]
+
+ # Verify API call
+ mock_get.assert_called_once_with(
+ "https://api.deepseek.com/models",
+ headers={"Authorization": "Bearer test-key", "Accept": "application/json"},
+ timeout=10,
+ )
+
+
+def test_deepseek_error_handling(mock_chat_openai):
+ component = DeepSeekModelComponent()
+ component.api_key = "invalid-key"
+
+ # Mock ChatOpenAI to raise exception
+ mock_chat_openai.side_effect = Exception("Invalid API key")
+
+ with pytest.raises(Exception, match="Invalid API key"):
+ component.build_model()
diff --git a/src/frontend/src/icons/DeepSeek/DeepSeekIcon.jsx b/src/frontend/src/icons/DeepSeek/DeepSeekIcon.jsx
new file mode 100644
index 000000000..b0747f2d4
--- /dev/null
+++ b/src/frontend/src/icons/DeepSeek/DeepSeekIcon.jsx
@@ -0,0 +1,44 @@
+import { stringToBool } from "@/utils/utils";
+
+const DeepSeekSVG = (props) => (
+
+);
+
+export default DeepSeekSVG;
diff --git a/src/frontend/src/icons/DeepSeek/deepseek.svg b/src/frontend/src/icons/DeepSeek/deepseek.svg
new file mode 100644
index 000000000..e89b2adaa
--- /dev/null
+++ b/src/frontend/src/icons/DeepSeek/deepseek.svg
@@ -0,0 +1,33 @@
+
+
+
diff --git a/src/frontend/src/icons/DeepSeek/index.tsx b/src/frontend/src/icons/DeepSeek/index.tsx
new file mode 100644
index 000000000..48c612fda
--- /dev/null
+++ b/src/frontend/src/icons/DeepSeek/index.tsx
@@ -0,0 +1,11 @@
+import { useDarkStore } from "@/stores/darkStore";
+import React, { forwardRef } from "react";
+import DeepSeekSVG from "./DeepSeekIcon";
+
+export const DeepSeekIcon = forwardRef<
+ SVGSVGElement,
+ React.PropsWithChildren<{}>
+>((props, ref) => {
+ const isdark = useDarkStore((state) => state.dark).toString();
+ return ;
+});
diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts
index be0332e42..54099d920 100644
--- a/src/frontend/src/utils/styleUtils.ts
+++ b/src/frontend/src/utils/styleUtils.ts
@@ -252,6 +252,7 @@ import { ComposioIcon } from "../icons/Composio";
import { ConfluenceIcon } from "../icons/Confluence";
import { CouchbaseIcon } from "../icons/Couchbase";
import { CrewAiIcon } from "../icons/CrewAI";
+import { DeepSeekIcon } from "../icons/DeepSeek";
import { ElasticsearchIcon } from "../icons/ElasticsearchStore";
import { EvernoteIcon } from "../icons/Evernote";
import { FBIcon } from "../icons/FacebookMessenger";
@@ -670,6 +671,7 @@ export const nodeIconsLucide: iconsType = {
AzureChatOpenAI: OpenAiIcon,
OpenAI: OpenAiIcon,
OpenRouter: OpenRouterIcon,
+ DeepSeek: DeepSeekIcon,
OpenAIEmbeddings: OpenAiIcon,
Pinecone: PineconeIcon,
Qdrant: QDrantIcon,