diff --git a/docs/docs/components/prompts.mdx b/docs/docs/components/prompts.mdx
index 4256e091a..3aafc9b96 100644
--- a/docs/docs/components/prompts.mdx
+++ b/docs/docs/components/prompts.mdx
@@ -21,7 +21,7 @@ The `PromptTemplate` component allows users to create prompts and define variabl
Once a variable is defined in the prompt template, it becomes a component
input of its own. Check out [Prompt
- Customization](../guidelines/prompt-customization.mdx) to learn more.
+ Customization](../docs/guidelines/prompt-customization.mdx) to learn more.
- **template:** Template used to format an individual request.
diff --git a/docs/docs/contributing/community.md b/docs/docs/contributing/community.md
index fb18b1172..51016f508 100644
--- a/docs/docs/contributing/community.md
+++ b/docs/docs/contributing/community.md
@@ -12,7 +12,7 @@
## 🐦 Stay tunned for **Langflow** on Twitter
-Follow [@logspace_ai](https://twitter.com/logspace_ai) on **Twitter** to get the latest news about **Langflow**.
+Follow [@langflow_ai](https://twitter.com/langflow_ai) on **Twitter** to get the latest news about **Langflow**.
---
## ⭐️ Star **Langflow** on GitHub
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index 538180ccd..430aebcb0 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -90,7 +90,7 @@ module.exports = {
},
{
position: "right",
- href: "https://twitter.com/logspace_ai",
+ href: "https://twitter.com/langflow_ai",
position: "right",
className: "header-twitter-link",
target: "_blank",
diff --git a/img/new_langflow_demo.gif b/img/new_langflow_demo.gif
new file mode 100644
index 000000000..ab80acf3e
Binary files /dev/null and b/img/new_langflow_demo.gif differ
diff --git a/poetry.lock b/poetry.lock
index c4252bf16..c4f2c52f9 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -191,13 +191,13 @@ vertex = ["google-auth (>=2,<3)"]
[[package]]
name = "anyio"
-version = "4.2.0"
+version = "4.3.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
- {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"},
- {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"},
+ {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
+ {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
]
[package.dependencies]
@@ -5205,13 +5205,13 @@ xml = ["lxml (>=4.9.2)"]
[[package]]
name = "pandas-stubs"
-version = "2.1.4.231227"
+version = "2.2.0.240218"
description = "Type annotations for pandas"
optional = false
python-versions = ">=3.9"
files = [
- {file = "pandas_stubs-2.1.4.231227-py3-none-any.whl", hash = "sha256:211fc23e6ae87073bdf41dbf362c4a4d85e1e3477cb078dbac3da6c7fdaefba8"},
- {file = "pandas_stubs-2.1.4.231227.tar.gz", hash = "sha256:3ea29ef001e9e44985f5ebde02d4413f94891ef6ec7e5056fb07d125be796c23"},
+ {file = "pandas_stubs-2.2.0.240218-py3-none-any.whl", hash = "sha256:e97478320add9b958391b15a56c5f1bf29da656d5b747d28bbe708454b3a1fe6"},
+ {file = "pandas_stubs-2.2.0.240218.tar.gz", hash = "sha256:63138c12eec715d66d48611bdd922f31cd7c78bcadd19384c3bd61fd3720a11a"},
]
[package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index f68c54800..7d9860ca2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
-version = "0.6.7a2"
+version = "0.6.7a5"
description = "A Python package with a built-in web application"
authors = ["Logspace "]
maintainers = [
diff --git a/src/backend/langflow/api/v1/login.py b/src/backend/langflow/api/v1/login.py
index 29db59855..2055c18c2 100644
--- a/src/backend/langflow/api/v1/login.py
+++ b/src/backend/langflow/api/v1/login.py
@@ -1,7 +1,5 @@
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from fastapi.security import OAuth2PasswordRequestForm
-from sqlmodel import Session
-
from langflow.api.v1.schemas import Token
from langflow.services.auth.utils import (
authenticate_user,
@@ -10,6 +8,7 @@ from langflow.services.auth.utils import (
create_user_tokens,
)
from langflow.services.deps import get_session, get_settings_service
+from sqlmodel import Session
router = APIRouter(tags=["Login"])
@@ -20,7 +19,9 @@ async def login_to_get_access_token(
form_data: OAuth2PasswordRequestForm = Depends(),
db: Session = Depends(get_session),
# _: Session = Depends(get_current_active_user)
+ settings_service=Depends(get_settings_service),
):
+ auth_settings = settings_service.auth_settings
try:
user = authenticate_user(form_data.username, form_data.password, db)
except Exception as exc:
@@ -33,8 +34,20 @@ async def login_to_get_access_token(
if user:
tokens = create_user_tokens(user_id=user.id, db=db, update_last_login=True)
- response.set_cookie("refresh_token_lf", tokens["refresh_token"], httponly=True)
- response.set_cookie("access_token_lf", tokens["access_token"], httponly=False)
+ response.set_cookie(
+ "refresh_token_lf",
+ tokens["refresh_token"],
+ httponly=auth_settings.REFRESH_HTTPONLY,
+ samesite=auth_settings.REFRESH_SAME_SITE,
+ secure=auth_settings.REFRESH_SECURE,
+ )
+ response.set_cookie(
+ "access_token_lf",
+ tokens["access_token"],
+ httponly=auth_settings.ACCESS_HTTPONLY,
+ samesite=auth_settings.ACCESS_SAME_SITE,
+ secure=auth_settings.ACCESS_SECURE,
+ )
return tokens
else:
raise HTTPException(
@@ -46,11 +59,20 @@ async def login_to_get_access_token(
@router.get("/auto_login")
async def auto_login(
- response: Response, db: Session = Depends(get_session), settings_service=Depends(get_settings_service)
+ response: Response,
+ db: Session = Depends(get_session),
+ settings_service=Depends(get_settings_service),
):
+ auth_settings = settings_service.auth_settings
if settings_service.auth_settings.AUTO_LOGIN:
tokens = create_user_longterm_token(db)
- response.set_cookie("access_token_lf", tokens["access_token"], httponly=False)
+ response.set_cookie(
+ "access_token_lf",
+ tokens["access_token"],
+ httponly=auth_settings.ACCESS_HTTPONLY,
+ samesite=auth_settings.ACCESS_SAME_SITE,
+ secure=auth_settings.ACCESS_SECURE,
+ )
return tokens
raise HTTPException(
@@ -63,12 +85,27 @@ async def auto_login(
@router.post("/refresh")
-async def refresh_token(request: Request, response: Response):
+async def refresh_token(request: Request, response: Response, settings_service=Depends(get_settings_service)):
+ auth_settings = settings_service.auth_settings
+
token = request.cookies.get("refresh_token_lf")
+
if token:
tokens = create_refresh_token(token)
- response.set_cookie("refresh_token_lf", tokens["refresh_token"], httponly=True)
- response.set_cookie("access_token_lf", tokens["access_token"], httponly=False)
+ response.set_cookie(
+ "refresh_token_lf",
+ tokens["refresh_token"],
+ httponly=auth_settings.REFRESH_HTTPONLY,
+ samesite=auth_settings.REFRESH_SAME_SITE,
+ secure=auth_settings.REFRESH_SECURE,
+ )
+ response.set_cookie(
+ "access_token_lf",
+ tokens["access_token"],
+ httponly=auth_settings.ACCESS_HTTPONLY,
+ samesite=auth_settings.ACCESS_SAME_SITE,
+ secure=auth_settings.ACCESS_SECURE,
+ )
return tokens
else:
raise HTTPException(
diff --git a/src/backend/langflow/components/llms/ChatLiteLLM.py b/src/backend/langflow/components/llms/ChatLiteLLM.py
new file mode 100644
index 000000000..f2b7f7dc3
--- /dev/null
+++ b/src/backend/langflow/components/llms/ChatLiteLLM.py
@@ -0,0 +1,137 @@
+import os
+from typing import Any, Callable, Dict, Optional, Union
+
+from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
+from langflow import CustomComponent
+from langflow.field_typing import BaseLanguageModel
+
+
+class ChatLiteLLMComponent(CustomComponent):
+ display_name = "ChatLiteLLM"
+ description = "`LiteLLM` collection of large language models."
+ documentation = "https://python.langchain.com/docs/integrations/chat/litellm"
+
+ def build_config(self):
+ return {
+ "model": {
+ "display_name": "Model name",
+ "field_type": "str",
+ "advanced": False,
+ "required": True,
+ "info": "The name of the model to use. For example, `gpt-3.5-turbo`.",
+ },
+ "api_key": {
+ "display_name": "API key",
+ "field_type": "str",
+ "advanced": False,
+ "required": False,
+ "password": True,
+ },
+ "streaming": {
+ "display_name": "Streaming",
+ "field_type": "bool",
+ "advanced": True,
+ "required": False,
+ "default": True,
+ },
+ "temperature": {
+ "display_name": "Temperature",
+ "field_type": "float",
+ "advanced": False,
+ "required": False,
+ "default": 0.7,
+ },
+ "model_kwargs": {
+ "display_name": "Model kwargs",
+ "field_type": "dict",
+ "advanced": True,
+ "required": False,
+ "default": {},
+ },
+ "top_p": {
+ "display_name": "Top p",
+ "field_type": "float",
+ "advanced": True,
+ "required": False,
+ },
+ "top_k": {
+ "display_name": "Top k",
+ "field_type": "int",
+ "advanced": True,
+ "required": False,
+ },
+ "n": {
+ "display_name": "N",
+ "field_type": "int",
+ "advanced": True,
+ "required": False,
+ "info": "Number of chat completions to generate for each prompt. "
+ "Note that the API may not return the full n completions if duplicates are generated.",
+ "default": 1,
+ },
+ "max_tokens": {
+ "display_name": "Max tokens",
+ "field_type": "int",
+ "advanced": False,
+ "required": False,
+ "default": 256,
+ "info": "The maximum number of tokens to generate for each chat completion.",
+ },
+ "max_retries": {
+ "display_name": "Max retries",
+ "field_type": "int",
+ "advanced": True,
+ "required": False,
+ "default": 6,
+ },
+ "verbose": {
+ "display_name": "Verbose",
+ "field_type": "bool",
+ "advanced": True,
+ "required": False,
+ "default": False,
+ },
+ }
+
+ def build(
+ self,
+ model: str,
+ api_key: str,
+ streaming: bool = True,
+ temperature: Optional[float] = 0.7,
+ model_kwargs: Optional[Dict[str, Any]] = {},
+ top_p: Optional[float] = None,
+ top_k: Optional[int] = None,
+ n: int = 1,
+ max_tokens: int = 256,
+ max_retries: int = 6,
+ verbose: bool = False,
+ ) -> Union[BaseLanguageModel, Callable]:
+ try:
+ import litellm # type: ignore
+
+ litellm.drop_params = True
+ litellm.set_verbose = verbose
+ except ImportError:
+ raise ChatLiteLLMException(
+ "Could not import litellm python package. " "Please install it with `pip install litellm`"
+ )
+ if api_key:
+ if "perplexity" in model:
+ os.environ["PERPLEXITYAI_API_KEY"] = api_key
+ elif "replicate" in model:
+ os.environ["REPLICATE_API_KEY"] = api_key
+
+ LLM = ChatLiteLLM(
+ model=model,
+ client=None,
+ streaming=streaming,
+ temperature=temperature,
+ model_kwargs=model_kwargs if model_kwargs is not None else {},
+ top_p=top_p,
+ top_k=top_k,
+ n=n,
+ max_tokens=max_tokens,
+ max_retries=max_retries,
+ )
+ return LLM
diff --git a/src/backend/langflow/interface/custom/custom_component/component.py b/src/backend/langflow/interface/custom/custom_component/component.py
index 3af7acea5..b5e568333 100644
--- a/src/backend/langflow/interface/custom/custom_component/component.py
+++ b/src/backend/langflow/interface/custom/custom_component/component.py
@@ -20,9 +20,7 @@ class ComponentFunctionEntrypointNameNullError(HTTPException):
class Component:
ERROR_CODE_NULL: ClassVar[str] = "Python code must be provided."
- ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = (
- "The name of the entrypoint function must be provided."
- )
+ ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = "The name of the entrypoint function must be provided."
code: Optional[str] = None
_function_entrypoint_name: str = "build"
diff --git a/src/backend/langflow/services/settings/auth.py b/src/backend/langflow/services/settings/auth.py
index 92a696cc5..8463d0781 100644
--- a/src/backend/langflow/services/settings/auth.py
+++ b/src/backend/langflow/services/settings/auth.py
@@ -2,7 +2,10 @@ import secrets
from pathlib import Path
from typing import Optional
-from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD
+from langflow.services.settings.constants import (
+ DEFAULT_SUPERUSER,
+ DEFAULT_SUPERUSER_PASSWORD,
+)
from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file
from loguru import logger
from passlib.context import CryptContext
@@ -34,6 +37,19 @@ class AuthSettings(BaseSettings):
SUPERUSER: str = DEFAULT_SUPERUSER
SUPERUSER_PASSWORD: str = DEFAULT_SUPERUSER_PASSWORD
+ REFRESH_SAME_SITE: str = "none"
+ """The SameSite attribute of the refresh token cookie."""
+ REFRESH_SECURE: bool = True
+ """The Secure attribute of the refresh token cookie."""
+ REFRESH_HTTPONLY: bool = True
+ """The HttpOnly attribute of the refresh token cookie."""
+ ACCESS_SAME_SITE: str = "none"
+ """The SameSite attribute of the access token cookie."""
+ ACCESS_SECURE: bool = True
+ """The Secure attribute of the access token cookie."""
+ ACCESS_HTTPONLY: bool = False
+ """The HttpOnly attribute of the access token cookie."""
+
pwd_context: CryptContext = CryptContext(schemes=["bcrypt"], deprecated="auto")
class Config:
diff --git a/src/frontend/src/components/headerComponent/index.tsx b/src/frontend/src/components/headerComponent/index.tsx
index d82355c83..efe9d73a4 100644
--- a/src/frontend/src/components/headerComponent/index.tsx
+++ b/src/frontend/src/components/headerComponent/index.tsx
@@ -122,7 +122,7 @@ export default function Header(): JSX.Element {
{stars ?? 0}
((set, get) => ({
});
get().setEdges(newEdges);
},
- setLastCopiedSelection: (newSelection) => {
+ setLastCopiedSelection: (newSelection, isCrop = false) => {
+ if (isCrop) {
+ const nodesIdsSelected = newSelection!.nodes.map((node) => node.id);
+ const edgesIdsSelected = newSelection!.edges.map((edge) => edge.id);
+
+ nodesIdsSelected.forEach((id) => {
+ get().deleteNode(id);
+ });
+
+ edgesIdsSelected.forEach((id) => {
+ get().deleteEdge(id);
+ });
+
+ const newNodes = get().nodes.filter(
+ (node) => !nodesIdsSelected.includes(node.id)
+ );
+ const newEdges = get().edges.filter(
+ (edge) => !edgesIdsSelected.includes(edge.id)
+ );
+
+ set({ nodes: newNodes, edges: newEdges });
+ }
+
set({ lastCopiedSelection: newSelection });
},
cleanFlow: () => {
diff --git a/src/frontend/src/stores/typesStore.ts b/src/frontend/src/stores/typesStore.ts
index a04b4c11f..1a0ae3634 100644
--- a/src/frontend/src/stores/typesStore.ts
+++ b/src/frontend/src/stores/typesStore.ts
@@ -23,7 +23,7 @@ export const useTypesStore = create((set, get) => ({
data: { ...old.data, ...data },
templates: templatesGenerator(data),
}));
- setLoading(false)
+ setLoading(false);
resolve();
})
.catch((error) => {
diff --git a/src/frontend/src/types/zustand/flow/index.ts b/src/frontend/src/types/zustand/flow/index.ts
index 937197efd..a85965f16 100644
--- a/src/frontend/src/types/zustand/flow/index.ts
+++ b/src/frontend/src/types/zustand/flow/index.ts
@@ -75,7 +75,8 @@ export type FlowStoreType = {
) => void;
lastCopiedSelection: { nodes: any; edges: any } | null;
setLastCopiedSelection: (
- newSelection: { nodes: any; edges: any } | null
+ newSelection: { nodes: any; edges: any } | null,
+ isCrop?: boolean
) => void;
cleanFlow: () => void;
setFilterEdge: (newState) => void;