Merge branch 'dev' into zustand/io/migration

This commit is contained in:
cristhianzl 2024-02-19 20:45:28 -03:00
commit 862770daea
15 changed files with 245 additions and 27 deletions

View file

@ -21,7 +21,7 @@ The `PromptTemplate` component allows users to create prompts and define variabl
<Admonition type="info">
Once a variable is defined in the prompt template, it becomes a component
input of its own. Check out [Prompt
Customization](../guidelines/prompt-customization.mdx) to learn more.
Customization](../docs/guidelines/prompt-customization.mdx) to learn more.
</Admonition>
- **template:** Template used to format an individual request.

View file

@ -12,7 +12,7 @@
## 🐦 Stay tuned for **Langflow** on Twitter
Follow [@logspace_ai](https://twitter.com/logspace_ai) on **Twitter** to get the latest news about **Langflow**.
Follow [@logspace_ai](https://twitter.com/langflow_ai) on **Twitter** to get the latest news about **Langflow**.
---
## ⭐️ Star **Langflow** on GitHub

View file

@ -90,7 +90,7 @@ module.exports = {
},
{
position: "right",
href: "https://twitter.com/logspace_ai",
href: "https://twitter.com/langflow_ai",
position: "right",
className: "header-twitter-link",
target: "_blank",

BIN
img/new_langflow_demo.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 MiB

12
poetry.lock generated
View file

@ -191,13 +191,13 @@ vertex = ["google-auth (>=2,<3)"]
[[package]]
name = "anyio"
version = "4.2.0"
version = "4.3.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
{file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"},
{file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"},
{file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
{file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
]
[package.dependencies]
@ -5205,13 +5205,13 @@ xml = ["lxml (>=4.9.2)"]
[[package]]
name = "pandas-stubs"
version = "2.1.4.231227"
version = "2.2.0.240218"
description = "Type annotations for pandas"
optional = false
python-versions = ">=3.9"
files = [
{file = "pandas_stubs-2.1.4.231227-py3-none-any.whl", hash = "sha256:211fc23e6ae87073bdf41dbf362c4a4d85e1e3477cb078dbac3da6c7fdaefba8"},
{file = "pandas_stubs-2.1.4.231227.tar.gz", hash = "sha256:3ea29ef001e9e44985f5ebde02d4413f94891ef6ec7e5056fb07d125be796c23"},
{file = "pandas_stubs-2.2.0.240218-py3-none-any.whl", hash = "sha256:e97478320add9b958391b15a56c5f1bf29da656d5b747d28bbe708454b3a1fe6"},
{file = "pandas_stubs-2.2.0.240218.tar.gz", hash = "sha256:63138c12eec715d66d48611bdd922f31cd7c78bcadd19384c3bd61fd3720a11a"},
]
[package.dependencies]

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.6.7a2"
version = "0.6.7a5"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [

View file

@ -1,7 +1,5 @@
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from fastapi.security import OAuth2PasswordRequestForm
from sqlmodel import Session
from langflow.api.v1.schemas import Token
from langflow.services.auth.utils import (
authenticate_user,
@ -10,6 +8,7 @@ from langflow.services.auth.utils import (
create_user_tokens,
)
from langflow.services.deps import get_session, get_settings_service
from sqlmodel import Session
router = APIRouter(tags=["Login"])
@ -20,7 +19,9 @@ async def login_to_get_access_token(
form_data: OAuth2PasswordRequestForm = Depends(),
db: Session = Depends(get_session),
# _: Session = Depends(get_current_active_user)
settings_service=Depends(get_settings_service),
):
auth_settings = settings_service.auth_settings
try:
user = authenticate_user(form_data.username, form_data.password, db)
except Exception as exc:
@ -33,8 +34,20 @@ async def login_to_get_access_token(
if user:
tokens = create_user_tokens(user_id=user.id, db=db, update_last_login=True)
response.set_cookie("refresh_token_lf", tokens["refresh_token"], httponly=True)
response.set_cookie("access_token_lf", tokens["access_token"], httponly=False)
response.set_cookie(
"refresh_token_lf",
tokens["refresh_token"],
httponly=auth_settings.REFRESH_HTTPONLY,
samesite=auth_settings.REFRESH_SAME_SITE,
secure=auth_settings.REFRESH_SECURE,
)
response.set_cookie(
"access_token_lf",
tokens["access_token"],
httponly=auth_settings.ACCESS_HTTPONLY,
samesite=auth_settings.ACCESS_SAME_SITE,
secure=auth_settings.ACCESS_SECURE,
)
return tokens
else:
raise HTTPException(
@ -46,11 +59,20 @@ async def login_to_get_access_token(
@router.get("/auto_login")
async def auto_login(
response: Response, db: Session = Depends(get_session), settings_service=Depends(get_settings_service)
response: Response,
db: Session = Depends(get_session),
settings_service=Depends(get_settings_service),
):
auth_settings = settings_service.auth_settings
if settings_service.auth_settings.AUTO_LOGIN:
tokens = create_user_longterm_token(db)
response.set_cookie("access_token_lf", tokens["access_token"], httponly=False)
response.set_cookie(
"access_token_lf",
tokens["access_token"],
httponly=auth_settings.ACCESS_HTTPONLY,
samesite=auth_settings.ACCESS_SAME_SITE,
secure=auth_settings.ACCESS_SECURE,
)
return tokens
raise HTTPException(
@ -63,12 +85,27 @@ async def auto_login(
@router.post("/refresh")
async def refresh_token(request: Request, response: Response):
async def refresh_token(request: Request, response: Response, settings_service=Depends(get_settings_service)):
auth_settings = settings_service.auth_settings
token = request.cookies.get("refresh_token_lf")
if token:
tokens = create_refresh_token(token)
response.set_cookie("refresh_token_lf", tokens["refresh_token"], httponly=True)
response.set_cookie("access_token_lf", tokens["access_token"], httponly=False)
response.set_cookie(
"refresh_token_lf",
tokens["refresh_token"],
httponly=auth_settings.REFRESH_TOKEN_HTTPONLY,
samesite=auth_settings.REFRESH_SAME_SITE,
secure=auth_settings.REFRESH_SECURE,
)
response.set_cookie(
"access_token_lf",
tokens["access_token"],
httponly=auth_settings.ACCESS_HTTPONLY,
samesite=auth_settings.ACCESS_SAME_SITE,
secure=auth_settings.ACCESS_SECURE,
)
return tokens
else:
raise HTTPException(

View file

@ -0,0 +1,137 @@
import os
from typing import Any, Callable, Dict, Optional, Union
from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
from langflow import CustomComponent
from langflow.field_typing import BaseLanguageModel
class ChatLiteLLMComponent(CustomComponent):
    """Langflow component exposing LiteLLM's unified chat-model interface.

    Builds a ``ChatLiteLLM`` language model from user-supplied settings.
    LiteLLM routes a single API to many providers (OpenAI, Perplexity,
    Replicate, ...), selected by the ``model`` string.
    """

    display_name = "ChatLiteLLM"
    description = "`LiteLLM` collection of large language models."
    documentation = "https://python.langchain.com/docs/integrations/chat/litellm"

    def build_config(self):
        """Return the field configuration shown in the Langflow UI.

        Each key maps to one ``build`` parameter; the dict values control
        display name, widget type, and default for that field.
        """
        return {
            "model": {
                "display_name": "Model name",
                "field_type": "str",
                "advanced": False,
                "required": True,
                "info": "The name of the model to use. For example, `gpt-3.5-turbo`.",
            },
            "api_key": {
                "display_name": "API key",
                "field_type": "str",
                "advanced": False,
                "required": False,
                # Rendered as a masked input so the key is not shown in clear text.
                "password": True,
            },
            "streaming": {
                "display_name": "Streaming",
                "field_type": "bool",
                "advanced": True,
                "required": False,
                "default": True,
            },
            "temperature": {
                "display_name": "Temperature",
                "field_type": "float",
                "advanced": False,
                "required": False,
                "default": 0.7,
            },
            "model_kwargs": {
                "display_name": "Model kwargs",
                "field_type": "dict",
                "advanced": True,
                "required": False,
                "default": {},
            },
            "top_p": {
                "display_name": "Top p",
                "field_type": "float",
                "advanced": True,
                "required": False,
            },
            "top_k": {
                "display_name": "Top k",
                "field_type": "int",
                "advanced": True,
                "required": False,
            },
            "n": {
                "display_name": "N",
                "field_type": "int",
                "advanced": True,
                "required": False,
                "info": "Number of chat completions to generate for each prompt. "
                "Note that the API may not return the full n completions if duplicates are generated.",
                "default": 1,
            },
            "max_tokens": {
                "display_name": "Max tokens",
                "field_type": "int",
                "advanced": False,
                "required": False,
                "default": 256,
                "info": "The maximum number of tokens to generate for each chat completion.",
            },
            "max_retries": {
                "display_name": "Max retries",
                "field_type": "int",
                "advanced": True,
                "required": False,
                "default": 6,
            },
            "verbose": {
                "display_name": "Verbose",
                "field_type": "bool",
                "advanced": True,
                "required": False,
                "default": False,
            },
        }

    def build(
        self,
        model: str,
        api_key: str,
        streaming: bool = True,
        temperature: Optional[float] = 0.7,
        # NOTE: default changed from the mutable `{}` to None to avoid the
        # shared-mutable-default pitfall; the `is not None` guard below keeps
        # the observable behavior identical for callers that omit it.
        model_kwargs: Optional[Dict[str, Any]] = None,
        top_p: Optional[float] = None,
        top_k: Optional[int] = None,
        n: int = 1,
        max_tokens: int = 256,
        max_retries: int = 6,
        verbose: bool = False,
    ) -> Union[BaseLanguageModel, Callable]:
        """Construct and return a configured ``ChatLiteLLM`` instance.

        Raises:
            ChatLiteLLMException: if the ``litellm`` package is not installed.
        """
        try:
            import litellm  # type: ignore

            # drop_params: silently drop provider-unsupported parameters
            # instead of erroring; set_verbose toggles LiteLLM debug output.
            litellm.drop_params = True
            litellm.set_verbose = verbose
        except ImportError as exc:
            raise ChatLiteLLMException(
                "Could not import litellm python package. Please install it with `pip install litellm`"
            ) from exc
        if api_key:
            # LiteLLM reads provider credentials from environment variables;
            # route the single api_key field to the matching provider.
            if "perplexity" in model:
                os.environ["PERPLEXITYAI_API_KEY"] = api_key
            elif "replicate" in model:
                os.environ["REPLICATE_API_KEY"] = api_key
        LLM = ChatLiteLLM(
            model=model,
            client=None,
            streaming=streaming,
            temperature=temperature,
            model_kwargs=model_kwargs if model_kwargs is not None else {},
            top_p=top_p,
            top_k=top_k,
            n=n,
            max_tokens=max_tokens,
            max_retries=max_retries,
        )
        return LLM

View file

@ -20,9 +20,7 @@ class ComponentFunctionEntrypointNameNullError(HTTPException):
class Component:
ERROR_CODE_NULL: ClassVar[str] = "Python code must be provided."
ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = (
"The name of the entrypoint function must be provided."
)
ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = "The name of the entrypoint function must be provided."
code: Optional[str] = None
_function_entrypoint_name: str = "build"

View file

@ -2,7 +2,10 @@ import secrets
from pathlib import Path
from typing import Optional
from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD
from langflow.services.settings.constants import (
DEFAULT_SUPERUSER,
DEFAULT_SUPERUSER_PASSWORD,
)
from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file
from loguru import logger
from passlib.context import CryptContext
@ -34,6 +37,19 @@ class AuthSettings(BaseSettings):
SUPERUSER: str = DEFAULT_SUPERUSER
SUPERUSER_PASSWORD: str = DEFAULT_SUPERUSER_PASSWORD
REFRESH_SAME_SITE: str = "none"
"""The SameSite attribute of the refresh token cookie."""
REFRESH_SECURE: bool = True
"""The Secure attribute of the refresh token cookie."""
REFRESH_HTTPONLY: bool = True
"""The HttpOnly attribute of the refresh token cookie."""
ACCESS_SAME_SITE: str = "none"
"""The SameSite attribute of the access token cookie."""
ACCESS_SECURE: bool = True
"""The Secure attribute of the access token cookie."""
ACCESS_HTTPONLY: bool = False
"""The HttpOnly attribute of the access token cookie."""
pwd_context: CryptContext = CryptContext(schemes=["bcrypt"], deprecated="auto")
class Config:

View file

@ -122,7 +122,7 @@ export default function Header(): JSX.Element {
<div className="header-github-display">{stars ?? 0}</div>
</a>
<a
href="https://twitter.com/logspace_ai"
href="https://twitter.com/langflow_ai"
target="_blank"
rel="noreferrer"
className="text-muted-foreground"

View file

@ -110,6 +110,13 @@ export default function Page({
) {
event.preventDefault();
setLastCopiedSelection(_.cloneDeep(lastSelection));
} else if (
(event.ctrlKey || event.metaKey) &&
event.key === "x" &&
lastSelection
) {
event.preventDefault();
setLastCopiedSelection(_.cloneDeep(lastSelection), true);
} else if (
(event.ctrlKey || event.metaKey) &&
event.key === "v" &&

View file

@ -269,7 +269,29 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
});
get().setEdges(newEdges);
},
setLastCopiedSelection: (newSelection) => {
setLastCopiedSelection: (newSelection, isCrop = false) => {
if (isCrop) {
const nodesIdsSelected = newSelection!.nodes.map((node) => node.id);
const edgesIdsSelected = newSelection!.edges.map((edge) => edge.id);
nodesIdsSelected.forEach((id) => {
get().deleteNode(id);
});
edgesIdsSelected.forEach((id) => {
get().deleteEdge(id);
});
const newNodes = get().nodes.filter(
(node) => !nodesIdsSelected.includes(node.id)
);
const newEdges = get().edges.filter(
(edge) => !edgesIdsSelected.includes(edge.id)
);
set({ nodes: newNodes, edges: newEdges });
}
set({ lastCopiedSelection: newSelection });
},
cleanFlow: () => {

View file

@ -23,7 +23,7 @@ export const useTypesStore = create<TypesStoreType>((set, get) => ({
data: { ...old.data, ...data },
templates: templatesGenerator(data),
}));
setLoading(false)
setLoading(false);
resolve();
})
.catch((error) => {

View file

@ -75,7 +75,8 @@ export type FlowStoreType = {
) => void;
lastCopiedSelection: { nodes: any; edges: any } | null;
setLastCopiedSelection: (
newSelection: { nodes: any; edges: any } | null
newSelection: { nodes: any; edges: any } | null,
isCrop?: boolean
) => void;
cleanFlow: () => void;
setFilterEdge: (newState) => void;