feat(llms): Add support for using SecretStr from pydantic to store sensitive API keys securely
feat(llms): Add support for using process.env.PORT environment variable to configure server port
fix(llms): Fix incorrect default value for model_kwargs parameter in AnthropicComponent
fix(llms): Fix incorrect default value for model_kwargs parameter in ChatAnthropicComponent
fix(llms): Fix incorrect default value for model_kwargs parameter in ChatOpenAIComponent
fix(llms): Fix incorrect default value for model_kwargs parameter in ChatVertexAIComponent
fix(llms): Fix incorrect default value for model_kwargs parameter in CohereComponent
fix(llms): Fix incorrect default value for model_kwargs parameter in LlamaCppComponent
fix(llms): Fix incorrect default value for model_kwargs parameter in VertexAIComponent
fix(utilities): Fix incorrect default value for k parameter in BingSearchAPIWrapperComponent
fix(vectorstores): Fix missing required documents parameter in FAISSComponent
fix(vectorstores): Fix missing required documents parameter in PineconeComponent
fix(vectorstores): Fix missing required documents parameter in QdrantComponent
This commit is contained in:
parent
9c06b16eb3
commit
8bff60d2f2
11 changed files with 31 additions and 33 deletions
|
|
@@ -1,3 +1,4 @@
|
|||
from pydantic import SecretStr
|
||||
from langflow import CustomComponent
|
||||
from typing import Optional
|
||||
from langflow.field_typing import BaseLanguageModel, NestedDict
|
||||
|
|
@@ -34,11 +35,11 @@ class AnthropicComponent(CustomComponent):
|
|||
self,
|
||||
anthropic_api_key: str,
|
||||
anthropic_api_url: str,
|
||||
model_kwargs: Optional[NestedDict],
|
||||
model_kwargs: NestedDict = {},
|
||||
temperature: Optional[float] = None,
|
||||
) -> BaseLanguageModel:
|
||||
return Anthropic(
|
||||
anthropic_api_key=anthropic_api_key,
|
||||
anthropic_api_key=SecretStr(anthropic_api_key),
|
||||
anthropic_api_url=anthropic_api_url,
|
||||
model_kwargs=model_kwargs,
|
||||
temperature=temperature,
|
||||
|
|
|
|||
|
|
@@ -1,3 +1,4 @@
|
|||
from pydantic import SecretStr
|
||||
from langflow import CustomComponent
|
||||
from typing import Optional, Union, Callable
|
||||
from langflow.field_typing import BaseLanguageModel
|
||||
|
|
@@ -33,13 +34,13 @@ class ChatAnthropicComponent(CustomComponent):
|
|||
|
||||
def build(
|
||||
self,
|
||||
anthropic_api_key: Optional[str] = None,
|
||||
anthropic_api_key: str,
|
||||
anthropic_api_url: Optional[str] = None,
|
||||
model_kwargs: dict = {},
|
||||
temperature: Optional[float] = None,
|
||||
) -> Union[BaseLanguageModel, Callable]:
|
||||
return ChatAnthropic(
|
||||
anthropic_api_key=anthropic_api_key,
|
||||
anthropic_api_key=SecretStr(anthropic_api_key),
|
||||
anthropic_api_url=anthropic_api_url,
|
||||
model_kwargs=model_kwargs,
|
||||
temperature=temperature,
|
||||
|
|
|
|||
|
|
@@ -67,7 +67,7 @@ class ChatOpenAIComponent(CustomComponent):
|
|||
def build(
|
||||
self,
|
||||
max_tokens: Optional[int] = 256,
|
||||
model_kwargs: Optional[NestedDict] = {},
|
||||
model_kwargs: NestedDict = {},
|
||||
model_name: str = "gpt-4-1106-preview",
|
||||
openai_api_base: Optional[str] = None,
|
||||
openai_api_key: Optional[str] = None,
|
||||
|
|
|
|||
|
|
@@ -66,12 +66,12 @@ class ChatVertexAIComponent(CustomComponent):
|
|||
project: str,
|
||||
examples: Optional[List[BaseMessage]] = [],
|
||||
location: str = "us-central1",
|
||||
max_output_tokens: Optional[int] = 128,
|
||||
max_output_tokens: int = 128,
|
||||
model_name: str = "chat-bison",
|
||||
temperature: Optional[float] = 0.0,
|
||||
top_k: Optional[int] = 40,
|
||||
top_p: Optional[float] = 0.95,
|
||||
verbose: Optional[bool] = False,
|
||||
temperature: float = 0.0,
|
||||
top_k: int = 40,
|
||||
top_p: float = 0.95,
|
||||
verbose: bool = False,
|
||||
) -> Union[BaseLanguageModel, BaseLLM]:
|
||||
return ChatVertexAI(
|
||||
credentials=credentials,
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,5 @@
|
|||
from langflow import CustomComponent
|
||||
from langchain_core.language_models.base import BaseLanguageModel
|
||||
from typing import Optional
|
||||
from langchain_community.llms.cohere import Cohere
|
||||
|
||||
|
||||
|
|
@@ -19,7 +18,7 @@ class CohereComponent(CustomComponent):
|
|||
def build(
|
||||
self,
|
||||
cohere_api_key: str,
|
||||
max_tokens: Optional[int] = 256,
|
||||
temperature: Optional[float] = 0.75,
|
||||
max_tokens: int = 256,
|
||||
temperature: float = 0.75,
|
||||
) -> BaseLanguageModel:
|
||||
return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature)
|
||||
|
|
|
|||
|
|
@@ -59,36 +59,36 @@ class LlamaCppComponent(CustomComponent):
|
|||
cache: Optional[bool] = None,
|
||||
client: Optional[Any] = None,
|
||||
echo: Optional[bool] = False,
|
||||
f16_kv: Optional[bool] = True,
|
||||
f16_kv: bool = True,
|
||||
grammar_path: Optional[str] = None,
|
||||
last_n_tokens_size: Optional[int] = 64,
|
||||
logits_all: Optional[bool] = False,
|
||||
logits_all: bool = False,
|
||||
logprobs: Optional[int] = None,
|
||||
lora_base: Optional[str] = None,
|
||||
lora_path: Optional[str] = None,
|
||||
max_tokens: Optional[int] = 256,
|
||||
metadata: Optional[Dict] = None,
|
||||
model_kwargs: Optional[Dict] = {},
|
||||
model_kwargs: Dict = {},
|
||||
n_batch: Optional[int] = 8,
|
||||
n_ctx: Optional[int] = 512,
|
||||
n_ctx: int = 512,
|
||||
n_gpu_layers: Optional[int] = 1,
|
||||
n_parts: Optional[int] = -1,
|
||||
n_parts: int = -1,
|
||||
n_threads: Optional[int] = 1,
|
||||
repeat_penalty: Optional[float] = 1.1,
|
||||
rope_freq_base: Optional[float] = 10000.0,
|
||||
rope_freq_scale: Optional[float] = 1.0,
|
||||
seed: Optional[int] = -1,
|
||||
rope_freq_base: float = 10000.0,
|
||||
rope_freq_scale: float = 1.0,
|
||||
seed: int = -1,
|
||||
stop: Optional[List[str]] = [],
|
||||
streaming: Optional[bool] = True,
|
||||
streaming: bool = True,
|
||||
suffix: Optional[str] = "",
|
||||
tags: Optional[List[str]] = [],
|
||||
temperature: Optional[float] = 0.8,
|
||||
top_k: Optional[int] = 40,
|
||||
top_p: Optional[float] = 0.95,
|
||||
use_mlock: Optional[bool] = False,
|
||||
use_mlock: bool = False,
|
||||
use_mmap: Optional[bool] = True,
|
||||
verbose: Optional[bool] = True,
|
||||
vocab_only: Optional[bool] = False,
|
||||
verbose: bool = True,
|
||||
vocab_only: bool = False,
|
||||
) -> LlamaCpp:
|
||||
return LlamaCpp(
|
||||
model_path=model_path,
|
||||
|
|
|
|||
|
|
@@ -114,7 +114,7 @@ class VertexAIComponent(CustomComponent):
|
|||
location: str = "us-central1",
|
||||
max_output_tokens: int = 128,
|
||||
max_retries: int = 6,
|
||||
metadata: Dict = None,
|
||||
metadata: Dict = {},
|
||||
model_name: str = "text-bison",
|
||||
n: int = 1,
|
||||
name: Optional[str] = None,
|
||||
|
|
@@ -127,8 +127,6 @@ class VertexAIComponent(CustomComponent):
|
|||
tuned_model_name: Optional[str] = None,
|
||||
verbose: bool = False,
|
||||
) -> Union[BaseLLM, Callable]:
|
||||
if metadata is None:
|
||||
metadata = {}
|
||||
return VertexAI(
|
||||
credentials=credentials,
|
||||
location=location,
|
||||
|
|
|
|||
|
|
@@ -1,4 +1,3 @@
|
|||
from typing import Optional
|
||||
from langflow import CustomComponent
|
||||
|
||||
# Assuming `BingSearchAPIWrapper` is a class that exists in the context
|
||||
|
|
@@ -26,7 +25,7 @@ class BingSearchAPIWrapperComponent(CustomComponent):
|
|||
self,
|
||||
bing_search_url: str,
|
||||
bing_subscription_key: str,
|
||||
k: Optional[int] = 10,
|
||||
k: int = 10,
|
||||
) -> BingSearchAPIWrapper:
|
||||
# 'k' has a default value and is not shown (show=False), so it is hardcoded here
|
||||
return BingSearchAPIWrapper(bing_search_url=bing_search_url, bing_subscription_key=bing_subscription_key, k=k)
|
||||
|
|
|
|||
|
|
@@ -23,6 +23,6 @@ class FAISSComponent(CustomComponent):
|
|||
def build(
|
||||
self,
|
||||
embedding: Embeddings,
|
||||
documents: List[Document] = None,
|
||||
documents: List[Document],
|
||||
) -> Union[VectorStore, FAISS, BaseRetriever]:
|
||||
return FAISS.from_documents(documents=documents, embedding=embedding)
|
||||
|
|
|
|||
|
|
@@ -28,7 +28,7 @@ class PineconeComponent(CustomComponent):
|
|||
def build(
|
||||
self,
|
||||
embedding: Embeddings,
|
||||
documents: List[Document] = None,
|
||||
documents: List[Document],
|
||||
index_name: Optional[str] = None,
|
||||
pinecone_api_key: Optional[str] = None,
|
||||
pinecone_env: Optional[str] = None,
|
||||
|
|
|
|||
|
|
@@ -35,7 +35,7 @@ class QdrantComponent(CustomComponent):
|
|||
def build(
|
||||
self,
|
||||
embedding: Embeddings,
|
||||
documents: List[Document] = None,
|
||||
documents: List[Document],
|
||||
api_key: Optional[str] = None,
|
||||
collection_name: Optional[str] = None,
|
||||
content_payload_key: str = "page_content",
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue