Add Google Generative AI component
This commit is contained in:
parent
b869852365
commit
f5c673d207
3 changed files with 122 additions and 1 deletions
53
poetry.lock
generated
53
poetry.lock
generated
|
|
@ -2075,6 +2075,22 @@ certifi = "*"
|
|||
gevent = ">=0.13"
|
||||
six = "*"
|
||||
|
||||
[[package]]
|
||||
name = "google-ai-generativelanguage"
|
||||
version = "0.4.0"
|
||||
description = "Google Ai Generativelanguage API client library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "google-ai-generativelanguage-0.4.0.tar.gz", hash = "sha256:c8199066c08f74c4e91290778329bb9f357ba1ea5d6f82de2bc0d10552bf4f8c"},
|
||||
{file = "google_ai_generativelanguage-0.4.0-py3-none-any.whl", hash = "sha256:e4c425376c1ee26c78acbc49a24f735f90ebfa81bf1a06495fae509a2433232c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
|
||||
proto-plus = ">=1.22.3,<2.0.0dev"
|
||||
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
|
||||
|
||||
[[package]]
|
||||
name = "google-api-core"
|
||||
version = "2.15.0"
|
||||
|
|
@ -2361,6 +2377,26 @@ files = [
|
|||
[package.extras]
|
||||
testing = ["pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "google-generativeai"
|
||||
version = "0.3.1"
|
||||
description = "Google Generative AI High level API client library and tools."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "google_generativeai-0.3.1-py3-none-any.whl", hash = "sha256:800ec6041ca537b897d7ba654f4125651c64b38506f2bfce3b464370e3333a1b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
google-ai-generativelanguage = "0.4.0"
|
||||
google-api-core = "*"
|
||||
google-auth = "*"
|
||||
protobuf = "*"
|
||||
tqdm = "*"
|
||||
|
||||
[package.extras]
|
||||
dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"]
|
||||
|
||||
[[package]]
|
||||
name = "google-resumable-media"
|
||||
version = "2.7.0"
|
||||
|
|
@ -3635,6 +3671,21 @@ langchain-core = ">=0.1,<0.2"
|
|||
[package.extras]
|
||||
extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "presidio-analyzer (>=2.2.33,<3.0.0)", "presidio-anonymizer (>=2.2.33,<3.0.0)", "sentence-transformers (>=2,<3)", "vowpal-wabbit-next (==0.6.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "langchain-google-genai"
|
||||
version = "0.0.2"
|
||||
description = "An integration package connecting Google's genai package and LangChain"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
files = [
|
||||
{file = "langchain_google_genai-0.0.2-py3-none-any.whl", hash = "sha256:d98b1bb5eb0b65e7582fe18031ec0fe35d78820deb2825bddfe6c37218008e5c"},
|
||||
{file = "langchain_google_genai-0.0.2.tar.gz", hash = "sha256:6209991f8c5b07efc194514ab04baf3e8ffa4a95ae697db7d1c6ba8cd430ead8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
google-generativeai = ">=0.3.1,<0.4.0"
|
||||
langchain-core = ">=0.1,<0.2"
|
||||
|
||||
[[package]]
|
||||
name = "langdetect"
|
||||
version = "1.0.9"
|
||||
|
|
@ -9234,4 +9285,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<3.11"
|
||||
content-hash = "d6070874c0d4f695ee9863984c135de863f7978c33ed32998b0b7e11b2de2a9d"
|
||||
content-hash = "e86a9499adc75fb8d43f7bd1ff034128763c04669907bdf4e910ccc6a1f12770"
|
||||
|
|
|
|||
|
|
@ -103,6 +103,7 @@ qianfan = "0.0.5"
|
|||
pgvector = "^0.2.3"
|
||||
pyautogen = "^0.2.0"
|
||||
ffmpeg-python = "^0.2.0"
|
||||
langchain-google-genai = "^0.0.2"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pytest-asyncio = "^0.23.1"
|
||||
|
|
|
|||
69
src/backend/langflow/components/llms/GoogleGenerativeAI.py
Normal file
69
src/backend/langflow/components/llms/GoogleGenerativeAI.py
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
from typing import Optional
|
||||
|
||||
from langchain_google_genai import ChatGoogleGenerativeAI
|
||||
|
||||
from langflow import CustomComponent
|
||||
from langflow.field_typing import BaseLanguageModel, RangeSpec, TemplateField
|
||||
|
||||
|
||||
class GoogleGenerativeAIComponent(CustomComponent):
    """Langflow component that builds a LangChain ``ChatGoogleGenerativeAI`` model.

    Exposes Google Generative AI (Gemini) chat models as a drag-and-drop
    component: `build_config` declares the UI fields, `build` constructs the
    underlying LangChain chat model from those field values.
    """

    display_name: str = "Google Generative AI"
    description: str = "A component that uses Google Generative AI to generate text."
    documentation: str = "http://docs.langflow.org/components/custom"

    def build_config(self):
        """Return the template-field configuration rendered by the Langflow UI.

        Keys match the parameter names of :meth:`build`; `advanced=True`
        fields are hidden behind the advanced toggle.
        """
        return {
            "google_api_key": TemplateField(
                display_name="Google API Key",
                info="The Google API Key to use for the Google Generative AI.",
            ),
            "max_output_tokens": TemplateField(
                display_name="Max Output Tokens",
                info="The maximum number of tokens to generate.",
            ),
            "temperature": TemplateField(
                display_name="Temperature",
                # Typo fix: original read "Must by in the closed interval".
                info="Run inference with this temperature. Must be in the closed interval [0.0, 1.0].",
            ),
            "top_k": TemplateField(
                display_name="Top K",
                info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
                # NOTE(review): RangeSpec(0..2, step 0.1) looks like a
                # temperature-style range; top_k is an integer token-count
                # cutoff ("Must be positive") — confirm the intended bounds.
                range_spec=RangeSpec(min=0, max=2, step=0.1),
                advanced=True,
            ),
            "top_p": TemplateField(
                display_name="Top P",
                info="The maximum cumulative probability of tokens to consider when sampling.",
                advanced=True,
            ),
            "n": TemplateField(
                display_name="N",
                info="Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
                advanced=True,
            ),
            "model": TemplateField(
                display_name="Model",
                info="The name of the model to use. Supported examples: gemini-pro",
                options=["gemini-pro", "gemini-pro-vision"],
            ),
        }

    def build(
        self,
        google_api_key: str,
        model: str,
        max_output_tokens: Optional[int] = None,
        temperature: float = 0.1,
        top_k: Optional[int] = None,
        top_p: Optional[float] = None,
        n: Optional[int] = 1,
    ) -> BaseLanguageModel:
        """Construct and return the ``ChatGoogleGenerativeAI`` language model.

        Args:
            google_api_key: API key used to authenticate with Google.
            model: Model name (e.g. ``gemini-pro``).
            max_output_tokens: Generation cap; falsy values are normalized to None.
            temperature: Sampling temperature in [0.0, 1.0].
            top_k: Top-k sampling cutoff; falsy values are normalized to None.
            top_p: Nucleus-sampling probability; falsy values are normalized to None.
            n: Number of completions per prompt; falsy values fall back to 1.

        Returns:
            The configured chat model instance.
        """
        # ``x or None`` coerces UI-supplied falsy values (0, "", None) to None
        # so the client library applies its own defaults.
        return ChatGoogleGenerativeAI(
            model=model,
            max_output_tokens=max_output_tokens or None,
            temperature=temperature,
            top_k=top_k or None,
            top_p=top_p or None,
            n=n or 1,
            google_api_key=google_api_key,
        )
|
||||
Loading…
Add table
Add a link
Reference in a new issue