diff --git a/src/backend/langflow/interface/initialize/llm.py b/src/backend/langflow/interface/initialize/llm.py
new file mode 100644
index 000000000..2b9fa3b2e
--- /dev/null
+++ b/src/backend/langflow/interface/initialize/llm.py
@@ -0,0 +1,9 @@
+def initialize_vertexai(class_object, params):
+    if credentials_path := params.get("credentials"):
+        from google.oauth2 import service_account
+
+        credentials_object = service_account.Credentials.from_service_account_file(
+            filename=credentials_path
+        )
+        params["credentials"] = credentials_object
+    return class_object(**params)
diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py
index 3ddb64c24..c90713a08 100644
--- a/src/backend/langflow/interface/initialize/loading.py
+++ b/src/backend/langflow/interface/initialize/loading.py
@@ -6,6 +6,7 @@ from langchain.agents import agent as agent_module
 from langchain.agents.agent import AgentExecutor
 from langchain.agents.agent_toolkits.base import BaseToolkit
 from langchain.agents.tools import BaseTool
+from langflow.interface.initialize.llm import initialize_vertexai
 from langflow.interface.initialize.vector_store import vecstore_initializer
 
 
@@ -89,6 +90,8 @@ def instantiate_llm(node_type, class_object, params: Dict):
     # if "openai_api_base" in params and "jina" in params["openai_api_base"]:
     # False if condition is True
     ChatConfig.streaming = "jina" not in params.get("openai_api_base", "")
+    if node_type == "VertexAI":
+        return initialize_vertexai(class_object=class_object, params=params)
     return class_object(**params)
 
 