Potential fix for ChatVertexAI component bug
This commit is contained in:
parent
ff71d10ff5
commit
fed093e830
1 changed file with 1 addition and 1 deletion
|
|
@ -139,7 +139,7 @@ def instantiate_llm(node_type, class_object, params: Dict):
|
|||
# This is a workaround so JinaChat works until streaming is implemented
|
||||
# if "openai_api_base" in params and "jina" in params["openai_api_base"]:
|
||||
# False if condition is True
|
||||
if node_type == "VertexAI":
|
||||
if "VertexAI" in node_type:
|
||||
return initialize_vertexai(class_object=class_object, params=params)
|
||||
# max_tokens sometimes is a string and should be an int
|
||||
if "max_tokens" in params:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue