Update model invocation in OpenAIModel, VertexAiModel, CohereModel, LlamaCppModel, AnthropicModel, GoogleGenerativeAIModel, AzureOpenAIModel, CTransformersModel, OllamaModel, AmazonBedrockModel, BaiduQianfanChatModel, and HuggingFaceModel

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-02-27 16:52:04 -03:00
commit e0a67b0410
13 changed files with 13 additions and 13 deletions

View file

@@ -31,7 +31,7 @@ class ConversationChainComponent(CustomComponent):
chain = ConversationChain(llm=llm)
else:
chain = ConversationChain(llm=llm, memory=memory)
result = chain.invoke(inputs)
result = chain.invoke(input_value)
# result is an AIMessage which is a subclass of BaseMessage
# We need to check if it is a string or a BaseMessage
if hasattr(result, "content") and isinstance(result.content, str):

View file

@@ -60,7 +60,7 @@ class AmazonBedrockComponent(CustomComponent):
) # type: ignore
except Exception as e:
raise ValueError("Could not connect to AmazonBedrock API.") from e
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -77,7 +77,7 @@ class AnthropicLLM(CustomComponent):
)
except Exception as e:
raise ValueError("Could not connect to Anthropic API.") from e
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -99,7 +99,7 @@ class AzureChatOpenAIComponent(CustomComponent):
)
except Exception as e:
raise ValueError("Could not connect to AzureOpenAI API.") from e
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -94,7 +94,7 @@ class QianfanChatEndpointComponent(CustomComponent):
)
except Exception as e:
raise ValueError("Could not connect to Baidu Qianfan API.") from e
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -42,7 +42,7 @@ class CTransformersComponent(CustomComponent):
output = CTransformers(
model=model, model_file=model_file, model_type=model_type, config=config
)
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -43,7 +43,7 @@ class CohereComponent(CustomComponent):
max_tokens=max_tokens,
temperature=temperature,
)
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -73,7 +73,7 @@ class GoogleGenerativeAIComponent(CustomComponent):
n=n or 1,
google_api_key=SecretStr(google_api_key),
)
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -45,7 +45,7 @@ class HuggingFaceEndpointsComponent(CustomComponent):
except Exception as e:
raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
output = ChatHuggingFace(llm=llm)
message = output.invoke(inputs)
        message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -135,7 +135,7 @@ class LlamaCppComponent(CustomComponent):
verbose=verbose,
vocab_only=vocab_only,
)
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -250,7 +250,7 @@ class ChatOllamaComponent(CustomComponent):
output = ChatOllama(**llm_params) # type: ignore
except Exception as e:
raise ValueError("Could not initialize Ollama LLM.") from e
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -80,7 +80,7 @@ class OpenAIModelComponent(CustomComponent):
temperature=temperature,
)
message = model.invoke(inputs)
message = model.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result

View file

@@ -92,7 +92,7 @@ class ChatVertexAIComponent(CustomComponent):
top_p=top_p,
verbose=verbose,
)
message = output.invoke(inputs)
message = output.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result