feat: Add ruff rules for error messages (EM) (#3978)

Add ruff rules for error messages (EM)
This commit is contained in:
Christophe Bornet 2024-10-01 17:38:32 +02:00 committed by GitHub
commit 1668c91433
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
169 changed files with 1011 additions and 554 deletions

View file

@@ -403,7 +403,8 @@ def superuser(
if result:
typer.echo("Default folder created successfully.")
else:
raise RuntimeError("Could not create default folder.")
msg = "Could not create default folder."
raise RuntimeError(msg)
typer.echo("Superuser created successfully.")
else:

View file

@@ -135,7 +135,8 @@ async def build_graph_from_data(flow_id: str, payload: dict, **kwargs):
for vertex_id in graph._has_session_id_vertices:
vertex = graph.get_vertex(vertex_id)
if vertex is None:
raise ValueError(f"Vertex {vertex_id} not found")
msg = f"Vertex {vertex_id} not found"
raise ValueError(msg)
if not vertex._raw_params.get("session_id"):
vertex.update_raw_params({"session_id": flow_id}, overwrite=True)
@@ -150,7 +151,8 @@ async def build_graph_from_db_no_cache(flow_id: str, session: Session):
"""Build and cache the graph."""
flow: Flow | None = session.get(Flow, flow_id)
if not flow or not flow.data:
raise ValueError("Invalid flow ID")
msg = "Invalid flow ID"
raise ValueError(msg)
return await build_graph_from_data(flow_id, flow.data, flow_name=flow.name, user_id=str(flow.user_id))
@@ -260,4 +262,5 @@ async def cascade_delete_flow(session: Session, flow: Flow):
session.exec(delete(VertexBuildTable).where(VertexBuildTable.flow_id == flow.id)) # type: ignore
session.exec(delete(Flow).where(Flow.id == flow.id)) # type: ignore
except Exception as e:
raise RuntimeError(f"Unable to cascade delete flow: ${flow.id}", e)
msg = f"Unable to cascade delete flow: ${flow.id}"
raise RuntimeError(msg, e)

View file

@@ -340,7 +340,8 @@ async def build_flow(
vertex_build_response_json = vertex_build_response.model_dump_json()
build_data = json.loads(vertex_build_response_json)
except Exception as exc:
raise ValueError(f"Error serializing vertex build response: {exc}") from exc
msg = f"Error serializing vertex build response: {exc}"
raise ValueError(msg) from exc
event_manager.on_end_vertex(data={"build_data": build_data})
await client_consumed_queue.get()
if vertex_build_response.valid:
@@ -652,13 +653,15 @@ async def build_vertex_stream(
cache = await chat_service.get_cache(flow_id_str)
if not cache:
# If there's no cache
raise ValueError(f"No cache found for {flow_id_str}.")
msg = f"No cache found for {flow_id_str}."
raise ValueError(msg)
else:
graph = cache.get("result")
vertex: InterfaceVertex = graph.get_vertex(vertex_id)
if not hasattr(vertex, "stream"):
raise ValueError(f"Vertex {vertex_id} does not support streaming")
msg = f"Vertex {vertex_id} does not support streaming"
raise ValueError(msg)
if isinstance(vertex._built_result, str) and vertex._built_result:
stream_data = StreamData(
event="message",
@@ -691,7 +694,8 @@ async def build_vertex_stream(
)
yield str(stream_data)
else:
raise ValueError(f"No result found for vertex {vertex_id}")
msg = f"No result found for vertex {vertex_id}"
raise ValueError(msg)
except Exception as exc:
logger.exception(f"Error building Component: {exc}")

View file

@@ -94,17 +94,15 @@ def validate_input_and_tweaks(input_request: SimplifiedAPIRequest):
has_input_value = value.get("input_value") is not None
input_value_is_chat = input_request.input_value is not None and input_request.input_type == "chat"
if has_input_value and input_value_is_chat:
raise InvalidChatInputException(
"If you pass an input_value to the chat input, you cannot pass a tweak with the same name."
)
msg = "If you pass an input_value to the chat input, you cannot pass a tweak with the same name."
raise InvalidChatInputException(msg)
elif "Text Input" in key or "TextInput" in key:
if isinstance(value, dict):
has_input_value = value.get("input_value") is not None
input_value_is_text = input_request.input_value is not None and input_request.input_type == "text"
if has_input_value and input_value_is_text:
raise InvalidChatInputException(
"If you pass an input_value to the text input, you cannot pass a tweak with the same name."
)
msg = "If you pass an input_value to the text input, you cannot pass a tweak with the same name."
raise InvalidChatInputException(msg)
async def simple_run_flow(
@@ -120,7 +118,8 @@ async def simple_run_flow(
user_id = api_key_user.id if api_key_user else None
flow_id_str = str(flow.id)
if flow.data is None:
raise ValueError(f"Flow {flow_id_str} has no data")
msg = f"Flow {flow_id_str} has no data"
raise ValueError(msg)
graph_data = flow.data.copy()
graph_data = process_tweaks(graph_data, input_request.tweaks or {}, stream=stream)
graph = Graph.from_payload(graph_data, flow_id=flow_id_str, user_id=str(user_id), flow_name=flow.name)
@@ -331,8 +330,9 @@ async def webhook_run_flow(
data = await request.body()
if not data:
logger.error("Request body is empty")
msg = "Request body is empty. You should provide a JSON payload containing the flow ID."
raise ValueError(
"Request body is empty. You should provide a JSON payload containing the flow ID.",
msg,
)
# get all webhook components in the flow
@@ -448,7 +448,8 @@ async def experimental_run_flow(
session_data = await session_service.load_session(session_id, flow_id=flow_id_str)
graph, artifacts = session_data if session_data else (None, None)
if graph is None:
raise ValueError(f"Session {session_id} not found")
msg = f"Session {session_id} not found"
raise ValueError(msg)
else:
# Get the flow that matches the flow_id and belongs to the user
# flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first()
@@ -456,10 +457,12 @@
select(Flow).where(Flow.id == flow_id_str).where(Flow.user_id == api_key_user.id)
).first()
if flow is None:
raise ValueError(f"Flow {flow_id_str} not found")
msg = f"Flow {flow_id_str} not found"
raise ValueError(msg)
if flow.data is None:
raise ValueError(f"Flow {flow_id_str} has no data")
msg = f"Flow {flow_id_str} has no data"
raise ValueError(msg)
graph_data = flow.data
graph_data = process_tweaks(graph_data, tweaks or {})
graph = Graph.from_payload(graph_data, flow_id=flow_id_str)

View file

@@ -110,7 +110,8 @@ class ChatResponse(ChatMessage):
@classmethod
def validate_message_type(cls, v):
if v not in ["start", "stream", "end", "error", "info", "file"]:
raise ValueError("type must be start, stream, end, error, info, or file")
msg = "type must be start, stream, end, error, info, or file"
raise ValueError(msg)
return v
@@ -134,7 +135,8 @@ class FileResponse(ChatMessage):
@classmethod
def validate_data_type(cls, v):
if v not in ["image", "csv"]:
raise ValueError("data_type must be image or csv")
msg = "data_type must be image or csv"
raise ValueError(msg)
return v

View file

@@ -67,9 +67,11 @@ class LCAgentComponent(Component):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
def get_agent_kwargs(self, flatten: bool = False) -> dict:
base = {
@@ -102,7 +104,8 @@ class LCAgentComponent(Component):
)
self.status = result
if "output" not in result:
raise ValueError("Output key not found in result. Tried 'output'.")
msg = "Output key not found in result. Tried 'output'."
raise ValueError(msg)
return cast(str, result.get("output"))
@@ -148,7 +151,8 @@ class LCToolsAgentComponent(LCAgentComponent):
)
self.status = result
if "output" not in result:
raise ValueError("Output key not found in result. Tried 'output'.")
msg = "Output key not found in result. Tried 'output'."
raise ValueError(msg)
return cast(str, result.get("output"))

View file

@@ -46,7 +46,8 @@ class BaseCrewComponent(Component):
return self.tasks, self.agents
def build_crew(self) -> Crew:
raise NotImplementedError("build_crew must be implemented in subclasses")
msg = "build_crew must be implemented in subclasses"
raise NotImplementedError(msg)
def get_task_callback(
self,

View file

@@ -12,6 +12,8 @@ class LCChainComponent(Component):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)

View file

@@ -49,7 +49,8 @@ def parse_curl_command(curl_command):
tokens = shlex.split(normalize_newlines(curl_command))
tokens = [token for token in tokens if token and token != " "]
if tokens and "curl" not in tokens[0]:
raise ValueError("Invalid curl command")
msg = "Invalid curl command"
raise ValueError(msg)
args_template = {
"command": None,
"url": None,

View file

@@ -53,7 +53,8 @@ def retrieve_file_paths(
) -> list[str]:
path_obj = Path(path)
if not path_obj.exists() or not path_obj.is_dir():
raise ValueError(f"Path {path} must exist and be a directory.")
msg = f"Path {path} must exist and be a directory."
raise ValueError(msg)
def match_types(p: Path) -> bool:
return any(p.suffix == f".{t}" for t in types) if types else True
@@ -83,7 +84,8 @@ def partition_file_to_data(file_path: str, silent_errors: bool) -> Data | None:
elements = partition(file_path)
except Exception as e:
if not silent_errors:
raise ValueError(f"Error loading file {file_path}: {e}") from e
msg = f"Error loading file {file_path}: {e}"
raise ValueError(msg) from e
return None
# Create a Data
@@ -147,7 +149,8 @@ def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Data | None:
text = ET.tostring(xml_element, encoding="unicode")
except Exception as e:
if not silent_errors:
raise ValueError(f"Error loading file {file_path}: {e}") from e
msg = f"Error loading file {file_path}: {e}"
raise ValueError(msg) from e
return None
record = Data(data={"file_path": file_path, "text": text})

View file

@@ -15,9 +15,12 @@ class LCEmbeddingsModel(Component):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
def build_embeddings(self) -> Embeddings:
raise NotImplementedError("You must implement the build_embeddings method in your class.")
msg = "You must implement the build_embeddings method in your class."
raise NotImplementedError(msg)

View file

@@ -22,7 +22,8 @@ class ChatComponent(Component):
flow_id=self.graph.flow_id,
)
if len(messages) > 1:
raise ValueError("Only one message can be stored at a time.")
msg = "Only one message can be stored at a time."
raise ValueError(msg)
stored_message = messages[0]
if hasattr(self, "_event_manager") and self._event_manager and stored_message.id:
if not isinstance(message.text, str):
@@ -55,7 +56,8 @@ class ChatComponent(Component):
def _stream_message(self, message: Message, message_id: str) -> str:
iterator = message.text
if not isinstance(iterator, AsyncIterator | Iterator):
raise ValueError("The message must be an iterator or an async iterator.")
msg = "The message must be an iterator or an async iterator."
raise ValueError(msg)
if isinstance(iterator, AsyncIterator):
return run_until_complete(self._handle_async_iterator(iterator, message, message_id))

View file

@@ -19,9 +19,11 @@ class LCToolComponent(Component):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
@abstractmethod
def run_model(self) -> Data | list[Data]:

View file

@@ -22,9 +22,11 @@ class LCChatMemoryComponent(Component):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
def build_base_memory(self) -> BaseChatMemory:
return ConversationBufferMemory(chat_memory=self.build_message_history())

View file

@@ -48,9 +48,11 @@ class LCModelComponent(Component):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
def text_response(self) -> Message:
input_value = self.input_value
@@ -145,7 +147,8 @@ class LCModelComponent(Component):
):
messages: list[BaseMessage] = []
if not input_value and not system_message:
raise ValueError("The message you want to send to the model is empty.")
msg = "The message you want to send to the model is empty."
raise ValueError(msg)
system_message_added = False
if input_value:
if isinstance(input_value, Message):

View file

@@ -127,14 +127,16 @@ def validate_prompt(prompt_template: str, silent_errors: bool = False) -> list[s
# Check if there are invalid characters in the input_variables
input_variables = _check_input_variables(input_variables)
if any(var in _INVALID_NAMES for var in input_variables):
raise ValueError(f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. ")
msg = f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. "
raise ValueError(msg)
try:
PromptTemplate(template=prompt_template, input_variables=input_variables)
except Exception as exc:
logger.error(f"Invalid prompt: {exc}")
if not silent_errors:
raise ValueError(f"Invalid prompt: {exc}") from exc
msg = f"Invalid prompt: {exc}"
raise ValueError(msg) from exc
return input_variables

View file

@@ -14,9 +14,11 @@ class LCTextSplitterComponent(LCDocumentTransformerComponent):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
def build_document_transformer(self) -> BaseDocumentTransformer:
return self.build_text_splitter()

View file

@@ -35,7 +35,8 @@ class ComponentTool(BaseTool):
results, _ = self.component(**kwargs)
return results
except Exception as e:
raise ToolException(f"Error running {self.name}: {e}")
msg = f"Error running {self.name}: {e}"
raise ToolException(msg)
ComponentTool.update_forward_refs()

View file

@@ -33,7 +33,8 @@ class FlowTool(BaseTool):
elif self.graph is not None:
return build_schema_from_inputs(self.name, get_flow_inputs(self.graph))
else:
raise ToolException("No input schema available.")
msg = "No input schema available."
raise ToolException(msg)
def _run(
self,
@@ -45,9 +46,8 @@ class FlowTool(BaseTool):
if len(args_names) == len(args):
kwargs = {arg["arg_name"]: arg_value for arg, arg_value in zip(args_names, args)}
elif len(args_names) != len(args) and len(args) != 0:
raise ToolException(
"Number of arguments does not match the number of inputs. Pass keyword arguments instead."
)
msg = "Number of arguments does not match the number of inputs. Pass keyword arguments instead."
raise ToolException(msg)
tweaks = {arg["component_name"]: kwargs[arg["arg_name"]] for arg in args_names}
run_outputs = run_until_complete(
@@ -72,16 +72,16 @@ class FlowTool(BaseTool):
"""Validate the inputs."""
if len(args) > 0 and len(args) != len(args_names):
raise ToolException(
"Number of positional arguments does not match the number of inputs. Pass keyword arguments instead."
)
msg = "Number of positional arguments does not match the number of inputs. Pass keyword arguments instead."
raise ToolException(msg)
if len(args) == len(args_names):
kwargs = {arg_name["arg_name"]: arg_value for arg_name, arg_value in zip(args_names, args)}
missing_args = [arg["arg_name"] for arg in args_names if arg["arg_name"] not in kwargs]
if missing_args:
raise ToolException(f"Missing required arguments: {', '.join(missing_args)}")
msg = f"Missing required arguments: {', '.join(missing_args)}"
raise ToolException(msg)
return kwargs

View file

@@ -47,10 +47,11 @@ class LCVectorStoreComponent(Component):
if hasattr(cls, "build_vector_store"):
method = cls.build_vector_store
if not hasattr(method, "_is_cached_vector_store_checked"):
raise TypeError(
msg = (
f"The method 'build_vector_store' in class {cls.__name__} "
"must be decorated with @check_cached_vector_store"
)
raise TypeError(msg)
trace_type = "retriever"
outputs = [
@@ -81,9 +82,11 @@ class LCVectorStoreComponent(Component):
output_names = [output.name for output in self.outputs]
for method_name in required_output_methods:
if method_name not in output_names:
raise ValueError(f"Output with name '{method_name}' must be defined.")
msg = f"Output with name '{method_name}' must be defined."
raise ValueError(msg)
elif not hasattr(self, method_name):
raise ValueError(f"Method '{method_name}' must be defined.")
msg = f"Method '{method_name}' must be defined."
raise ValueError(msg)
def search_with_vector_store(
self,
@@ -112,7 +115,8 @@ class LCVectorStoreComponent(Component):
if input_value and isinstance(input_value, str) and hasattr(vector_store, "search"):
docs = vector_store.search(query=input_value, search_type=search_type.lower(), k=k, **kwargs)
else:
raise ValueError("Invalid inputs provided.")
msg = "Invalid inputs provided."
raise ValueError(msg)
data = docs_to_data(docs)
self.status = data
return data
@@ -136,7 +140,8 @@ class LCVectorStoreComponent(Component):
self.status = "Retriever built successfully."
return retriever
else:
raise ValueError(f"Vector Store {vector_store.__class__.__name__} does not have an as_retriever method.")
msg = f"Vector Store {vector_store.__class__.__name__} does not have an as_retriever method."
raise ValueError(msg)
def search_documents(self) -> list[Data]:
"""
@@ -175,4 +180,5 @@ class LCVectorStoreComponent(Component):
"""
Builds the Vector Store object.
"""
raise NotImplementedError("build_vector_store method must be implemented.")
msg = "build_vector_store method must be implemented."
raise NotImplementedError(msg)

View file

@@ -38,7 +38,8 @@ class OpenAIToolsAgentComponent(LCToolsAgentComponent):
def create_agent_runnable(self):
if "input" not in self.user_prompt:
raise ValueError("Prompt must contain 'input' key.")
msg = "Prompt must contain 'input' key."
raise ValueError(msg)
messages = [
("system", self.system_prompt),
("placeholder", "{chat_history}"),

View file

@@ -33,7 +33,8 @@ class ToolCallingAgentComponent(LCToolsAgentComponent):
def create_agent_runnable(self):
if "input" not in self.user_prompt:
raise ValueError("Prompt must contain 'input' key.")
msg = "Prompt must contain 'input' key."
raise ValueError(msg)
messages = [
("system", self.system_prompt),
("placeholder", "{chat_history}"),

View file

@@ -55,7 +55,8 @@ Question: {input}
def create_agent_runnable(self):
if "input" not in self.user_prompt:
raise ValueError("Prompt must contain 'input' key.")
msg = "Prompt must contain 'input' key."
raise ValueError(msg)
messages = [
("system", self.system_prompt),
("placeholder", "{chat_history}"),

View file

@@ -25,7 +25,8 @@ class GetEnvVar(Component):
def process_inputs(self) -> Message:
if self.env_var_name not in os.environ:
raise Exception(f"Environment variable {self.env_var_name} not set")
msg = f"Environment variable {self.env_var_name} not set"
raise Exception(msg)
else:
message = Message(text=os.environ[self.env_var_name])
return message

View file

@@ -92,4 +92,5 @@ class AssistantsRun(Component):
return message
except Exception as e:
print(e)
raise Exception(f"Error running assistant: {e}")
msg = f"Error running assistant: {e}"
raise Exception(msg)

View file

@@ -34,14 +34,16 @@ class SQLGeneratorComponent(LCChainComponent):
prompt_template = None
if self.top_k < 1:
raise ValueError("Top K must be greater than 0.")
msg = "Top K must be greater than 0."
raise ValueError(msg)
if not prompt_template:
sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, k=self.top_k)
else:
# Check if {question} is in the prompt
if "{question}" not in prompt_template.template or "question" not in prompt_template.input_variables:
raise ValueError("Prompt must contain `{question}` to be used with Natural Language to SQL.")
msg = "Prompt must contain `{question}` to be used with Natural Language to SQL."
raise ValueError(msg)
sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, prompt=prompt_template, k=self.top_k)
query_writer: Runnable = sql_query_chain | {"query": lambda x: x.replace("SQLQuery:", "").strip()}
response = query_writer.invoke(

View file

@@ -93,7 +93,8 @@ class APIRequestComponent(Component):
build_config["body"]["value"] = {}
except Exception as exc:
logger.error(f"Error parsing curl: {exc}")
raise ValueError(f"Error parsing curl: {exc}")
msg = f"Error parsing curl: {exc}"
raise ValueError(msg)
return build_config
def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
@@ -112,7 +113,8 @@
) -> Data:
method = method.upper()
if method not in ["GET", "POST", "PATCH", "PUT", "DELETE"]:
raise ValueError(f"Unsupported method: {method}")
msg = f"Unsupported method: {method}"
raise ValueError(msg)
if isinstance(body, str) and body:
try:
@@ -120,7 +122,8 @@
except Exception as e:
logger.error(f"Error decoding JSON data: {e}")
body = None
raise ValueError(f"Error decoding JSON data: {e}")
msg = f"Error decoding JSON data: {e}"
raise ValueError(msg)
data = body if body else None

View file

@@ -33,16 +33,19 @@ class FileComponent(Component):
def load_file(self) -> Data:
if not self.path:
raise ValueError("Please, upload a file to use this component.")
msg = "Please, upload a file to use this component."
raise ValueError(msg)
resolved_path = self.resolve_path(self.path)
silent_errors = self.silent_errors
extension = Path(resolved_path).suffix[1:].lower()
if extension == "doc":
raise ValueError("doc files are not supported. Please save as .docx")
msg = "doc files are not supported. Please save as .docx"
raise ValueError(msg)
if extension not in TEXT_FILE_TYPES:
raise ValueError(f"Unsupported file type: {extension}")
msg = f"Unsupported file type: {extension}"
raise ValueError(msg)
data = parse_text_file_to_data(resolved_path, silent_errors)
self.status = data if data else "No data"

View file

@@ -95,7 +95,8 @@ class GmailLoaderComponent(Component):
if name == "From":
from_email = values["value"]
if from_email is None:
raise ValueError("From email not found.")
msg = "From email not found."
raise ValueError(msg)
if "parts" in msg["payload"]:
parts = msg["payload"]["parts"]
@@ -113,7 +114,8 @@
additional_kwargs={"sender": from_email},
)
return message
raise ValueError("No plain text part found in the email.")
msg = "No plain text part found in the email."
raise ValueError(msg)
def _get_message_data(self, service: Any, message: Any) -> ChatSession:
msg = service.users().messages().get(userId="me", id=message["id"]).execute()
@@ -141,7 +143,8 @@
if message_id == in_reply_to:
response_email = message
if response_email is None:
raise ValueError("Response email not found in the thread.")
msg = "Response email not found in the thread."
raise ValueError(msg)
starter_content = self._extract_email_content(response_email)
return ChatSession(messages=[starter_content, message_content])
else:
@@ -172,7 +175,8 @@
try:
token_info = json.loads(json_string)
except JSONDecodeError as e:
raise ValueError("Invalid JSON string") from e
msg = "Invalid JSON string"
raise ValueError(msg) from e
creds = Credentials.from_authorized_user_info(token_info)
@@ -182,11 +186,11 @@
try:
docs = loader.load()
except RefreshError as e:
raise ValueError(
"Authentication error: Unable to refresh authentication token. Please try to reauthenticate."
) from e
msg = "Authentication error: Unable to refresh authentication token. Please try to reauthenticate."
raise ValueError(msg) from e
except Exception as e:
raise ValueError(f"Error loading documents: {e}") from e
msg = f"Error loading documents: {e}"
raise ValueError(msg) from e
# Return the loaded documents
self.status = docs

View file

@@ -44,7 +44,8 @@ class GoogleDriveComponent(Component):
if self.creds:
return self.creds
else:
raise ValueError("No credentials provided.")
msg = "No credentials provided."
raise ValueError(msg)
class Config:
arbitrary_types_allowed = True
@@ -53,7 +54,8 @@
document_ids = [self.document_id]
if len(document_ids) != 1:
raise ValueError("Expected a single document ID")
msg = "Expected a single document ID"
raise ValueError(msg)
# TODO: Add validation to check if the document ID is valid
@@ -61,7 +63,8 @@
try:
token_info = json.loads(json_string)
except JSONDecodeError as e:
raise ValueError("Invalid JSON string") from e
msg = "Invalid JSON string"
raise ValueError(msg) from e
# Initialize the custom loader with the provided credentials and document IDs
loader = CustomGoogleDriveLoader(
@@ -73,11 +76,11 @@
docs = loader.load()
# catch google.auth.exceptions.RefreshError
except RefreshError as e:
raise ValueError(
"Authentication error: Unable to refresh authentication token. Please try to reauthenticate."
) from e
msg = "Authentication error: Unable to refresh authentication token. Please try to reauthenticate."
raise ValueError(msg) from e
except Exception as e:
raise ValueError(f"Error loading documents: {e}") from e
msg = f"Error loading documents: {e}"
raise ValueError(msg) from e
assert len(docs) == 1, "Expected a single document to be loaded."

View file

@@ -65,7 +65,8 @@ class URLComponent(Component):
)
if not url_regex.match(string):
raise ValueError(f"Invalid URL: {string}")
msg = f"Invalid URL: {string}"
raise ValueError(msg)
return string

View file

@@ -125,9 +125,8 @@ class ChatLiteLLMModelComponent(LCModelComponent):
litellm.drop_params = True
litellm.set_verbose = self.verbose
except ImportError:
raise ChatLiteLLMException(
"Could not import litellm python package. " "Please install it with `pip install litellm`"
)
msg = "Could not import litellm python package. " "Please install it with `pip install litellm`"
raise ChatLiteLLMException(msg)
# Remove empty keys
if "" in self.kwargs:
del self.kwargs[""]
@@ -136,9 +135,11 @@
# Report missing fields for Azure provider
if self.provider == "Azure":
if "api_base" not in self.kwargs:
raise Exception("Missing api_base on kwargs")
msg = "Missing api_base on kwargs"
raise Exception(msg)
if "api_version" not in self.model_kwargs:
raise Exception("Missing api_version on model_kwargs")
msg = "Missing api_version on model_kwargs"
raise Exception(msg)
output = ChatLiteLLM(
model=f"{self.provider.lower()}/{self.model}",
client=None,

View file

@@ -40,7 +40,8 @@ class ExtractKeyFromDataComponent(CustomComponent):
extracted_keys[key] = getattr(data, key)
except AttributeError:
if not silent_error:
raise KeyError(f"The key '{key}' does not exist in the data.")
msg = f"The key '{key}' does not exist in the data."
raise KeyError(msg)
return_data = Data(data=extracted_keys)
self.status = return_data
return return_data

View file

@@ -45,7 +45,8 @@ class SubFlowComponent(CustomComponent):
try:
flow_data = self.get_flow(field_value)
if not flow_data:
raise ValueError(f"Flow {field_value} not found.")
msg = f"Flow {field_value} not found."
raise ValueError(msg)
graph = Graph.from_payload(flow_data.data["data"])
# Get all inputs from the graph
inputs = get_flow_inputs(graph)

View file

@@ -164,7 +164,8 @@ class AssemblyAILeMUR(Component):
max_output_size=self.max_output_size,
)
else:
raise ValueError(f"Endpoint not supported: {endpoint}")
msg = f"Endpoint not supported: {endpoint}"
raise ValueError(msg)
return result.dict()
@@ -178,4 +179,5 @@
elif model_name == "claude3_sonnet":
return aai.LemurModel.claude3_sonnet
else:
raise ValueError(f"Model name not supported: {model_name}")
msg = f"Model name not supported: {model_name}"
raise ValueError(msg)

View file

@@ -77,6 +77,7 @@ class AzureOpenAIEmbeddingsComponent(LCModelComponent):
dimensions=self.dimensions or None,
)
except Exception as e:
raise ValueError(f"Could not connect to AzureOpenAIEmbeddings API: {str(e)}") from e
msg = f"Could not connect to AzureOpenAIEmbeddings API: {str(e)}"
raise ValueError(msg) from e
return embeddings

View file

@@ -32,7 +32,8 @@ class GoogleGenerativeAIEmbeddingsComponent(Component):
def build_embeddings(self) -> Embeddings:
if not self.api_key:
raise ValueError("API Key is required")
msg = "API Key is required"
raise ValueError(msg)
class HotaGoogleGenerativeAIEmbeddings(GoogleGenerativeAIEmbeddings):
def __init__(self, *args, **kwargs):
@@ -85,7 +86,8 @@ class GoogleGenerativeAIEmbeddingsComponent(Component):
BatchEmbedContentsRequest(requests=requests, model=self.model)
)
except Exception as e:
raise GoogleGenerativeAIError(f"Error embedding content: {e}") from e
msg = f"Error embedding content: {e}"
raise GoogleGenerativeAIError(msg) from e
embeddings.extend([list(np.pad(e.values, (0, 768), "constant")) for e in result.embeddings])
return embeddings

View file

@@ -46,22 +46,25 @@ class HuggingFaceInferenceAPIEmbeddingsComponent(LCEmbeddingsModel):
def validate_inference_endpoint(self, inference_endpoint: str) -> bool:
parsed_url = urlparse(inference_endpoint)
if not all([parsed_url.scheme, parsed_url.netloc]):
raise ValueError(
msg = (
f"Invalid inference endpoint format: '{self.inference_endpoint}'. "
"Please ensure the URL includes both a scheme (e.g., 'http://' or 'https://') and a domain name. "
"Example: 'http://localhost:8080' or 'https://api.example.com'"
)
raise ValueError(msg)
try:
response = requests.get(f"{inference_endpoint}/health", timeout=5)
except requests.RequestException:
raise ValueError(
msg = (
f"Inference endpoint '{inference_endpoint}' is not responding. "
"Please ensure the URL is correct and the service is running."
)
raise ValueError(msg)
if response.status_code != 200:
raise ValueError(f"HuggingFace health check failed: {response.status_code}")
msg = f"HuggingFace health check failed: {response.status_code}"
raise ValueError(msg)
# returning True to solve linting error
return True
@ -86,11 +89,13 @@ class HuggingFaceInferenceAPIEmbeddingsComponent(LCEmbeddingsModel):
self.validate_inference_endpoint(api_url)
api_key = SecretStr("DummyAPIKeyForLocalDeployment")
elif not self.api_key:
raise ValueError("API Key is required for non-local inference endpoints")
msg = "API Key is required for non-local inference endpoints"
raise ValueError(msg)
else:
api_key = SecretStr(self.api_key)
try:
return self.create_huggingface_embeddings(api_key, api_url, self.model_name)
except Exception as e:
raise ValueError("Could not connect to HuggingFace Inference API.") from e
msg = "Could not connect to HuggingFace Inference API."
raise ValueError(msg) from e

View file

@ -43,7 +43,8 @@ class MistralAIEmbeddingsComponent(LCModelComponent):
def build_embeddings(self) -> Embeddings:
if not self.mistral_api_key:
raise ValueError("Mistral API Key is required")
msg = "Mistral API Key is required"
raise ValueError(msg)
api_key = SecretStr(self.mistral_api_key)

View file

@ -51,14 +51,16 @@ class NVIDIAEmbeddingsComponent(LCEmbeddingsModel):
build_config["model"]["options"] = ids
build_config["model"]["value"] = ids[0]
except Exception as e:
raise ValueError(f"Error getting model names: {e}")
msg = f"Error getting model names: {e}"
raise ValueError(msg)
return build_config
def build_embeddings(self) -> Embeddings:
try:
from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings
except ImportError:
raise ImportError("Please install langchain-nvidia-ai-endpoints to use the Nvidia model.")
msg = "Please install langchain-nvidia-ai-endpoints to use the Nvidia model."
raise ImportError(msg)
try:
output = NVIDIAEmbeddings(
model=self.model,
@ -67,5 +69,6 @@ class NVIDIAEmbeddingsComponent(LCEmbeddingsModel):
nvidia_api_key=self.nvidia_api_key,
) # type: ignore
except Exception as e:
raise ValueError(f"Could not connect to NVIDIA API. Error: {e}") from e
msg = f"Could not connect to NVIDIA API. Error: {e}"
raise ValueError(msg) from e
return output

View file

@ -43,5 +43,6 @@ class OllamaEmbeddingsComponent(LCModelComponent):
temperature=self.temperature,
) # type: ignore
except Exception as e:
raise ValueError("Could not connect to Ollama API.") from e
msg = "Could not connect to Ollama API."
raise ValueError(msg) from e
return output

View file

@ -39,9 +39,8 @@ class VertexAIEmbeddingsComponent(LCModelComponent):
try:
from langchain_google_vertexai import VertexAIEmbeddings
except ImportError:
raise ImportError(
"Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component."
)
msg = "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component."
raise ImportError(msg)
from google.oauth2 import service_account

View file

@ -62,7 +62,8 @@ class ParseJSONDataComponent(Component):
try:
to_filter_as_dict.append(json.loads(repair_json(f)))
except JSONDecodeError as e:
raise ValueError(f"Invalid JSON: {e}")
msg = f"Invalid JSON: {e}"
raise ValueError(msg)
full_filter_str = json.dumps(to_filter_as_dict)

View file

@ -57,9 +57,10 @@ class FirecrawlCrawlApi(CustomComponent):
try:
from firecrawl.firecrawl import FirecrawlApp # type: ignore
except ImportError:
raise ImportError(
msg = (
"Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`."
)
raise ImportError(msg)
if crawlerOptions:
crawler_options_dict = crawlerOptions.__dict__["data"]["text"]
else:

View file

@ -50,9 +50,10 @@ class FirecrawlScrapeApi(CustomComponent):
try:
from firecrawl.firecrawl import FirecrawlApp # type: ignore
except ImportError:
raise ImportError(
msg = (
"Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`."
)
raise ImportError(msg)
if extractorOptions:
extractor_options_dict = extractorOptions.__dict__["data"]["text"]
else:

View file

@ -43,6 +43,7 @@ class JSONDocumentBuilder(CustomComponent):
elif isinstance(document, Document):
documents = Document(page_content=orjson_dumps({key: document.page_content}, indent_2=False))
else:
raise TypeError(f"Expected Document or list of Documents, got {type(document)}")
msg = f"Expected Document or list of Documents, got {type(document)}"
raise TypeError(msg)
self.repr_value = documents
return documents

View file

@ -110,9 +110,11 @@ class SpiderTool(Component):
elif self.mode == "crawl":
result = app.crawl_url(self.url, parameters)
else:
raise ValueError(f"Invalid mode: {self.mode}. Must be 'scrape' or 'crawl'.")
msg = f"Invalid mode: {self.mode}. Must be 'scrape' or 'crawl'."
raise ValueError(msg)
except Exception as e:
raise Exception(f"Error: {str(e)}")
msg = f"Error: {str(e)}"
raise Exception(msg)
records = []

View file

@ -53,10 +53,11 @@ class AstraDBChatMemory(LCChatMemoryComponent):
try:
from langchain_astradb.chat_message_histories import AstraDBChatMessageHistory
except ImportError:
raise ImportError(
msg = (
"Could not import langchain Astra DB integration package. "
"Please install it with `pip install langchain-astradb`."
)
raise ImportError(msg)
memory = AstraDBChatMessageHistory(
session_id=self.session_id,

View file

@ -55,9 +55,8 @@ class CassandraChatMemory(LCChatMemoryComponent):
try:
import cassio
except ImportError:
raise ImportError(
"Could not import cassio integration package. " "Please install it with `pip install cassio`."
)
msg = "Could not import cassio integration package. " "Please install it with `pip install cassio`."
raise ImportError(msg)
from uuid import UUID

View file

@ -35,9 +35,8 @@ class ZepChatMemory(LCChatMemoryComponent):
zep_python.zep_client.API_BASE_PATH = self.api_base_path
except ImportError:
raise ImportError(
"Could not import zep-python package. " "Please install it with `pip install zep-python`."
)
msg = "Could not import zep-python package. " "Please install it with `pip install zep-python`."
raise ImportError(msg)
zep_client = ZepClient(api_url=self.url, api_key=self.api_key)
return ZepChatMessageHistory(session_id=self.session_id, zep_client=zep_client)

View file

@ -69,7 +69,8 @@ class AmazonBedrockComponent(LCModelComponent):
try:
from langchain_aws import ChatBedrock
except ImportError:
raise ImportError("langchain_aws is not installed. Please install it with `pip install langchain_aws`.")
msg = "langchain_aws is not installed. Please install it with `pip install langchain_aws`."
raise ImportError(msg)
if self.aws_access_key:
import boto3 # type: ignore
@ -103,5 +104,6 @@ class AmazonBedrockComponent(LCModelComponent):
streaming=self.stream,
)
except Exception as e:
raise ValueError("Could not connect to AmazonBedrock API.") from e
msg = "Could not connect to AmazonBedrock API."
raise ValueError(msg) from e
return output # type: ignore

View file

@ -63,9 +63,8 @@ class AnthropicModelComponent(LCModelComponent):
try:
from langchain_anthropic.chat_models import ChatAnthropic
except ImportError:
raise ImportError(
"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`."
)
msg = "langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`."
raise ImportError(msg)
model = self.model
anthropic_api_key = self.anthropic_api_key
max_tokens = self.max_tokens
@ -82,7 +81,8 @@ class AnthropicModelComponent(LCModelComponent):
streaming=self.stream,
)
except Exception as e:
raise ValueError("Could not connect to Anthropic API.") from e
msg = "Could not connect to Anthropic API."
raise ValueError(msg) from e
return output # type: ignore

View file

@ -78,6 +78,7 @@ class AzureChatOpenAIComponent(LCModelComponent):
streaming=stream,
)
except Exception as e:
raise ValueError(f"Could not connect to AzureOpenAI API: {str(e)}") from e
msg = f"Could not connect to AzureOpenAI API: {str(e)}"
raise ValueError(msg) from e
return output # type: ignore

View file

@ -97,6 +97,7 @@ class QianfanChatEndpointComponent(LCModelComponent):
endpoint=endpoint,
)
except Exception as e:
raise ValueError("Could not connect to Baidu Qianfan API.") from e
msg = "Could not connect to Baidu Qianfan API."
raise ValueError(msg) from e
return output # type: ignore

View file

@ -63,7 +63,8 @@ class GoogleGenerativeAIComponent(LCModelComponent):
try:
from langchain_google_genai import ChatGoogleGenerativeAI
except ImportError:
raise ImportError("The 'langchain_google_genai' package is required to use the Google Generative AI model.")
msg = "The 'langchain_google_genai' package is required to use the Google Generative AI model."
raise ImportError(msg)
google_api_key = self.google_api_key
model = self.model

View file

@ -113,6 +113,7 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
repetition_penalty=repetition_penalty,
)
except Exception as e:
raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
msg = "Could not connect to HuggingFace Endpoints API."
raise ValueError(msg) from e
return llm

View file

@ -65,14 +65,16 @@ class NVIDIAModelComponent(LCModelComponent):
build_config["model_name"]["options"] = ids
build_config["model_name"]["value"] = ids[0]
except Exception as e:
raise ValueError(f"Error getting model names: {e}")
msg = f"Error getting model names: {e}"
raise ValueError(msg)
return build_config
def build_model(self) -> LanguageModel: # type: ignore[type-var]
try:
from langchain_nvidia_ai_endpoints import ChatNVIDIA
except ImportError:
raise ImportError("Please install langchain-nvidia-ai-endpoints to use the NVIDIA model.")
msg = "Please install langchain-nvidia-ai-endpoints to use the NVIDIA model."
raise ImportError(msg)
nvidia_api_key = self.nvidia_api_key
temperature = self.temperature
model_name: str = self.model_name

View file

@ -67,7 +67,8 @@ class ChatOllamaComponent(LCModelComponent):
model_names = [model["name"] for model in data.get("models", [])]
return model_names
except Exception as e:
raise ValueError("Could not retrieve models. Please, make sure Ollama is running.") from e
msg = "Could not retrieve models. Please, make sure Ollama is running."
raise ValueError(msg) from e
inputs = LCModelComponent._base_inputs + [
StrInput(
@ -261,6 +262,7 @@ class ChatOllamaComponent(LCModelComponent):
try:
output = ChatOllama(**llm_params) # type: ignore
except Exception as e:
raise ValueError("Could not initialize Ollama LLM.") from e
msg = "Could not initialize Ollama LLM."
raise ValueError(msg) from e
return output # type: ignore

View file

@ -42,9 +42,8 @@ class ChatVertexAIComponent(LCModelComponent):
try:
from langchain_google_vertexai import ChatVertexAI
except ImportError:
raise ImportError(
"Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component."
)
msg = "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component."
raise ImportError(msg)
location = self.location or None
if self.credentials:
from google.cloud import aiplatform

View file

@ -110,7 +110,8 @@ class LangChainHubPromptComponent(Component):
# Check if the api key is provided
if not self.langchain_api_key:
raise ValueError("Please provide a LangChain API Key")
msg = "Please provide a LangChain API Key"
raise ValueError(msg)
# Pull the prompt from LangChain Hub
prompt_data = langchain.hub.pull(self.langchain_hub_prompt, api_key=self.langchain_api_key)

View file

@ -50,7 +50,8 @@ class CreateDataComponent(Component):
existing_fields = {}
if field_value_int > 15:
build_config["number_of_fields"]["value"] = 15
raise ValueError("Number of fields cannot exceed 15. Try using a Component to combine two Data.")
msg = "Number of fields cannot exceed 15. Try using a Component to combine two Data."
raise ValueError(msg)
if len(build_config) > len(default_keys):
# back up the existing template fields
for key in build_config.copy():
@ -99,4 +100,5 @@ class CreateDataComponent(Component):
data_keys = self.get_data().keys()
if self.text_key not in data_keys and self.text_key != "":
formatted_data_keys = ", ".join(data_keys)
raise ValueError(f"Text Key: '{self.text_key}' not found in the Data keys: '{formatted_data_keys}'")
msg = f"Text Key: '{self.text_key}' not found in the Data keys: '{formatted_data_keys}'"
raise ValueError(msg)

View file

@ -74,11 +74,13 @@ class FlowToolComponent(LCToolComponent):
def build_tool(self) -> Tool:
FlowTool.update_forward_refs()
if "flow_name" not in self._attributes or not self._attributes["flow_name"]:
raise ValueError("Flow name is required")
msg = "Flow name is required"
raise ValueError(msg)
flow_name = self._attributes["flow_name"]
flow_data = self.get_flow(flow_name)
if not flow_data:
raise ValueError("Flow not found.")
msg = "Flow not found."
raise ValueError(msg)
graph = Graph.from_payload(flow_data.data["data"])
try:
graph.set_run_id(self.graph.run_id)

View file

@ -48,9 +48,8 @@ class JSONCleaner(Component):
try:
from json_repair import repair_json # type: ignore
except ImportError:
raise ImportError(
"Could not import the json_repair package." "Please install it with `pip install json_repair`."
)
msg = "Could not import the json_repair package." "Please install it with `pip install json_repair`."
raise ImportError(msg)
"""Clean the input JSON string based on provided options and return the cleaned JSON string."""
json_str = self.json_str
@ -62,7 +61,8 @@ class JSONCleaner(Component):
start = json_str.find("{")
end = json_str.rfind("}")
if start == -1 or end == -1:
raise ValueError("Invalid JSON string: Missing '{' or '}'")
msg = "Invalid JSON string: Missing '{' or '}'"
raise ValueError(msg)
json_str = json_str[start : end + 1]
if remove_control_chars:
@ -78,7 +78,8 @@ class JSONCleaner(Component):
self.status = result
return Message(text=result)
except Exception as e:
raise ValueError(f"Error cleaning JSON string: {str(e)}")
msg = f"Error cleaning JSON string: {str(e)}"
raise ValueError(msg)
def _remove_control_characters(self, s: str) -> str:
"""Remove control characters from the string."""
@ -94,4 +95,5 @@ class JSONCleaner(Component):
json.loads(s)
return s
except json.JSONDecodeError as e:
raise ValueError(f"Invalid JSON string: {str(e)}")
msg = f"Invalid JSON string: {str(e)}"
raise ValueError(msg)

View file

@ -49,7 +49,8 @@ class RunFlowComponent(Component):
async def generate_results(self) -> list[Data]:
if "flow_name" not in self._attributes or not self._attributes["flow_name"]:
raise ValueError("Flow name is required")
msg = "Flow name is required"
raise ValueError(msg)
flow_name = self._attributes["flow_name"]
results: list[RunOutputs | None] = await self.run_flow(

View file

@ -118,7 +118,8 @@ class RunnableExecComponent(Component):
async def build_executor(self) -> Message:
input_dict, status = self.get_input_dict(self.runnable, self.input_key, self.input_value)
if not isinstance(self.runnable, AgentExecutor):
raise ValueError("The runnable must be an AgentExecutor")
msg = "The runnable must be an AgentExecutor"
raise ValueError(msg)
if self.use_stream:
return self.astream_events(input_dict)

View file

@ -48,7 +48,8 @@ class SQLExecutorComponent(CustomComponent):
try:
database = SQLDatabase.from_uri(database_url)
except Exception as e:
raise ValueError(f"An error occurred while connecting to the database: {e}")
msg = f"An error occurred while connecting to the database: {e}"
raise ValueError(msg)
try:
tool = QuerySQLDataBaseTool(db=database)
result = tool.run(query, include_columns=include_columns)

View file

@ -38,7 +38,8 @@ class SelectDataComponent(Component):
# Validate that the selected index is within bounds
if selected_index < 0 or selected_index >= len(self.data_list):
raise ValueError(f"Selected index {selected_index} is out of range.")
msg = f"Selected index {selected_index} is out of range."
raise ValueError(msg)
# Return the selected Data object
selected_data = self.data_list[selected_index]

View file

@ -39,7 +39,8 @@ class SubFlowComponent(Component):
try:
flow_data = self.get_flow(field_value)
if not flow_data:
raise ValueError(f"Flow {field_value} not found.")
msg = f"Flow {field_value} not found."
raise ValueError(msg)
graph = Graph.from_payload(flow_data.data["data"])
# Get all inputs from the graph
inputs = get_flow_inputs(graph)

View file

@ -56,7 +56,8 @@ class UpdateDataComponent(Component):
existing_fields = {}
if field_value_int > 15:
build_config["number_of_fields"]["value"] = 15
raise ValueError("Number of fields cannot exceed 15. Try using a Component to combine two Data.")
msg = "Number of fields cannot exceed 15. Try using a Component to combine two Data."
raise ValueError(msg)
if len(build_config) > len(default_keys):
# back up the existing template fields
for key in build_config.copy():
@ -105,4 +106,5 @@ class UpdateDataComponent(Component):
"""This function validates that the Text Key is one of the keys in the Data"""
data_keys = data.data.keys()
if self.text_key not in data_keys and self.text_key != "":
raise ValueError(f"Text Key: {self.text_key} not found in the Data keys: {','.join(data_keys)}")
msg = f"Text Key: {self.text_key} not found in the Data keys: {','.join(data_keys)}"
raise ValueError(msg)

View file

@ -48,5 +48,6 @@ class AmazonKendraRetrieverComponent(CustomComponent):
user_context=user_context,
) # type: ignore
except Exception as e:
raise ValueError("Could not connect to AmazonKendra API.") from e
msg = "Could not connect to AmazonKendra API."
raise ValueError(msg) from e
return cast(Retriever, output)

View file

@ -82,4 +82,5 @@ class CohereRerankComponent(LCVectorStoreComponent):
@check_cached_vector_store
def build_vector_store(self) -> VectorStore:
raise NotImplementedError("Cohere Rerank does not support vector stores.")
msg = "Cohere Rerank does not support vector stores."
raise NotImplementedError(msg)

View file

@ -25,5 +25,6 @@ class MetalRetrieverComponent(CustomComponent):
try:
metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id)
except Exception as e:
raise ValueError("Could not connect to Metal API.") from e
msg = "Could not connect to Metal API."
raise ValueError(msg) from e
return cast(Retriever, MetalRetriever(client=metal, params=params or {}))

View file

@ -55,14 +55,16 @@ class NvidiaRerankComponent(LCVectorStoreComponent):
build_config["model"]["options"] = ids
build_config["model"]["value"] = ids[0]
except Exception as e:
raise ValueError(f"Error getting model names: {e}")
msg = f"Error getting model names: {e}"
raise ValueError(msg)
return build_config
def build_model(self):
try:
from langchain_nvidia_ai_endpoints import NVIDIARerank
except ImportError:
raise ImportError("Please install langchain-nvidia-ai-endpoints to use the NVIDIA model.")
msg = "Please install langchain-nvidia-ai-endpoints to use the NVIDIA model."
raise ImportError(msg)
return NVIDIARerank(api_key=self.api_key, model=self.model, base_url=self.base_url)
def build_base_retriever(self) -> Retriever: # type: ignore[type-var]
@ -79,4 +81,5 @@ class NvidiaRerankComponent(LCVectorStoreComponent):
@check_cached_vector_store
def build_vector_store(self) -> VectorStore:
raise NotImplementedError("NVIDIA Rerank does not support vector stores.")
msg = "NVIDIA Rerank does not support vector stores."
raise NotImplementedError(msg)

View file

@ -66,7 +66,8 @@ class SelfQueryRetrieverComponent(Component):
elif isinstance(self.query, str):
input_text = self.query
else:
raise ValueError(f"Query type {type(self.query)} not supported.")
msg = f"Query type {type(self.query)} not supported."
raise ValueError(msg)
documents = self_query_retriever.invoke(input=input_text, config={"callbacks": self.get_langchain_callbacks()})
data = [Data.from_document(document) for document in documents]

View file

@ -51,7 +51,8 @@ class VectaraSelfQueryRetriverComponent(CustomComponent):
for meta in metadata_field_info:
meta_obj = json.loads(meta)
if "name" not in meta_obj or "description" not in meta_obj or "type" not in meta_obj:
raise Exception("Incorrect metadata field info format.")
msg = "Incorrect metadata field info format."
raise Exception(msg)
attribute_info = AttributeInfo(
name=meta_obj["name"],
description=meta_obj["description"],

View file

@ -67,7 +67,8 @@ class GleanSearchAPIComponent(LCToolComponent):
results = self._search_api_results(query, **kwargs)
if len(results) == 0:
raise AssertionError("No good Glean Search Result was found")
msg = "No good Glean Search Result was found"
raise AssertionError(msg)
return results

View file

@ -39,5 +39,6 @@ class GoogleSearchAPIComponent(LCToolComponent):
try:
from langchain_google_community import GoogleSearchAPIWrapper # type: ignore
except ImportError:
raise ImportError("Please install langchain-google-community to use GoogleSearchAPIWrapper.")
msg = "Please install langchain-google-community to use GoogleSearchAPIWrapper."
raise ImportError(msg)
return GoogleSearchAPIWrapper(google_api_key=self.google_api_key, google_cse_id=self.google_cse_id, k=self.k)

View file

@ -175,7 +175,8 @@ class PythonCodeStructuredTool(LCToolComponent):
field_name = attr.split("|")[1]
func_arg = self._find_arg(named_functions, func_name, field_name)
if func_arg is None:
raise Exception(f"Failed to find arg: {field_name}")
msg = f"Failed to find arg: {field_name}"
raise Exception(msg)
field_annotation = func_arg["annotation"]
field_description = self._get_value(self._attributes[attr], str)
@ -250,7 +251,8 @@ class PythonCodeStructuredTool(LCToolComponent):
func = {"name": node.name, "args": []}
for arg in node.args.args:
if arg.lineno != arg.end_lineno:
raise Exception("Multiline arguments are not supported")
msg = "Multiline arguments are not supported"
raise Exception(msg)
func_arg = {
"name": arg.arg,

View file

@ -54,14 +54,16 @@ class PythonREPLToolComponent(LCToolComponent):
elif isinstance(global_imports, list):
modules = global_imports
else:
raise ValueError("global_imports must be either a string or a list")
msg = "global_imports must be either a string or a list"
raise ValueError(msg)
for module in modules:
try:
imported_module = importlib.import_module(module)
global_dict[imported_module.__name__] = imported_module
except ImportError:
raise ImportError(f"Could not import module {module}")
msg = f"Could not import module {module}"
raise ImportError(msg)
return global_dict
def build_tool(self) -> Tool:

View file

@ -89,7 +89,8 @@ class SearXNGToolComponent(LCToolComponent):
@staticmethod
def search(query: str, categories: list[str] = []) -> list:
if not SearxSearch._categories and not categories:
raise ValueError("No categories provided.")
msg = "No categories provided."
raise ValueError(msg)
all_categories = SearxSearch._categories + list(set(categories) - set(SearxSearch._categories))
try:
url = f"{SearxSearch._url}/"

View file

@ -367,10 +367,11 @@ class AstraVectorStoreComponent(LCVectorStoreComponent):
from langchain_astradb import AstraDBVectorStore
from langchain_astradb.utils.astradb import SetupMode
except ImportError:
raise ImportError(
msg = (
"Could not import langchain Astra DB integration package. "
"Please install it with `pip install langchain-astradb`."
)
raise ImportError(msg)
try:
if not self.setup_mode:
@ -378,7 +379,8 @@ class AstraVectorStoreComponent(LCVectorStoreComponent):
setup_mode_value = SetupMode[self.setup_mode.upper()]
except KeyError:
raise ValueError(f"Invalid setup mode: {self.setup_mode}")
msg = f"Invalid setup mode: {self.setup_mode}"
raise ValueError(msg)
if self.embedding:
embedding_dict = {"embedding": self.embedding}
@ -423,7 +425,8 @@ class AstraVectorStoreComponent(LCVectorStoreComponent):
try:
vector_store = AstraDBVectorStore(**vector_store_kwargs)
except Exception as e:
raise ValueError(f"Error initializing AstraDBVectorStore: {str(e)}") from e
msg = f"Error initializing AstraDBVectorStore: {str(e)}"
raise ValueError(msg) from e
self._add_documents_to_vector_store(vector_store)
@ -435,14 +438,16 @@ class AstraVectorStoreComponent(LCVectorStoreComponent):
if isinstance(_input, Data):
documents.append(_input.to_lc_document())
else:
raise ValueError("Vector Store Inputs must be Data objects.")
msg = "Vector Store Inputs must be Data objects."
raise ValueError(msg)
if documents:
logger.debug(f"Adding {len(documents)} documents to the Vector Store.")
try:
vector_store.add_documents(documents)
except Exception as e:
raise ValueError(f"Error adding documents to AstraDBVectorStore: {str(e)}") from e
msg = f"Error adding documents to AstraDBVectorStore: {str(e)}"
raise ValueError(msg) from e
else:
logger.debug("No documents to add to the Vector Store.")
@ -481,7 +486,8 @@ class AstraVectorStoreComponent(LCVectorStoreComponent):
docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)
except Exception as e:
raise ValueError(f"Error performing search in AstraDBVectorStore: {str(e)}") from e
msg = f"Error performing search in AstraDBVectorStore: {str(e)}"
raise ValueError(msg) from e
logger.debug(f"Retrieved documents: {len(docs)}")

View file

@ -137,9 +137,8 @@ class CassandraVectorStoreComponent(LCVectorStoreComponent):
import cassio
from langchain_community.utilities.cassandra import SetupMode
except ImportError:
raise ImportError(
"Could not import cassio integration package. " "Please install it with `pip install cassio`."
)
msg = "Could not import cassio integration package. " "Please install it with `pip install cassio`."
raise ImportError(msg)
from uuid import UUID
@ -235,10 +234,11 @@ class CassandraVectorStoreComponent(LCVectorStoreComponent):
docs = vector_store.search(query=self.search_query, search_type=search_type, **search_args)
except KeyError as e:
if "content" in str(e):
raise ValueError(
msg = (
"You should ingest data through Langflow (or LangChain) to query it in Langflow. "
"Your collection does not contain a field name 'content'."
)
raise ValueError(msg)
else:
raise e
@ -262,7 +262,8 @@ class CassandraVectorStoreComponent(LCVectorStoreComponent):
args["filter"] = clean_filter
if self.body_search:
if not self.enable_body_search:
raise ValueError("You should enable body search when creating the table to search the body field.")
msg = "You should enable body search when creating the table to search the body field."
raise ValueError(msg)
args["body_search"] = self.body_search
return args

View file

@ -126,9 +126,8 @@ class CassandraGraphVectorStoreComponent(LCVectorStoreComponent):
import cassio
from langchain_community.utilities.cassandra import SetupMode
except ImportError:
raise ImportError(
"Could not import cassio integration package. " "Please install it with `pip install cassio`."
)
msg = "Could not import cassio integration package. " "Please install it with `pip install cassio`."
raise ImportError(msg)
database_ref = self.database_ref
@ -214,10 +213,11 @@ class CassandraGraphVectorStoreComponent(LCVectorStoreComponent):
docs = vector_store.search(query=self.search_query, search_type=search_type, **search_args)
except KeyError as e:
if "content" in str(e):
raise ValueError(
msg = (
"You should ingest data through Langflow (or LangChain) to query it in Langflow. "
"Your collection does not contain a field name 'content'."
) from e
)
raise ValueError(msg) from e
else:
raise e

View file

@ -107,9 +107,10 @@ class ChromaVectorStoreComponent(LCVectorStoreComponent):
from chromadb import Client
from langchain_chroma import Chroma
except ImportError:
raise ImportError(
msg = (
"Could not import Chroma integration package. " "Please install it with `pip install langchain-chroma`."
)
raise ImportError(msg)
# Chroma settings
chroma_settings = None
client = None
@ -163,7 +164,8 @@ class ChromaVectorStoreComponent(LCVectorStoreComponent):
if _input not in _stored_documents_without_id:
documents.append(_input.to_lc_document())
else:
raise ValueError("Vector Store Inputs must be Data objects.")
msg = "Vector Store Inputs must be Data objects."
raise ValueError(msg)
if documents and self.embedding is not None:
logger.debug(f"Adding {len(documents)} documents to the Vector Store.")

View file

@ -72,16 +72,18 @@ class ClickhouseVectorStoreComponent(LCVectorStoreComponent):
try:
import clickhouse_connect # type: ignore
except ImportError as e:
raise ImportError(
msg = (
"Failed to import Clickhouse dependencies. "
"Install it using `pip install langflow[clickhouse-connect] --pre`"
) from e
)
raise ImportError(msg) from e
try:
client = clickhouse_connect.get_client(host=self.host, username=self.username, password=self.password)
client.command("SELECT 1")
except Exception as e:
raise ValueError(f"Failed to connect to Clickhouse: {e}")
msg = f"Failed to connect to Clickhouse: {e}"
raise ValueError(msg)
documents = []
for _input in self.ingest_data or []:

View file

@ -48,9 +48,8 @@ class CouchbaseVectorStoreComponent(LCVectorStoreComponent):
from couchbase.cluster import Cluster # type: ignore
from couchbase.options import ClusterOptions # type: ignore
except ImportError as e:
raise ImportError(
"Failed to import Couchbase dependencies. Install it using `pip install langflow[couchbase] --pre`"
) from e
msg = "Failed to import Couchbase dependencies. Install it using `pip install langflow[couchbase] --pre`"
raise ImportError(msg) from e
try:
auth = PasswordAuthenticator(self.couchbase_username, self.couchbase_password)
@ -59,7 +58,8 @@ class CouchbaseVectorStoreComponent(LCVectorStoreComponent):
cluster.wait_until_ready(timedelta(seconds=5))
except Exception as e:
raise ValueError(f"Failed to connect to Couchbase: {e}")
msg = f"Failed to connect to Couchbase: {e}"
raise ValueError(msg)
documents = []
for _input in self.ingest_data or []:

View file

@ -62,7 +62,8 @@ class FaissVectorStoreComponent(LCVectorStoreComponent):
Builds the FAISS object.
"""
if not self.persist_directory:
raise ValueError("Folder path is required to save the FAISS index.")
msg = "Folder path is required to save the FAISS index."
raise ValueError(msg)
path = self.resolve_path(self.persist_directory)
documents = []
@ -83,7 +84,8 @@ class FaissVectorStoreComponent(LCVectorStoreComponent):
Search for documents in the FAISS vector store.
"""
if not self.persist_directory:
raise ValueError("Folder path is required to load the FAISS index.")
msg = "Folder path is required to load the FAISS index."
raise ValueError(msg)
path = self.resolve_path(self.persist_directory)
vector_store = FAISS.load_local(
@ -94,7 +96,8 @@ class FaissVectorStoreComponent(LCVectorStoreComponent):
)
if not vector_store:
raise ValueError("Failed to load the FAISS index.")
msg = "Failed to load the FAISS index."
raise ValueError(msg)
logger.debug(f"Search input: {self.search_query}")
logger.debug(f"Number of results: {self.number_of_results}")

View file

@ -181,18 +181,18 @@ class HCDVectorStoreComponent(LCVectorStoreComponent):
from langchain_astradb import AstraDBVectorStore
from langchain_astradb.utils.astradb import SetupMode
except ImportError:
raise ImportError(
msg = (
"Could not import langchain Astra DB integration package. "
"Please install it with `pip install langchain-astradb`."
)
raise ImportError(msg)
try:
from astrapy.authentication import UsernamePasswordTokenProvider
from astrapy.constants import Environment
except ImportError:
raise ImportError(
"Could not import astrapy integration package. " "Please install it with `pip install astrapy`."
)
msg = "Could not import astrapy integration package. " "Please install it with `pip install astrapy`."
raise ImportError(msg)
try:
if not self.setup_mode:
@ -200,7 +200,8 @@ class HCDVectorStoreComponent(LCVectorStoreComponent):
setup_mode_value = SetupMode[self.setup_mode.upper()]
except KeyError:
raise ValueError(f"Invalid setup mode: {self.setup_mode}")
msg = f"Invalid setup mode: {self.setup_mode}"
raise ValueError(msg)
if not isinstance(self.embedding, dict):
embedding_dict = {"embedding": self.embedding}
@ -246,7 +247,8 @@ class HCDVectorStoreComponent(LCVectorStoreComponent):
try:
vector_store = AstraDBVectorStore(**vector_store_kwargs)
except Exception as e:
raise ValueError(f"Error initializing AstraDBVectorStore: {str(e)}") from e
msg = f"Error initializing AstraDBVectorStore: {str(e)}"
raise ValueError(msg) from e
self._add_documents_to_vector_store(vector_store)
return vector_store
@ -257,14 +259,16 @@ class HCDVectorStoreComponent(LCVectorStoreComponent):
if isinstance(_input, Data):
documents.append(_input.to_lc_document())
else:
raise ValueError("Vector Store Inputs must be Data objects.")
msg = "Vector Store Inputs must be Data objects."
raise ValueError(msg)
if documents:
logger.debug(f"Adding {len(documents)} documents to the Vector Store.")
try:
vector_store.add_documents(documents)
except Exception as e:
raise ValueError(f"Error adding documents to AstraDBVectorStore: {str(e)}") from e
msg = f"Error adding documents to AstraDBVectorStore: {str(e)}"
raise ValueError(msg) from e
else:
logger.debug("No documents to add to the Vector Store.")
@ -302,7 +306,8 @@ class HCDVectorStoreComponent(LCVectorStoreComponent):
docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)
except Exception as e:
raise ValueError(f"Error performing search in AstraDBVectorStore: {str(e)}") from e
msg = f"Error performing search in AstraDBVectorStore: {str(e)}"
raise ValueError(msg) from e
logger.debug(f"Retrieved documents: {len(docs)}")

View file

@ -74,9 +74,10 @@ class MilvusVectorStoreComponent(LCVectorStoreComponent):
try:
from langchain_milvus.vectorstores import Milvus as LangchainMilvus
except ImportError:
raise ImportError(
msg = (
"Could not import Milvus integration package. " "Please install it with `pip install langchain-milvus`."
)
raise ImportError(msg)
self.connection_args.update(uri=self.uri, token=self.password)
milvus_store = LangchainMilvus(
embedding_function=self.embedding,

View file

@ -39,13 +39,15 @@ class MongoVectorStoreComponent(LCVectorStoreComponent):
try:
from pymongo import MongoClient
except ImportError:
raise ImportError("Please install pymongo to use MongoDB Atlas Vector Store")
msg = "Please install pymongo to use MongoDB Atlas Vector Store"
raise ImportError(msg)
try:
mongo_client: MongoClient = MongoClient(self.mongodb_atlas_cluster_uri)
collection = mongo_client[self.db_name][self.collection_name]
except Exception as e:
raise ValueError(f"Failed to connect to MongoDB Atlas: {e}")
msg = f"Failed to connect to MongoDB Atlas: {e}"
raise ValueError(msg)
documents = []
for _input in self.ingest_data or []:

View file

@ -85,7 +85,8 @@ class QdrantVectorStoreComponent(LCVectorStoreComponent):
documents.append(_input)
if not isinstance(self.embedding, Embeddings):
raise ValueError("Invalid embedding object")
msg = "Invalid embedding object"
raise ValueError(msg)
if documents:
qdrant = Qdrant.from_documents(documents, embedding=self.embedding, **qdrant_kwargs)

View file

@ -58,7 +58,8 @@ class RedisVectorStoreComponent(LCVectorStoreComponent):
if not documents:
if self.schema is None:
raise ValueError("If no documents are provided, a schema must be provided.")
msg = "If no documents are provided, a schema must be provided."
raise ValueError(msg)
redis_vs = Redis.from_existing_index(
embedding=self.embedding,
index_name=self.redis_index_name,

View file

@ -59,7 +59,8 @@ class VectaraVectorStoreComponent(LCVectorStoreComponent):
try:
from langchain_community.vectorstores import Vectara
except ImportError:
raise ImportError("Could not import Vectara. Please install it with `pip install langchain-community`.")
msg = "Could not import Vectara. Please install it with `pip install langchain-community`."
raise ImportError(msg)
vectara = Vectara(
vectara_customer_id=self.vectara_customer_id,

View file

@ -50,7 +50,8 @@ class WeaviateVectorStoreComponent(LCVectorStoreComponent):
client = weaviate.Client(url=self.url)
if self.index_name != self.index_name.capitalize():
raise ValueError(f"Weaviate requires the index name to be capitalized. Use: {self.index_name.capitalize()}")
msg = f"Weaviate requires the index name to be capitalized. Use: {self.index_name.capitalize()}"
raise ValueError(msg)
documents = []
for _input in self.ingest_data or []:

View file

@ -137,7 +137,8 @@ class VectaraRagComponent(Component):
from langchain_community.vectorstores import Vectara
from langchain_community.vectorstores.vectara import RerankConfig, SummaryConfig, VectaraQueryConfig
except ImportError:
raise ImportError("Could not import Vectara. Please install it with `pip install langchain-community`.")
msg = "Could not import Vectara. Please install it with `pip install langchain-community`."
raise ImportError(msg)
vectara = Vectara(self.vectara_customer_id, self.vectara_corpus_id, self.vectara_api_key)
rerank_config = RerankConfig(self.reranker, self.reranker_k, self.diversity_bias)

View file

@ -13,7 +13,8 @@ def validate_icon(value: str, *args, **kwargs):
elif not value.startswith(":") or not value.endswith(":"):
# emoji should have both starting and ending colons
# so if one of them is missing, we will raise
raise ValueError(f"Invalid emoji. {value} is not a valid emoji.")
msg = f"Invalid emoji. {value} is not a valid emoji."
raise ValueError(msg)
emoji_value = emoji.emojize(value, variant="emoji_type")
if value == emoji_value:

View file

@ -66,7 +66,8 @@ class CodeParser:
self.cache: TTLCache = TTLCache(maxsize=1024, ttl=60)
if isinstance(code, type):
if not inspect.isclass(code):
raise ValueError("The provided code must be a class.")
msg = "The provided code must be a class."
raise ValueError(msg)
# If the code is a class, get its source code
code = inspect.getsource(code)
self.code = code

View file

@ -138,11 +138,13 @@ class Component(CustomComponent):
try:
module = inspect.getmodule(self.__class__)
if module is None:
raise ValueError("Could not find module for class")
msg = "Could not find module for class"
raise ValueError(msg)
class_code = inspect.getsource(module)
self._code = class_code
except OSError:
raise ValueError(f"Could not find source code for {self.__class__.__name__}")
msg = f"Could not find source code for {self.__class__.__name__}"
raise ValueError(msg)
def set(self, **kwargs):
"""
@ -209,7 +211,8 @@ class Component(CustomComponent):
"""
if name in self._inputs:
return self._inputs[name]
raise ValueError(f"Input {name} not found in {self.__class__.__name__}")
msg = f"Input {name} not found in {self.__class__.__name__}"
raise ValueError(msg)
def get_output(self, name: str) -> Any:
"""
@ -226,20 +229,23 @@ class Component(CustomComponent):
"""
if name in self._outputs_map:
return self._outputs_map[name]
raise ValueError(f"Output {name} not found in {self.__class__.__name__}")
msg = f"Output {name} not found in {self.__class__.__name__}"
raise ValueError(msg)
def set_on_output(self, name: str, **kwargs):
output = self.get_output(name)
for key, value in kwargs.items():
if not hasattr(output, key):
raise ValueError(f"Output {name} does not have a method {key}")
msg = f"Output {name} does not have a method {key}"
raise ValueError(msg)
setattr(output, key, value)
def set_output_value(self, name: str, value: Any):
if name in self._outputs_map:
self._outputs_map[name].value = value
else:
raise ValueError(f"Output {name} not found in {self.__class__.__name__}")
msg = f"Output {name} not found in {self.__class__.__name__}"
raise ValueError(msg)
def map_outputs(self, outputs: list[Output]):
"""
@ -256,7 +262,8 @@ class Component(CustomComponent):
"""
for output in outputs:
if output.name is None:
raise ValueError("Output name cannot be None.")
msg = "Output name cannot be None."
raise ValueError(msg)
# Deepcopy is required to avoid modifying the original component;
# allows each instance of each component to modify its own output
self._outputs_map[output.name] = deepcopy(output)
@ -274,7 +281,8 @@ class Component(CustomComponent):
"""
for input_ in inputs:
if input_.name is None:
raise ValueError("Input name cannot be None.")
msg = "Input name cannot be None."
raise ValueError(msg)
self._inputs[input_.name] = deepcopy(input_)
def validate(self, params: dict):
@ -303,7 +311,8 @@ class Component(CustomComponent):
output = next((output for output in self._outputs_map.values() if output.method == method.__name__), None)
if output is None:
method_name = method.__name__ if hasattr(method, "__name__") else str(method)
raise ValueError(f"Output with method {method_name} not found")
msg = f"Output with method {method_name} not found"
raise ValueError(msg)
return output
def _inherits_from_component(self, method: Callable):
@ -340,13 +349,15 @@ class Component(CustomComponent):
matching_pairs.append((output, input_))
if len(matching_pairs) > 1:
matching_pairs_str = self._build_error_string_from_matching_pairs(matching_pairs)
raise ValueError(
msg = (
f"There are multiple outputs from {value.__class__.__name__} "
f"that can connect to inputs in {self.__class__.__name__}: {matching_pairs_str}"
)
raise ValueError(msg)
output, input_ = matching_pairs[0]
if not isinstance(output.method, str):
raise ValueError(f"Method {output.method} is not a valid output of {value.__class__.__name__}")
msg = f"Method {output.method} is not a valid output of {value.__class__.__name__}"
raise ValueError(msg)
return getattr(value, output.method)
def _process_connection_or_parameter(self, key, value):
@ -361,9 +372,8 @@ class Component(CustomComponent):
try:
self._method_is_valid_output(value)
except ValueError:
raise ValueError(
f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}"
)
msg = f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}"
raise ValueError(msg)
self._connect_to_component(key, value, _input)
else:
self._set_parameter_or_attribute(key, value)
@ -416,10 +426,11 @@ class Component(CustomComponent):
def _set_parameter_or_attribute(self, key, value):
if isinstance(value, Component):
methods = ", ".join([f"'{output.method}'" for output in value.outputs])
raise ValueError(
msg = (
f"You set {value.display_name} as value for `{key}`. "
f"You should pass one of the following: {methods}"
)
raise ValueError(msg)
self._set_input_value(key, value)
self._parameters[key] = value
self._attributes[key] = value
@ -453,26 +464,28 @@ class Component(CustomComponent):
return self.__dict__[f"_{name}"]
if name.startswith("_") and name[1:] in BACKWARDS_COMPATIBLE_ATTRIBUTES:
return self.__dict__[name]
raise AttributeError(f"{name} not found in {self.__class__.__name__}")
msg = f"{name} not found in {self.__class__.__name__}"
raise AttributeError(msg)
def _set_input_value(self, name: str, value: Any):
if name in self._inputs:
input_value = self._inputs[name].value
if isinstance(input_value, Component):
methods = ", ".join([f"'{output.method}'" for output in input_value.outputs])
raise ValueError(
msg = (
f"You set {input_value.display_name} as value for `{name}`. "
f"You should pass one of the following: {methods}"
)
raise ValueError(msg)
if callable(input_value):
raise ValueError(
f"Input {name} is connected to {input_value.__self__.display_name}.{input_value.__name__}"
)
msg = f"Input {name} is connected to {input_value.__self__.display_name}.{input_value.__name__}"
raise ValueError(msg)
self._inputs[name].value = value
if hasattr(self._inputs[name], "load_from_db"):
self._inputs[name].load_from_db = False
else:
raise ValueError(f"Input {name} not found in {self.__class__.__name__}")
msg = f"Input {name} not found in {self.__class__.__name__}"
raise ValueError(msg)
def _validate_outputs(self):
# Raise Error if some rule isn't met
@ -489,10 +502,12 @@ class Component(CustomComponent):
except KeyError:
close_match = find_closest_match(name, list(template.keys()))
if close_match:
raise ValueError(
msg = (
f"Parameter '{name}' not found in {self.__class__.__name__}. " f"Did you mean '{close_match}'?"
)
raise ValueError(f"Parameter {name} not found in {self.__class__.__name__}. ")
raise ValueError(msg)
msg = f"Parameter {name} not found in {self.__class__.__name__}. "
raise ValueError(msg)
def _get_method_return_type(self, method_name: str) -> list[str]:
method = getattr(self, method_name)
@ -569,10 +584,11 @@ class Component(CustomComponent):
_attributes = {}
for key, value in params.items():
if key in self.__dict__ and value != getattr(self, key):
raise ValueError(
msg = (
f"{self.__class__.__name__} defines an input parameter named '{key}' "
f"that is a reserved word and cannot be used."
)
raise ValueError(msg)
_attributes[key] = value
for key, input_obj in self._inputs.items():
if key not in _attributes:
@ -634,7 +650,8 @@ class Component(CustomComponent):
or output.name in self._vertex.edges_source_names
):
if output.method is None:
raise ValueError(f"Output {output.name} does not have a method defined.")
msg = f"Output {output.name} does not have a method defined."
raise ValueError(msg)
self._current_output = output.name
method: Callable = getattr(self, output.method)
if output.cache and output.value != UNDEFINED:

View file

@ -112,39 +112,48 @@ class CustomComponent(BaseComponent):
def update_state(self, name: str, value: Any):
if not self._vertex:
raise ValueError("Vertex is not set")
msg = "Vertex is not set"
raise ValueError(msg)
try:
self._vertex.graph.update_state(name=name, record=value, caller=self._vertex.id)
except Exception as e:
raise ValueError(f"Error updating state: {e}")
msg = f"Error updating state: {e}"
raise ValueError(msg)
def stop(self, output_name: str | None = None):
if not output_name and self._vertex and len(self._vertex.outputs) == 1:
output_name = self._vertex.outputs[0]["name"]
elif not output_name:
raise ValueError("You must specify an output name to call stop")
msg = "You must specify an output name to call stop"
raise ValueError(msg)
if not self._vertex:
raise ValueError("Vertex is not set")
msg = "Vertex is not set"
raise ValueError(msg)
try:
self.graph.mark_branch(vertex_id=self._vertex.id, output_name=output_name, state="INACTIVE")
except Exception as e:
raise ValueError(f"Error stopping {self.display_name}: {e}")
msg = f"Error stopping {self.display_name}: {e}"
raise ValueError(msg)
def append_state(self, name: str, value: Any):
if not self._vertex:
raise ValueError("Vertex is not set")
msg = "Vertex is not set"
raise ValueError(msg)
try:
self._vertex.graph.append_state(name=name, record=value, caller=self._vertex.id)
except Exception as e:
raise ValueError(f"Error appending state: {e}")
msg = f"Error appending state: {e}"
raise ValueError(msg)
def get_state(self, name: str):
if not self._vertex:
raise ValueError("Vertex is not set")
msg = "Vertex is not set"
raise ValueError(msg)
try:
return self._vertex.graph.get_state(name=name)
except Exception as e:
raise ValueError(f"Error getting state: {e}")
msg = f"Error getting state: {e}"
raise ValueError(msg)
@staticmethod
def resolve_path(path: str) -> str:
@ -270,14 +279,16 @@ class CustomComponent(BaseComponent):
try:
data_dict[key] = model_dump[key]
except KeyError:
raise ValueError(f"Key {key} not found in {item}")
msg = f"Key {key} not found in {item}"
raise ValueError(msg)
elif isinstance(item, str):
data_dict = {"text": item}
elif isinstance(item, dict):
data_dict = item.copy()
else:
raise ValueError(f"Invalid data type: {type(item)}")
msg = f"Invalid data type: {type(item)}"
raise ValueError(msg)
data_objects.append(Data(data=data_dict))
@ -416,7 +427,8 @@ class CustomComponent(BaseComponent):
def get_variable(name: str, field: str):
if hasattr(self, "_user_id") and not self.user_id:
raise ValueError(f"User id is not set for {self.__class__.__name__}")
msg = f"User id is not set for {self.__class__.__name__}"
raise ValueError(msg)
variable_service = get_variable_service() # Get service instance
# Retrieve and decrypt the variable by name for the current user
with session_scope() as session:
@ -436,7 +448,8 @@ class CustomComponent(BaseComponent):
List[str]: The names of the variables for the current user.
"""
if hasattr(self, "_user_id") and not self.user_id:
raise ValueError(f"User id is not set for {self.__class__.__name__}")
msg = f"User id is not set for {self.__class__.__name__}"
raise ValueError(msg)
variable_service = get_variable_service()
with session_scope() as session:
@ -469,7 +482,8 @@ class CustomComponent(BaseComponent):
async def load_flow(self, flow_id: str, tweaks: dict | None = None) -> Graph:
if not self.user_id:
raise ValueError("Session is invalid")
msg = "Session is invalid"
raise ValueError(msg)
return await load_flow(user_id=str(self._user_id), flow_id=flow_id, tweaks=tweaks)
async def run_flow(
@ -492,11 +506,13 @@ class CustomComponent(BaseComponent):
def list_flows(self) -> list[Data]:
if not self.user_id:
raise ValueError("Session is invalid")
msg = "Session is invalid"
raise ValueError(msg)
try:
return list_flows(user_id=str(self._user_id))
except Exception as e:
raise ValueError(f"Error listing flows: {e}")
msg = f"Error listing flows: {e}"
raise ValueError(msg)
def build(self, *args: Any, **kwargs: Any) -> Any:
"""

View file

@ -124,7 +124,8 @@ class DirectoryReader:
Walk through the directory path and return a list of all .py files.
"""
if not (safe_path := self.get_safe_path()):
raise CustomComponentPathValueError(f"The path needs to start with '{self.base_path}'.")
msg = f"The path needs to start with '{self.base_path}'."
raise CustomComponentPathValueError(msg)
file_list = []
safe_path_obj = Path(safe_path)

Some files were not shown because too many files have changed in this diff Show more