refactor: Add get_message_from_openai_exception function to handle OpenAI exceptions

This commit adds a new function `get_message_from_openai_exception` to exceptions.py. It extracts the error message from exceptions raised by the OpenAI library and returns it (or None when no message is available), so model components can surface a clear, user-facing error instead of a raw library traceback. This improves error handling and makes the code more robust.
This commit is contained in:
ogabrielluiz 2024-06-18 23:07:12 -03:00
commit ddd4ebf4fa
2 changed files with 50 additions and 20 deletions

View file

@@ -0,0 +1,19 @@
def get_message_from_openai_exception(exception: Exception) -> str:
"""
Get a message from an OpenAI exception.
Args:
exception (Exception): The exception to get the message from.
Returns:
str: The message from the exception.
"""
try:
from openai import BadRequestError
except ImportError:
return
if isinstance(exception, BadRequestError):
message = exception.body.get("message")
if message:
return message
return

View file

@@ -6,6 +6,7 @@ from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.language_models.llms import LLM
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langflow.base.models.exceptions import get_message_from_openai_exception
from langflow.custom import Component
from langflow.schema.message import Message
@ -26,13 +27,18 @@ class LCModelComponent(Component):
Returns:
The result obtained from the output object.
"""
if stream:
result = runnable.stream(input_value)
else:
message = runnable.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result
try:
if stream:
result = runnable.stream(input_value)
else:
message = runnable.invoke(input_value)
result = message.content if hasattr(message, "content") else message
self.status = result
return result
except Exception as e:
if message := get_message_from_openai_exception(e):
raise ValueError(message)
raise e
def build_status_message(self, message: AIMessage):
"""
@ -104,17 +110,22 @@ class LCModelComponent(Component):
else:
messages.append(HumanMessage(content=input_value))
inputs = messages or {}
if stream:
return runnable.stream(inputs)
else:
message = runnable.invoke(inputs)
result = message.content if hasattr(message, "content") else message
if isinstance(message, AIMessage):
status_message = self.build_status_message(message)
self.status = status_message
elif isinstance(result, dict):
result = json.dumps(message, indent=4)
self.status = result
try:
if stream:
return runnable.stream(inputs)
else:
self.status = result
return result
message = runnable.invoke(inputs)
result = message.content if hasattr(message, "content") else message
if isinstance(message, AIMessage):
status_message = self.build_status_message(message)
self.status = status_message
elif isinstance(result, dict):
result = json.dumps(message, indent=4)
self.status = result
else:
self.status = result
return result
except Exception as e:
if message := get_message_from_openai_exception(e):
raise ValueError(message)
raise e