🐛 fix(callback.py): replace ChatResponse with PromptResponse in AsyncStreamingLLMCallbackHandler to correctly handle prompt after formatting

🔀 chore(schemas.py): add PromptResponse schema to handle prompt responses in addition to ChatResponse schema
This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-09-29 11:28:36 -03:00 committed by anovazzi1
commit ba2736f085
2 changed files with 11 additions and 6 deletions

View file

@ -3,7 +3,7 @@ from uuid import UUID
from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
from langflow.api.v1.schemas import ChatResponse
from langflow.api.v1.schemas import ChatResponse, PromptResponse
from typing import Any, Dict, List, Optional
@ -92,10 +92,7 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
if "Prompt after formatting" in text:
text = text.replace("Prompt after formatting:\n", "")
text = remove_ansi_escape_codes(text)
resp = ChatResponse(
message="",
type="stream",
intermediate_steps="",
resp = PromptResponse(
prompt=text,
)
await self.websocket.send_json(resp.dict())

View file

@ -86,7 +86,7 @@ class ChatResponse(ChatMessage):
"""Chat response schema."""
intermediate_steps: str
prompt: Optional[str] = ""
type: str
is_bot: bool = True
files: list = []
@ -98,6 +98,14 @@ class ChatResponse(ChatMessage):
return v
class PromptResponse(ChatMessage):
    """Prompt response schema.

    Sent over the websocket to surface the fully formatted prompt
    (i.e. the "Prompt after formatting" text emitted by the LLM
    callback handler) to the client, separately from regular chat
    stream messages.
    """

    # The formatted prompt text; required, unlike ChatResponse where
    # `prompt` is Optional with a default of "".
    prompt: str
    # Fixed discriminator so the frontend can route this message type.
    type: str = "prompt"
    # Prompt messages originate from the backend, never the user.
    is_bot: bool = True
class FileResponse(ChatMessage):
"""File response schema."""