🐛 fix(callback.py): replace ChatResponse with PromptResponse in AsyncStreamingLLMCallbackHandler to correctly handle prompt after formatting
🔀 chore(schemas.py): add PromptResponse schema to handle prompt responses in addition to ChatResponse schema
This commit is contained in:
parent 3bf055a990
commit ba2736f085
2 changed files with 11 additions and 6 deletions
@@ -3,7 +3,7 @@ from uuid import UUID

 from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler

-from langflow.api.v1.schemas import ChatResponse
+from langflow.api.v1.schemas import ChatResponse, PromptResponse


 from typing import Any, Dict, List, Optional
|
@@ -92,10 +92,7 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
         if "Prompt after formatting" in text:
             text = text.replace("Prompt after formatting:\n", "")
             text = remove_ansi_escape_codes(text)
-            resp = ChatResponse(
-                message="",
-                type="stream",
-                intermediate_steps="",
+            resp = PromptResponse(
                 prompt=text,
             )
             await self.websocket.send_json(resp.dict())
|
|
|||
|
|
@@ -86,7 +86,7 @@ class ChatResponse(ChatMessage):
     """Chat response schema."""

     intermediate_steps: str
-    prompt: Optional[str] = ""
+
     type: str
     is_bot: bool = True
     files: list = []
|
@@ -98,6 +98,14 @@ class ChatResponse(ChatMessage):
         return v

class PromptResponse(ChatMessage):
    """Message schema used to stream a fully formatted prompt to the client.

    Emitted by the streaming callback handler when the "Prompt after
    formatting" text is intercepted, instead of a regular ChatResponse.
    """

    # The rendered prompt text (template already filled in).
    prompt: str
    # Discriminator so the websocket client can tell prompt messages apart.
    type: str = "prompt"
    # Prompt notifications originate from the bot side of the conversation.
    is_bot: bool = True
|


class FileResponse(ChatMessage):
    """File response schema."""
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue