Makes "Prompt after formatting" display correctly (#982)

Gabriel Luiz Freitas Almeida, 2023-09-28 12:04:33 -03:00, committed by GitHub
commit 1f62c596a2
7 changed files with 33 additions and 7 deletions


@@ -10,6 +10,7 @@ from typing import Any, Dict, List, Optional
 from fastapi import WebSocket
+from langflow.utils.util import remove_ansi_escape_codes
 from langchain.schema import AgentAction, AgentFinish
 from loguru import logger
@@ -85,6 +86,16 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
         # This runs when first sending the prompt
         # to the LLM, adding it will send the final prompt
         # to the frontend
+        if "Prompt after formatting" in text:
+            text = text.replace("Prompt after formatting:\n", "")
+            text = remove_ansi_escape_codes(text)
+            resp = ChatResponse(
+                message="",
+                type="stream",
+                intermediate_steps="",
+                prompt=text,
+            )
+            await self.websocket.send_json(resp.dict())

     async def on_agent_action(self, action: AgentAction, **kwargs: Any):
         log = f"Thought: {action.log}"


@@ -85,6 +85,7 @@ class ChatResponse(ChatMessage):
     """Chat response schema."""

     intermediate_steps: str
+    prompt: Optional[str] = ""
     type: str
     is_bot: bool = True
     files: list = []
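A note on why the field is declared `Optional[str] = ""`: every serialized ChatResponse keeps a `prompt` key, so frontend code reading an ordinary token payload sees an empty string rather than a missing field. `ChatResponseSketch` below is a trimmed, hypothetical stand-in for the real class (which extends ChatMessage):

```python
from typing import Optional
from pydantic import BaseModel

class ChatResponseSketch(BaseModel):
    # Trimmed stand-in for ChatResponse; field names match the schema above.
    message: str = ""
    type: str = "stream"
    intermediate_steps: str = ""
    prompt: Optional[str] = ""
    is_bot: bool = True

print(ChatResponseSketch(prompt="Human: hello").dict())
# {'message': '', 'type': 'stream', 'intermediate_steps': '',
#  'prompt': 'Human: hello', 'is_bot': True}
```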


@@ -202,7 +202,9 @@ class ChatService(Service):
             json_payload = await websocket.receive_json()
             try:
                 payload = orjson.loads(json_payload)
-            except Exception:
+            # except TypeError or JSONDecodeError how?
+            except Exception as exc:
+                logger.error(f"Error decoding JSON: {exc}")
                 payload = json_payload
             if "clear_history" in payload:
                 self.chat_history.history[client_id] = []
@@ -220,7 +222,7 @@ class ChatService(Service):
                 )
         except Exception as exc:
             # Handle any exceptions that might occur
-            logger.error(f"Error handling websocket: {exc}")
+            logger.exception(f"Error handling websocket: {exc}")
             await self.close_connection(
                 client_id=client_id,
                 code=status.WS_1011_INTERNAL_ERROR,
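The switch from logger.error to logger.exception matters because loguru's logger.exception logs at ERROR level and appends the active exception's traceback, which error alone does not. A quick way to see the difference:

```python
from loguru import logger

try:
    raise ValueError("boom")
except Exception as exc:
    logger.error(f"Error handling websocket: {exc}")      # message only
    logger.exception(f"Error handling websocket: {exc}")  # message + full traceback
```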


@@ -11,6 +11,10 @@ from langflow.utils import constants
 from langchain.schema import Document


+def remove_ansi_escape_codes(text):
+    return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text)
+
+
 def build_template_from_function(
     name: str, type_to_loader_dict: Dict, add_function: bool = False
 ):
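The regex targets CSI escape sequences (ESC, `[`, optional numeric parameters, one letter), which covers the color and style codes LangChain uses when pretty-printing prompts. A small self-check, reusing the function exactly as added:

```python
import re

def remove_ansi_escape_codes(text):
    return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text)

# Bold, multi-parameter color, and reset codes are all stripped.
assert remove_ansi_escape_codes("\x1b[1m\x1b[32;1mHuman: hi\x1b[0m") == "Human: hi"
```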


@@ -1,4 +1,4 @@
-import axios, { AxiosResponse } from "axios";
+import { AxiosResponse } from "axios";
 import { ReactFlowJsonObject } from "reactflow";
 import { BASE_URL_API } from "../../constants/constants";
 import { api } from "../../controllers/API/api";
@@ -522,4 +522,4 @@ export async function deleteApiKey(api_key: string) {
     console.log("Error:", error);
     throw error;
   }
-}
+}


@@ -125,12 +125,13 @@ export default function FormModal({
   function updateLastMessage({
     str,
     thought,
+    prompt,
     end = false,
     files,
   }: {
     str?: string;
     thought?: string;
-    // end param default is false
+    prompt?: string;
     end?: boolean;
     files?: Array<any>;
   }) {
@@ -150,6 +151,9 @@ export default function FormModal({
       if (files) {
         newChat[newChat.length - 1].files = files;
       }
+      if (prompt) {
+        newChat[newChat.length - 2].template = prompt;
+      }
       return newChat;
     });
   }
@@ -201,7 +205,6 @@ export default function FormModal({
   }

   function handleWsMessage(data: any) {
-    console.log(data);
     if (Array.isArray(data) && data.length > 0) {
       //set chat history
       setChatHistory((_) => {
@@ -267,7 +270,11 @@ export default function FormModal({
         isStream = false;
       }
       if (data.type === "stream" && isStream) {
-        updateLastMessage({ str: data.message });
+        if (data.prompt) {
+          updateLastMessage({ prompt: data.prompt });
+        } else {
+          updateLastMessage({ str: data.message });
+        }
       }
     }
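Taken together, a "stream" message now has two meanings: a payload carrying a non-empty prompt fills the template of the user's message (index length - 2 in updateLastMessage above), while ordinary token payloads keep appending to the bot's reply (index length - 1). A hedged sketch of the dispatch, written in Python for consistency with the earlier examples and ignoring the isStream flag:

```python
def route_stream_payload(data: dict) -> str:
    # Mirrors the new handleWsMessage branch for type == "stream".
    if data.get("prompt"):
        return "updateLastMessage({ prompt: data.prompt })"
    return "updateLastMessage({ str: data.message })"

print(route_stream_payload({"type": "stream", "message": "", "prompt": "Human: hello"}))
print(route_stream_payload({"type": "stream", "message": "Hel", "prompt": ""}))
```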


@@ -8,5 +8,6 @@ export type ChatMessageType = {
   isSend: boolean;
   thought?: string;
   files?: Array<{ data: string; type: string; data_type: string }>;
+  prompt?: string;
   chatKey: string;
 };