
Commit

Fix chat stream response on multiline content json (#1666)
beastoin authored Jan 10, 2025
2 parents 32f3ba8 + d0f0f6b commit 7bff9b9
Showing 3 changed files with 11 additions and 6 deletions.
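
The stream frames each payload as a "message: ..." or "done: ..." line followed by a blank line, and the Dart client matches those prefixes line by line. The commit base64-encodes the serialized JSON on the backend and decodes it again on the client, so each framed payload stays on a single line no matter what the message content contains. A minimal round-trip sketch of that pattern in plain Python (the names here are illustrative; the real code is in the diffs below):

import base64
import json

# Server side: serialize the message, then base64-encode the UTF-8 bytes so
# the framed line can never contain a newline or collide with the delimiter.
payload = {"id": "123", "text": "multi\nline\ncontent"}
encoded = base64.b64encode(json.dumps(payload).encode("utf-8")).decode("utf-8")
frame = f"done: {encoded}\n\n"

# Client side: strip the prefix, base64-decode, then parse the JSON.
line = frame.strip()
assert line.startswith("done: ")
decoded = base64.b64decode(line[len("done: "):]).decode("utf-8")
assert json.loads(decoded) == payload
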
6 changes: 3 additions & 3 deletions app/lib/backend/http/api/messages.dart
@@ -114,7 +114,7 @@ Stream<ServerMessageChunk> sendMessageStreamServer(String text, {String? appId})
       }

       if (line.startsWith('done: ')) {
-        var text = line.substring(6);
+        var text = utf8.decode(base64.decode(line.substring(6)));
         debugPrint(text);
         yield ServerMessageChunk(messageId, text, MessageChunkType.done,
             message: ServerMessage.fromJson(json.decode(text)));
@@ -177,14 +177,14 @@ Stream<ServerMessageChunk> sendVoiceMessageStreamServer(List<File> files) async*
       }

       if (line.startsWith('done: ')) {
-        var text = line.substring(6);
+        var text = utf8.decode(base64.decode(line.substring(6)));
         yield ServerMessageChunk(messageId, text, MessageChunkType.done,
             message: ServerMessage.fromJson(json.decode(text)));
         continue;
       }

       if (line.startsWith('message: ')) {
-        var text = line.substring(9);
+        var text = utf8.decode(base64.decode(line.substring(9)));
         yield ServerMessageChunk(messageId, text, MessageChunkType.message,
             message: ServerMessage.fromJson(json.decode(text)));
         continue;
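
On the client side, the payload after the "message: " / "done: " prefix is now base64-decoded and UTF-8-decoded before json.decode, mirroring the encoding added on the backend below.
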
4 changes: 3 additions & 1 deletion backend/routers/chat.py
@@ -1,6 +1,7 @@
 import uuid
 import re
 import json
+import base64
 from datetime import datetime, timezone
 from typing import List, Optional

@@ -106,7 +107,8 @@ async def generate_stream():
         ai_message_dict = ai_message.dict()
         response_message = ResponseMessage(**ai_message_dict)
         response_message.ask_for_nps = ask_for_nps
-        yield f"done: {response_message.model_dump_json()}\n\n"
+        data = base64.b64encode(bytes(response_message.model_dump_json(), 'utf-8')).decode('utf-8')
+        yield f"done: {data}\n\n"

     return StreamingResponse(
         generate_stream(),
7 changes: 5 additions & 2 deletions backend/utils/chat.py
@@ -1,5 +1,6 @@
 import threading
 import time
+import base64
 import uuid
 from datetime import datetime, timezone
 from typing import List, AsyncGenerator
@@ -111,7 +112,8 @@ def delete_file():
     chat_db.add_message(uid, message.dict())

     # stream
-    yield f"message: {message.model_dump_json()}\n\n"
+    mdata = base64.b64encode(bytes(message.model_dump_json(), 'utf-8')).decode('utf-8')
+    yield f"message: {mdata}\n\n"

     # not support plugin
     plugin = None
@@ -162,7 +164,8 @@ def process_message(response: str, callback_data: dict):
     ai_message_dict = ai_message.dict()
     response_message = ResponseMessage(**ai_message_dict)
     response_message.ask_for_nps = ask_for_nps
-    yield f"done: {response_message.model_dump_json()}\n\n"
+    data = base64.b64encode(bytes(response_message.model_dump_json(), 'utf-8')).decode('utf-8')
+    yield f"done: {data}\n\n"

     # send notification
     token = notification_db.get_token_only(uid)
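
All three backend call sites repeat the same encode-then-frame expression. A small helper along these lines (hypothetical, not part of this commit) would keep the framing in one place:

import base64

def b64_frame(prefix: str, json_text: str) -> str:
    # Base64-encode the serialized JSON so the streamed frame stays on one line.
    data = base64.b64encode(json_text.encode("utf-8")).decode("utf-8")
    return f"{prefix}: {data}\n\n"

# Usage at the existing call sites, e.g.:
#   yield b64_frame("done", response_message.model_dump_json())
#   yield b64_frame("message", message.model_dump_json())
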
