Mirror of https://github.com/AppFlowy-IO/AppFlowy.git (synced 2025-04-24 06:37:14 -04:00)
fix: loading message hot fix (#6858)

parent 6a02679963, commit df82c99755

2 changed files with 39 additions and 31 deletions
@@ -1,5 +1,3 @@
import 'dart:async';

import 'package:appflowy/plugins/ai_chat/application/chat_entity.dart';
import 'package:appflowy/plugins/ai_chat/application/chat_message_stream.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
@@ -25,39 +23,24 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
            parseMetadata(refSourceJsonString),
          ),
        ) {
    _dispatch();

    if (state.stream != null) {
      state.stream!.listen(
        onData: (text) {
          if (!isClosed) {
            add(ChatAIMessageEvent.updateText(text));
          }
        },
        onError: (error) {
          if (!isClosed) {
            add(ChatAIMessageEvent.receiveError(error.toString()));
          }
        },
        onAIResponseLimit: () {
          if (!isClosed) {
            add(const ChatAIMessageEvent.onAIResponseLimit());
          }
        },
        onMetadata: (metadata) {
          if (!isClosed) {
            add(ChatAIMessageEvent.receiveMetadata(metadata));
          }
        },
      );
      _startListening();

      if (state.stream!.error != null) {
        Future.delayed(const Duration(milliseconds: 300), () {
          if (!isClosed) {
            add(ChatAIMessageEvent.receiveError(state.stream!.error!));
          }
        });
        add(ChatAIMessageEvent.receiveError(state.stream!.error!));
      }
      if (state.stream!.aiLimitReached) {
        add(const ChatAIMessageEvent.onAIResponseLimit());
      }
    }
  }

  final String chatId;
  final Int64? questionId;

  void _dispatch() {
    on<ChatAIMessageEvent>(
      (event, emit) {
        event.when(
@@ -130,8 +113,30 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
    );
  }

  final String chatId;
  final Int64? questionId;
  void _startListening() {
    state.stream!.listen(
      onData: (text) {
        if (!isClosed) {
          add(ChatAIMessageEvent.updateText(text));
        }
      },
      onError: (error) {
        if (!isClosed) {
          add(ChatAIMessageEvent.receiveError(error.toString()));
        }
      },
      onAIResponseLimit: () {
        if (!isClosed) {
          add(const ChatAIMessageEvent.onAIResponseLimit());
        }
      },
      onMetadata: (metadata) {
        if (!isClosed) {
          add(ChatAIMessageEvent.receiveMetadata(metadata));
        }
      },
    );
  }
}

@freezed
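Read together, the hunks above suggest the shape of the bloc-side fix: the inline state.stream!.listen(...) call moves out of the constructor into a _startListening() helper, and after subscribing the constructor replays anything the stream recorded before the bloc existed. An already-stored error is re-added after a short Future.delayed, and an already-reached AI response limit is handled immediately. Below is a minimal, self-contained sketch of that replay pattern, not the AppFlowy code itself; FakeAnswerStream, MessageConsumer, and their members are invented stand-ins.

import 'dart:async';

/// Invented stand-in for AnswerStream: it caches the last error and whether
/// the AI response limit was hit, so a consumer created later can catch up.
class FakeAnswerStream {
  String? error;
  bool aiLimitReached = false;
  final StreamController<String> _controller = StreamController.broadcast();

  void listen({required void Function(String) onData}) {
    _controller.stream.listen(onData);
  }

  void addText(String text) => _controller.add(text);
}

/// Invented consumer mirroring the constructor logic in the diff:
/// subscribe first, then replay state the stream already recorded.
class MessageConsumer {
  MessageConsumer(this.stream) {
    _startListening();

    // Replay an error that arrived before this consumer was created,
    // deferred briefly so the consumer finishes initializing first.
    if (stream.error != null) {
      Future.delayed(const Duration(milliseconds: 300), () {
        _handleError(stream.error!);
      });
    }
    if (stream.aiLimitReached) {
      _handleLimit();
    }
  }

  final FakeAnswerStream stream;

  void _startListening() {
    stream.listen(onData: (text) => print('text: $text'));
  }

  void _handleError(String error) => print('error: $error');

  void _handleLimit() => print('AI response limit reached');
}

Future<void> main() async {
  // The stream failed before anyone was listening; the consumer still reports it.
  final stream = FakeAnswerStream()..error = 'network unreachable';
  MessageConsumer(stream);
  stream.addText('partial answer');
  await Future<void>.delayed(const Duration(milliseconds: 400));
}

The 300 ms delay mirrors the value visible in the hunk; presumably it gives the bloc time to finish registering its event handlers before the error event is added.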
@@ -27,6 +27,7 @@ class AnswerStream {
        _onMetadata!(parseMetadata(s));
      }
    } else if (event == "AI_RESPONSE_LIMIT") {
      _aiLimitReached = true;
      if (_onAIResponseLimit != null) {
        _onAIResponseLimit!();
      }
@@ -49,6 +50,7 @@ class AnswerStream {
  final StreamController<String> _controller = StreamController.broadcast();
  late StreamSubscription<String> _subscription;
  bool _hasStarted = false;
  bool _aiLimitReached = false;
  String? _error;
  String _text = "";

@@ -63,6 +65,7 @@ class AnswerStream {
  int get nativePort => _port.sendPort.nativePort;
  bool get hasStarted => _hasStarted;
  String? get error => _error;
  bool get aiLimitReached => _aiLimitReached;
  String get text => _text;

  Future<void> dispose() async {
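The three AnswerStream hunks cover the producer side of the same idea: alongside the cached _error and _text, the stream now remembers a _aiLimitReached flag, set when an "AI_RESPONSE_LIMIT" event is seen and exposed through an aiLimitReached getter so a listener attached later can still discover it. The sketch below illustrates that caching under invented event formats, with the native port plumbing omitted; it is not the actual AnswerStream implementation.

import 'dart:async';

/// Simplified sketch of a stream wrapper that caches terminal conditions so a
/// listener attached later can still discover them. Event formats are invented.
class SketchAnswerStream {
  final StreamController<String> _controller = StreamController.broadcast();
  bool _aiLimitReached = false;
  String? _error;
  String _text = '';

  bool get aiLimitReached => _aiLimitReached;
  String? get error => _error;
  String get text => _text;

  /// Route a raw event, remembering limit and error state for late readers.
  void handleEvent(String event) {
    if (event == 'AI_RESPONSE_LIMIT') {
      _aiLimitReached = true; // remembered even if nobody is listening yet
    } else if (event.startsWith('error:')) {
      _error = event.substring('error:'.length);
    } else {
      _text += event;
      _controller.add(_text);
    }
  }

  StreamSubscription<String> listen(void Function(String) onData) =>
      _controller.stream.listen(onData);

  Future<void> dispose() => _controller.close();
}

void main() {
  final stream = SketchAnswerStream()
    ..handleEvent('Hello')
    ..handleEvent('AI_RESPONSE_LIMIT');

  // A reader that shows up afterwards can still see what it missed.
  print(stream.text); // Hello
  print(stream.aiLimitReached); // true
}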