import 'dart:async';
import 'dart:collection';
import 'dart:io';
import 'dart:math'; // used for min() in the commented-out playLocalTestVideo helper
import 'dart:ui' as ui;

import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:webview_flutter/webview_flutter.dart';

import '../../../../tools/baseGetXController.dart';

class H264WebViewLogic extends BaseGetXController {
  final H264WebViewState state = H264WebViewState();

  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

  // Mock/test playback related fields
  static const int CHUNK_SIZE = 4096;
  Timer? _mockDataTimer;
  int _startAudioTime = 0; // timestamp of the first played audio frame
  int audioBufferSize = 2; // audio buffers 2 frames by default
  bool _isFirstAudioFrame = true; // whether the next frame is the first one

  // Audio frame buffering and send helpers
  final List<int> _bufferedAudioFrames = <int>[];
  final Queue<List<int>> _frameBuffer = Queue<List<int>>();
  static const int FRAME_BUFFER_SIZE = 25;

  // Listening flag and stream subscription
  bool _isListening = false;
  StreamSubscription? _streamSubscription;

  @override
  void onInit() {
    // Initialize the WebView controller
    state.webViewController = WebViewController()
      ..setJavaScriptMode(JavaScriptMode.unrestricted)
      ..enableZoom(false)
      ..addJavaScriptChannel(
        'Flutter',
        onMessageReceived: (message) {
          print("Message from HTML: ${message.message}");
        },
      );

    super.onInit();
    // Create the data stream listener
    _createFramesStreamListen();
    _startListenTalkStatus();
    state.talkStatus.value = state.startChartTalkStatus.status;
    // Initialize the audio player
    _initFlutterPcmSound();

    // Initialize the audio recorder
    _initAudioRecorder();

    // Load the local HTML page
    _loadLocalHtml();

    // playLocalTestVideo();

    requestPermissions();
  }

  Future<void> requestPermissions() async {
    // Request storage permission
    var storageStatus = await Permission.storage.request();
    // Request microphone permission
    var microphoneStatus = await Permission.microphone.request();

    if (storageStatus.isGranted && microphoneStatus.isGranted) {
      print("Permissions granted");
    } else {
      print("Permissions denied");
      // If permission was permanently denied, send the user to the app settings page
      if (await Permission.storage.isPermanentlyDenied) {
        openAppSettings(); // open the app settings page
      }
    }
  }

  /// Initialize the audio recorder
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }

  /// Initialize the audio player
  void _initFlutterPcmSound() {
    const int sampleRate = 8000;
    FlutterPcmSound.setLogLevel(LogLevel.none);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
    // Set the feed threshold
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(1024); // Android-specific value
    } else {
      FlutterPcmSound.setFeedThreshold(2000); // other platforms
    }
  }

  void _createFramesStreamListen() async {
    // Guard against duplicate listeners
    if (_isListening) {
      AppLog.log("A talk data stream listener already exists; skipping duplicate subscription");
      return;
    }

    AppLog.log("==== Starting a new talk data stream listener ====");
    _isListening = true;
    _streamSubscription = state.talkDataRepository.talkDataStream
        .listen((TalkDataModel talkDataModel) async {
      final talkData = talkDataModel.talkData;
      final contentType = talkData!.contentType;
      final currentTime = DateTime.now().millisecondsSinceEpoch;

      // Dispatch by content type
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          if (state.isShowLoading.isFalse) {
            // Record the start time when the first audio frame arrives
            if (_isFirstAudioFrame) {
              _startAudioTime = currentTime;
              _isFirstAudioFrame = false;
            }

            // Compute the audio delay
            final expectedTime = _startAudioTime + talkData.durationMs;
            final audioDelay = currentTime - expectedTime;

            // If the delay is too large, clear the buffer and play what is pending
            if (audioDelay > 500) {
              state.audioBuffer.clear();
              if (state.isOpenVoice.value) {
                _playAudioFrames();
              }
              return;
            }
            if (state.audioBuffer.length >= audioBufferSize) {
              state.audioBuffer.removeAt(0); // drop the oldest frame
            }
            state.audioBuffer.add(talkData); // enqueue the new frame
            // Audio playback, mirroring the video path
            _playAudioFrames();
          }

          break;
        case TalkData_ContentTypeE.H264:
          // Append the new frame to the buffer
          _frameBuffer.add(talkData.content);

          // When the buffer exceeds its capacity, send the oldest frames
          while (_frameBuffer.length > FRAME_BUFFER_SIZE) {
            if (_frameBuffer.isNotEmpty) {
              final frame = _frameBuffer.removeFirst();
              await _sendBufferedData(frame);
            }
            if (state.isShowLoading.isTrue) {
              state.isShowLoading.value = false;
            }
          }
          break;
      }
    });
  }

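  // Buffering strategy used above, for reference:
  // - G711 audio sits in a small jitter buffer of `audioBufferSize` (2) frames;
  //   a frame arriving more than 500 ms later than expected clears the buffer
  //   and is itself dropped.
  // - H264 video is queued in `_frameBuffer` and only drained once it holds more
  //   than FRAME_BUFFER_SIZE (25) frames, so playback starts with a small backlog.
  //   Assuming the device sends roughly 25 fps (not stated in this file), that
  //   backlog is about one second of video.
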
  /// Play a local test video file
  // Future<void> playLocalTestVideo() async {
  //   try {
  //     ByteData data = await rootBundle.load('assets/html/demo.h264');
  //     List<int> bytes = data.buffer.asUint8List();
  //
  //     int offset = 0;
  //     _mockDataTimer = Timer.periodic(Duration(milliseconds: 40), (timer) {
  //       if (offset >= bytes.length) {
  //         timer.cancel();
  //         return;
  //       }
  //
  //       int end = min(offset + CHUNK_SIZE, bytes.length);
  //       List<int> chunk = bytes.sublist(offset, end);
  //       _sendBufferedData(chunk);
  //
  //       offset += CHUNK_SIZE;
  //     });
  //   } catch (e) {
  //     AppLog.log('Failed to load the test video file: $e');
  //   }
  // }

  // Audio frame playback logic
  void _playAudioFrames() {
    // Do not play if the buffer is empty or has not reached the target size.
    // The audio buffer is kept small to reduce latency.
    if (state.audioBuffer.isEmpty ||
        state.audioBuffer.length < audioBufferSize) {
      return;
    }

    // Find the audio frame with the smallest timestamp
    TalkData? oldestFrame;
    int oldestIndex = -1;
    for (int i = 0; i < state.audioBuffer.length; i++) {
      if (oldestFrame == null ||
          state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
        oldestFrame = state.audioBuffer[i];
        oldestIndex = i;
      }
    }

    // Make sure a valid frame was found
    if (oldestFrame != null && oldestIndex != -1) {
      if (state.isOpenVoice.value) {
        // Play the audio
        _playAudioData(oldestFrame);
      }
      state.audioBuffer.removeAt(oldestIndex);
    }
  }

  /// Play audio data
  void _playAudioData(TalkData talkData) async {
    if (state.isOpenVoice.value) {
      final list =
          G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
      // Convert the PCM data to a PcmArrayInt16
      final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
      FlutterPcmSound.feed(fromList);
      if (!state.isPlaying.value) {
        FlutterPcmSound.play();
        state.isPlaying.value = true;
      }
    }
  }

  /// Load the local HTML file
  Future<void> _loadLocalHtml() async {
    // Load the HTML file content
    final String fileHtmlContent =
        await rootBundle.loadString('assets/html/h264.html');

    // Load the JS file content
    final String jsContent =
        await rootBundle.loadString('assets/html/jmuxer.min.js');

    // Embed the JS content into the HTML
    final String htmlWithJs = fileHtmlContent.replaceAll(
        '<script src="jmuxer.min.js"></script>', // replace the external JS script tag
        '<script>$jsContent</script>' // inline the JS content
        );

    // Load the final HTML string into the WebView
    if (state.webViewController != null) {
      state.webViewController.loadHtmlString(htmlWithJs);
    }
  }

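  // Note on the inlining above: loadHtmlString is called without a baseUrl, so a
  // relative <script src="jmuxer.min.js"> reference in the asset HTML would not
  // resolve; embedding the script text inline avoids that. This assumes the HTML
  // contains exactly '<script src="jmuxer.min.js"></script>', since replaceAll
  // matches that literal tag only.
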
  // Send buffered frame data to the WebView
  Future<void> _sendBufferedData(List<int> buffer) async {
    // Forward the raw bytes to the JS player
    String jsCode = "feedDataFromFlutter($buffer);";
    await state.webViewController.runJavaScript(jsCode);

    if (state.isShowLoading.isTrue) {
      await Future.delayed(Duration(seconds: 1));
      state.isShowLoading.value = false;
    }
  }

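  // Example of the generated call: Dart's List<int>.toString() happens to be a
  // valid JS array literal, so for buffer = [0, 0, 0, 1, 103] the jsCode string
  // becomes "feedDataFromFlutter([0, 0, 0, 1, 103]);". The HTML side is assumed
  // to expose a global feedDataFromFlutter(bytes) function that feeds the bytes
  // into jMuxer.
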
  /// Listen for talk status changes
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          _handleInvalidTalkStatus();
          break;
        case TalkStatus.answeredSuccessfully:
          // Restart the call timer. cancel() does not null the reference,
          // so assign a new timer directly rather than using ??=.
          state.oneMinuteTimeTimer?.cancel();
          state.oneMinuteTimeTimer =
              Timer.periodic(const Duration(seconds: 1), (Timer t) {
            if (state.isShowLoading.isFalse) {
              state.oneMinuteTime.value++;
              // if (state.oneMinuteTime.value >= 60) {
              //   t.cancel(); // cancel the timer
              //   state.oneMinuteTime.value = 0;
              // }
            }
          });
          break;
        default:
          // No handling for other statuses
          break;
      }
    });
  }

  /// Update the expected data types requested from the device
  void updateTalkExpect() {
    TalkExpectReq talkExpectReq = TalkExpectReq();
    state.isOpenVoice.value = !state.isOpenVoice.value;
    if (!state.isOpenVoice.value) {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [],
      );
      showToast('已静音'.tr); // "Muted"
    } else {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );
    }

    // Send the updated expectation
    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
        talkExpect: talkExpectReq);
  }

  /// Handle invalid talk statuses
  void _handleInvalidTalkStatus() {}

  /// Capture a screenshot and save it to the gallery
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('Screenshot failed: current context not found');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);

      if (byteData == null) {
        AppLog.log('Screenshot failed: image data is empty');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();

      // Get the application documents directory
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';

      // Write the screenshot to a file
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);

      // Save the screenshot to the gallery
      await ImageGallerySaver.saveFile(imagePath);

      AppLog.log('Screenshot saved at: $imagePath');
      showToast('截图已保存到相册'.tr); // "Screenshot saved to the gallery"
    } catch (e) {
      AppLog.log('Screenshot failed: $e');
    }
  }

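  // Note: boundary.toImage() is called with its default pixelRatio of 1.0, so
  // the saved PNG is at logical-pixel resolution; passing a higher pixelRatio
  // (for example the device pixel ratio) would give a sharper capture.
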
  // Send the answer command
  void initiateAnswerCommand() {
    StartChartManage().startTalkAcceptTimer();
  }

  // Start recording
  Future<void> startProcessingAudio() async {
    try {
      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
        final bool? isRecording = await state.voiceProcessor?.isRecording();
        state.isRecordingAudio.value = isRecording!;
        state.startRecordingAudioTime.value = DateTime.now();

        // Add recording frame and error listeners
        state.voiceProcessor
            ?.addFrameListeners(<VoiceProcessorFrameListener>[_onFrame]);
        state.voiceProcessor?.addErrorListener(_onError);
      } else {
        // state.errorMessage.value = 'Recording permission not granted';
      }
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to start recorder: $ex';
    }
    state.isOpenVoice.value = false;
  }

  /// Stop recording
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record the end time
      state.endRecordingAudioTime.value = DateTime.now();

      // Compute the recording duration
      final Duration duration = state.endRecordingAudioTime.value
          .difference(state.startRecordingAudioTime.value);

      state.recordingAudioTime.value = duration.inSeconds;
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to stop recorder: $ex';
    } finally {
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording!;
      state.isOpenVoice.value = true;
    }
  }

  // Handle a recorded audio frame
  Future<void> _onFrame(List<int> frame) async {
    // Cap the buffer size to avoid unbounded growth
    if (_bufferedAudioFrames.length > state.frameLength * 3) {
      _bufferedAudioFrames.clear(); // drop data that has piled up
      return;
    }

    // First apply a fixed gain to raise the base volume
    List<int> amplifiedFrame = _applyGain(frame, 1.8);
    // Encode to G711 data (0 selects A-law)
    List<int> encodedData = G711Tool.encode(amplifiedFrame, 0);
    // Append exactly once; appending again below would duplicate the data
    _bufferedAudioFrames.addAll(encodedData);
    // Use a relative (wrapping) timestamp
    final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
    int getFrameLength = state.frameLength;
    if (Platform.isIOS) {
      getFrameLength = state.frameLength * 2;
    }

    // Send only once enough data has accumulated
    if (_bufferedAudioFrames.length >= state.frameLength) {
      try {
        await StartChartManage().sendTalkDataMessage(
          talkData: TalkData(
            content: _bufferedAudioFrames,
            contentType: TalkData_ContentTypeE.G711,
            durationMs: ms,
          ),
        );
      } finally {
        _bufferedAudioFrames.clear(); // always clear the buffer
      }
    }
  }

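  // Rough sanity check for the encoding above: 8 kHz, 16-bit, mono PCM is
  // 16,000 bytes/s, and A-law G.711 maps each 16-bit sample to one byte, so the
  // encoded stream is about 8,000 bytes/s before protobuf and transport overhead.
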
  // Error listener
  void _onError(VoiceProcessorException error) {
    AppLog.log(error.message!);
  }

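  // Worked example for the gain helper below (values chosen for illustration):
  // _applyGain([1000, -30000], 1.8) yields [1800, -32768]; the second product
  // (-54000) is clamped to the 16-bit minimum.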
  // Apply a gain factor to raw PCM samples
  List<int> _applyGain(List<int> pcmData, double gainFactor) {
    List<int> result = List<int>.filled(pcmData.length, 0);

    for (int i = 0; i < pcmData.length; i++) {
      // PCM samples are signed 16-bit integers
      int sample = pcmData[i];

      // Apply the gain
      double amplified = sample * gainFactor;

      // Clamp to the valid range to prevent overflow
      if (amplified > 32767) {
        amplified = 32767;
      } else if (amplified < -32768) {
        amplified = -32768;
      }

      result[i] = amplified.toInt();
    }

    return result;
  }

  /// Hang up
  void udpHangUpAction() async {
    if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
      // Hang up if the call is in progress
      StartChartManage().startTalkHangupMessageTimer();
    } else {
      // Otherwise reject the call
      StartChartManage().startTalkRejectMessageTimer();
    }
    // _mockDataTimer?.cancel();
    // _mockDataTimer = null;
    PacketLossStatistics().reset();
    Get.back();
  }

  // Remote unlock
  Future<void> remoteOpenLock() async {
    final lockPeerId = StartChartManage().lockPeerId;
    final lockListPeerId = StartChartManage().lockListPeerId;
    int lockId = lockDetailState.keyInfos.value.lockId ?? 0;

    // If the lock list carries peerIds there are multiple locks;
    // look up the lockId whose peerId matches the current one.
    for (final element in lockListPeerId) {
      if (element.network?.peerId == lockPeerId) {
        lockId = element.lockId ?? 0;
      }
    }

    final LockSetInfoEntity lockSetInfoEntity =
        await ApiRepository.to.getLockSettingInfoData(
      lockId: lockId.toString(),
    );
    if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
      if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
          lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
        final LoginEntity entity = await ApiRepository.to
            .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
        if (entity.errorCode!.codeIsSuccessful) {
          showToast('已开锁'.tr); // "Unlocked"
          StartChartManage().lockListPeerId = [];
        }
      } else {
        showToast('该锁的远程开锁功能未启用'.tr); // "Remote unlock is not enabled for this lock"
      }
    }
  }

  /// Stop audio playback
  void _stopPlayG711Data() async {
    await FlutterPcmSound.pause();
    await FlutterPcmSound.stop();
    await FlutterPcmSound.clear();
  }

  @override
  void onClose() {
    _stopPlayG711Data(); // stop audio playback

    state.audioBuffer.clear(); // clear the audio buffer

    // Cancel the call timer and reset the counter
    state.oneMinuteTimeTimer?.cancel();
    state.oneMinuteTimeTimer = null;
    state.oneMinuteTime.value = 0;

    // Stop recording
    stopProcessingAudio();

    // Cancel the data stream subscription
    _streamSubscription?.cancel();
    _isListening = false;

    // Reset the expected data types
    StartChartManage().reSetDefaultTalkExpect();

    super.onClose();
  }

  @override
  void dispose() {
    // _mockDataTimer?.cancel();
    // _mockDataTimer = null;
    StartChartManage().startTalkHangupMessageTimer();
    state.animationController.dispose();
    state.webViewController.clearCache();
    state.webViewController.reload();
    state.oneMinuteTimeTimer?.cancel();
    state.oneMinuteTimeTimer = null;
    stopProcessingAudio();
    StartChartManage().reSetDefaultTalkExpect();
    _frameBuffer.clear();
    // Call super.dispose() last, after all cleanup has run
    super.dispose();
  }
}