import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;

import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/blue/blue_manage.dart';
import 'package:star_lock/blue/io_protocol/io_openLock.dart';
import 'package:star_lock/blue/io_tool/io_tool.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pbenum.dart';
import 'package:star_lock/talk/startChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/startChart/start_chart_manage.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/talk/udp/udp_manage.dart';
import 'package:star_lock/talk/udp/udp_senderManage.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/storage.dart';

import '../../../../tools/baseGetXController.dart';

class TalkViewLogic extends BaseGetXController {
  final TalkViewState state = TalkViewState();
  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

  Timer? _syncTimer; // Timer driving the audio/video playback refresh.
  int _startTime = 0; // Playback start timestamp, used to position frames by their timestamps.
  final int bufferSize = 20; // Buffer size, in frames.
  final List<int> frameTimestamps = []; // Frame timestamps used to compute FPS.
  int frameIntervalMs = 45; // Initial frame interval: 45 ms (about 22 FPS).
  int minFrameIntervalMs = 30; // Minimum frame interval (about 33 FPS).
  int maxFrameIntervalMs = 500; // Maximum frame interval (about 2 FPS).
  // int maxFrameIntervalMs = 100; // Maximum frame interval (about 10 FPS).

  /// Initialize the PCM audio player.
  void _initFlutterPcmSound() {
    const int sampleRate = 44100;
    FlutterPcmSound.setLogLevel(LogLevel.verbose);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 2);
    // Configure the feed threshold.
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(-1); // Android-specific handling.
    } else {
      FlutterPcmSound.setFeedThreshold(sampleRate ~/ 32); // Non-Android platforms.
    }
  }

  /// Hang up.
  void udpHangUpAction() async {
    if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
      // In an active call: hang up.
      StartChartManage().sendTalkHangupMessage();
    } else {
      // Not yet answered: reject.
      StartChartManage().sendTalkRejectMessage();
    }
    Get.back();
  }

  /// Send the answer command.
  void initiateAnswerCommand() {
    StartChartManage().startTalkAcceptTimer();
  }

  /// Listen to the audio/video data stream.
  void _startListenTalkData() {
    state.talkDataRepository.talkDataStream.listen((TalkData talkData) {
      final contentType = talkData.contentType;
      // Dispatch by content type.
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          if (state.audioBuffer.length < bufferSize) {
            state.audioBuffer.add(talkData);
          }
          // print('Received audio data');
          break;
        case TalkData_ContentTypeE.Image:
          if (state.videoBuffer.length < bufferSize) {
            state.videoBuffer.add(talkData);
          }
          // print('talkData durationMs-->:${talkData.durationMs}');
          // Update the network status.
          // updateNetworkStatus(currentTimestamp);
          break;
      }
    });
  }

  /// Listen to the talk status.
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          _handleInvalidTalkStatus();
          break;
        default:
          // Other statuses need no handling here.
          break;
      }
    });
  }

  /// Play one audio frame.
  void _playAudioData(TalkData talkData) {
    // Convert the PCM data to a PcmArrayInt16.
    final PcmArrayInt16 fromList = PcmArrayInt16.fromList(talkData.content);
    FlutterPcmSound.feed(fromList);
    if (!state.isPlaying.value) {
      FlutterPcmSound.play();
      state.isPlaying.value = true;
    }
  }

  /// Play one video frame.
  void _playVideoData(TalkData talkData) {
    state.listData.value = Uint8List.fromList(talkData.content);
  }

  /// Start playback.
  void _startPlayback() {
    Future.delayed(const Duration(milliseconds: 800), () {
      _startTime = DateTime.now().millisecondsSinceEpoch;
      _syncTimer ??=
          Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
        final currentTime = DateTime.now().millisecondsSinceEpoch;
        final elapsedTime = currentTime - _startTime;
        // Synchronize audio and video against elapsedTime.
        // AppLog.log('Elapsed Time: $elapsedTime ms');

        // Dynamically adjust the frame interval.
        _adjustFrameInterval();

        // Play whichever buffered frames are due.
        _playBufferedFrames(elapsedTime);
      });
    });
  }

  /// Play the buffered audio/video frames whose timestamps are due.
  void _playBufferedFrames(int elapsedTime) {
    // Play the audio frame that is due, if any.
    if (state.audioBuffer.isNotEmpty &&
        state.audioBuffer.first.durationMs <= elapsedTime) {
      // Check whether audio output is enabled.
      if (state.isOpenVoice.value) {
        _playAudioData(state.audioBuffer.removeAt(0));
      } else {
        // Audio is muted: still consume the frame so the buffer stays in step
        // with real time and does not grow without bound.
        state.audioBuffer.removeAt(0);
      }
    }

    // Play the video frame that is due.
    // Frame-skipping strategy: if several buffered frames are already older
    // than the current time, drop the stale ones and show only the newest.
    while (state.videoBuffer.isNotEmpty &&
        state.videoBuffer.first.durationMs <= elapsedTime) {
      if (state.videoBuffer.length > 1) {
        state.videoBuffer.removeAt(0);
      } else {
        _playVideoData(state.videoBuffer.removeAt(0));
      }
    }
  }
  /// Dynamically adjust the frame interval based on buffer depth.
  void _adjustFrameInterval() {
    if (state.videoBuffer.length < 10 && frameIntervalMs < maxFrameIntervalMs) {
      // Buffer is small and the interval is below the maximum: slow down.
      frameIntervalMs += 5;
    } else if (state.videoBuffer.length > 20 &&
        frameIntervalMs > minFrameIntervalMs) {
      // Buffer is large and the interval is above the minimum: speed up.
      frameIntervalMs -= 5;
    }

    // Restart the sync timer with the new interval.
    _syncTimer?.cancel();
    _syncTimer =
        Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
      final currentTime = DateTime.now().millisecondsSinceEpoch;
      final elapsedTime = currentTime - _startTime;
      _playBufferedFrames(elapsedTime);
    });
  }

  /// Update the network status.
  void updateNetworkStatus(int currentTimestamp) {
    if (state.lastFrameTimestamp.value != 0) {
      final frameInterval = currentTimestamp - state.lastFrameTimestamp.value;
      if (frameInterval > 500 && frameInterval <= 1000) {
        // Frame interval between 500 ms and 1 s: the network is lagging.
        state.networkStatus.value = NetworkStatus.lagging;
        showNetworkStatus("Network is lagging");
      } else if (frameInterval > 1000) {
        // Frame interval over 1 s: the network is delayed.
        state.networkStatus.value = NetworkStatus.delayed;
        showNetworkStatus("Network is delayed");
      } else {
        state.networkStatus.value = NetworkStatus.normal;
        state.alertCount.value = 0; // Reset the alert counter.
      }
    }
    state.lastFrameTimestamp.value = currentTimestamp;
  }

  /// Show a network status toast.
  void showNetworkStatus(String message) {
    // Show the toast only if the alert limit has not been reached
    // and EasyLoading is not already showing.
    if (state.alertCount.value < state.maxAlertNumber.value &&
        !EasyLoading.isShow) {
      showToast(message);
      state.alertCount++;
    }
  }

  /// Stop playing audio.
  void _stopPlayG711Data() async {
    await FlutterPcmSound.pause();
    await FlutterPcmSound.stop();
    await FlutterPcmSound.clear();
  }

  /// Open the door remotely.
  Future<void> udpOpenDoorAction() async {
    final List? privateKey = await Storage.getStringList(saveBluePrivateKey);
    final List getPrivateKeyList = changeStringListToIntList(privateKey!);
    final List? signKey = await Storage.getStringList(saveBlueSignKey);
    final List signKeyDataList = changeStringListToIntList(signKey!);
    final List? token = await Storage.getStringList(saveBlueToken);
    final List getTokenList = changeStringListToIntList(token!);

    await _getLockNetToken();

    final OpenLockCommand openLockCommand = OpenLockCommand(
      lockID: BlueManage().connectDeviceName,
      userID: await Storage.getUid(),
      openMode: lockDetailState.openDoorModel,
      openTime: _getUTCNetTime(),
      onlineToken: lockDetailState.lockNetToken,
      token: getTokenList,
      needAuthor: 1,
      signKey: signKeyDataList,
      privateKey: getPrivateKeyList,
    );
    final messageDetail = openLockCommand.messageDetail();

    // Send the remote unlock message.
    StartChartManage().sendRemoteUnLockMessage(
      bluetoothDeviceName: BlueManage().connectDeviceName,
      openLockCommand: messageDetail,
    );
    showToast('已发送开门通知');
  }

  int _getUTCNetTime() {
    if (lockDetailState.isHaveNetwork) {
      return DateTime.now().millisecondsSinceEpoch ~/ 1000 +
          lockDetailState.differentialTime;
    } else {
      return 0;
    }
  }

  /// Fetch the online unlock token. Whether this endpoint is called depends on
  /// the lock setting "require network when unlocking".
  Future<void> _getLockNetToken() async {
    final LockNetTokenEntity entity = await ApiRepository.to.getLockNetToken(
        lockId: lockDetailState.keyInfos.value.lockId.toString());
    if (entity.errorCode!.codeIsSuccessful) {
      lockDetailState.lockNetToken = entity.data!.token!.toString();
      AppLog.log('从服务器获取联网token:${lockDetailState.lockNetToken}');
    } else {
      BuglyTool.uploadException(
          message: '点击了需要联网开锁',
          detail: '点击了需要联网开锁 获取连网token失败',
          upload: true);
      showToast('网络访问失败,请检查网络是否正常'.tr, something: () {});
    }
  }

  /// Check the microphone permission status.
  Future<bool> getPermissionStatus() async {
    final Permission permission = Permission.microphone;
    // granted: allowed; denied: refused; permanentlyDenied: refused, never ask again.
    final PermissionStatus status = await permission.status;
    if (status.isGranted) {
      return true;
    } else if (status.isDenied) {
      requestPermission(permission);
    } else if (status.isPermanentlyDenied) {
      openAppSettings();
    } else if (status.isRestricted) {
      requestPermission(permission);
    }
    return false;
  }

  /// Request a single permission.
  void requestPermission(Permission permission) async {
    final PermissionStatus status = await permission.request();
    if (status.isPermanentlyDenied) {
      openAppSettings();
    }
  }

  Future<void> requestPermissions() async {
    // Request storage permission.
    var storageStatus = await Permission.storage.request();
    // Request microphone permission.
    var microphoneStatus = await Permission.microphone.request();
    if (storageStatus.isGranted && microphoneStatus.isGranted) {
      print("Permissions granted");
    } else {
      print("Permissions denied");
      // If the permission was denied, prompt the user or open the settings page.
      if (await Permission.storage.isPermanentlyDenied) {
        openAppSettings(); // Open the app settings page.
      }
    }
  }

  Future<void> startRecording() async {
    await requestPermissions();
    if (state.isRecordingScreen.value) {
      showToast('录屏已开始,请勿重复点击');
      return;
    }
    bool start = await FlutterScreenRecording.startRecordScreen(
      "Screen Recording", // Video file name.
      titleNotification: "Recording in progress", // Notification title.
      messageNotification: "Tap to stop recording", // Notification body.
    );
    if (start) {
      state.isRecordingScreen.value = true;
    }
  }
  Future<void> stopRecording() async {
    String path = await FlutterScreenRecording.stopRecordScreen;
    print("Recording saved to: $path");
    // Save the video to the system gallery.
    bool? success = await GallerySaver.saveVideo(path);
    if (success == true) {
      print("Video saved to gallery");
    } else {
      print("Failed to save video to gallery");
    }
    showToast('录屏结束,已保存到系统相册');
    state.isRecordingScreen.value = false;
  }

  @override
  void onReady() {
    super.onReady();
  }

  @override
  void onInit() {
    super.onInit();
    // Start listening to the audio/video data stream.
    _startListenTalkData();
    // Start listening to the talk status.
    _startListenTalkStatus();
    // Seed the status once before the listener fires.
    // *** The page is initialized after the status has already changed, so the
    // latest status would otherwise be missed; assign it manually here. ***
    state.talkStatus.value = state.startChartTalkStatus.status;
    // Initialize the audio player.
    _initFlutterPcmSound();
    // Start the playback timer.
    _startPlayback();
    // Initialize the audio recorder.
    _initAudioRecorder();
    requestPermissions();
  }

  @override
  void onClose() {
    _stopPlayG711Data();
    state.listData.value = Uint8List(0);
    state.audioBuffer.clear();
    state.videoBuffer.clear();
    _syncTimer?.cancel();
    _syncTimer = null;
    super.onClose();
  }

  /// Handle an invalid talk status.
  void _handleInvalidTalkStatus() {
    state.listData.value = Uint8List(0);
    // Stop audio playback.
    _stopPlayG711Data();
    // The status is invalid; leave the page.
    Get.back();
  }

  /// Update the expected data types requested from the peer.
  void updateTalkExpect() {
    TalkExpectReq talkExpectReq = TalkExpectReq();
    if (state.isOpenVoice.value) {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );
    } else {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [],
      );
    }
    // Change the expected data types and restart the expect-message timer.
    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
        talkExpect: talkExpectReq);
    state.isOpenVoice.value = !state.isOpenVoice.value;
  }

  /// Take a screenshot and save it to the gallery.
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('截图失败: 未找到当前上下文');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);
      if (byteData == null) {
        AppLog.log('截图失败: 图像数据为空');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();

      // Get the application documents directory.
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';

      // Write the screenshot to a file.
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);

      // Save the screenshot to the gallery.
      await ImageGallerySaver.saveFile(imagePath);
      AppLog.log('截图保存路径: $imagePath');
      showToast('截图已保存到相册'.tr);
    } catch (e) {
      AppLog.log('截图失败: $e');
    }
  }

  /// Initialize the audio recorder.
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }

  /// Start recording audio.
  Future<void> startProcessingAudio() async {
    // Register the frame and error listeners.
    state.voiceProcessor?.addFrameListener(_onFrame);
    state.voiceProcessor?.addErrorListener(_onError);
    try {
      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
        final bool? isRecording = await state.voiceProcessor?.isRecording();
        state.isRecordingAudio.value = isRecording!;
        state.startRecordingAudioTime.value = DateTime.now();
      } else {
        // state.errorMessage.value = 'Recording permission not granted';
      }
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to start recorder: $ex';
    }
  }

  /// Stop recording audio.
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record the end time.
      state.endRecordingAudioTime.value = DateTime.now();
      // Compute the recording duration.
      final duration = state.endRecordingAudioTime.value!
          .difference(state.startRecordingAudioTime.value!);
      state.recordingAudioTime.value = duration.inSeconds;
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to stop recorder: $ex';
    } finally {
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording!;
    }
  }

  Future<void> _onFrame(List<int> frame) async {
    state.recordingAudioAllFrames.add(frame); // Keep the frame in state.
    // final List concatenatedFrames =
    //     _concatenateFrames(state.recordingAudioAllFrames); // Concatenate all frames.
    final List<int> pcmBytes = _listLinearToULaw(frame);
    // Send the audio data.
    StartChartManage().sendTalkDataMessage(
      talkData: TalkData(
        content: pcmBytes,
        contentType: TalkData_ContentTypeE.G711,
        durationMs: DateTime.now().millisecondsSinceEpoch -
            state.startRecordingAudioTime.value.millisecondsSinceEpoch,
      ),
    );
  }

  void _onError(VoiceProcessorException error) {
    // state.errorMessage.value = error.message!;
    AppLog.log(error.message!);
  }

  /// Convert linear PCM samples to G.711 u-law.
  List<int> _listLinearToULaw(List<int> pcmList) {
    final List<int> uLawList = [];
    for (int pcmVal in pcmList) {
      final int uLawVal = _linearToULaw(pcmVal);
      uLawList.add(uLawVal);
    }
    return uLawList;
  }

  /// Encode a single 16-bit linear PCM sample as a G.711 u-law byte.
  int _linearToULaw(int pcmVal) {
    int mask;
    int seg;
    int uval;
    if (pcmVal < 0) {
      pcmVal = 0x84 - pcmVal;
      mask = 0x7F;
    } else {
      pcmVal += 0x84;
      mask = 0xFF;
    }
    seg = search(pcmVal);
    if (seg >= 8) {
      return 0x7F ^ mask;
    } else {
      uval = seg << 4;
      uval |= (pcmVal >> (seg + 3)) & 0xF;
      return uval ^ mask;
    }
  }

  /// Find the u-law segment number for a biased PCM value.
  int search(int val) {
    final List<int> table = [
      0xFF,
      0x1FF,
      0x3FF,
      0x7FF,
      0xFFF,
      0x1FFF,
      0x3FFF,
      0x7FFF
    ];
    const int size = 8;
    for (int i = 0; i < size; i++) {
      if (val <= table[i]) {
        return i;
      }
    }
    return size;
  }
}