diff --git a/lib/talk/starChart/handle/impl/udp_talk_accept_handler.dart b/lib/talk/starChart/handle/impl/udp_talk_accept_handler.dart
index 2605525c..baaae2e8 100644
--- a/lib/talk/starChart/handle/impl/udp_talk_accept_handler.dart
+++ b/lib/talk/starChart/handle/impl/udp_talk_accept_handler.dart
@@ -78,6 +78,7 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
     }
   }
 
+  /// Add the audio expect data after the accept-call reply is received
   void _handleSendExpect() {
     final LockListInfoItemEntity currentKeyInfo =
         CommonDataManage().currentKeyInfo;
diff --git a/lib/talk/starChart/handle/impl/udp_talk_request_handler.dart b/lib/talk/starChart/handle/impl/udp_talk_request_handler.dart
index 974c8f25..91fdcc83 100644
--- a/lib/talk/starChart/handle/impl/udp_talk_request_handler.dart
+++ b/lib/talk/starChart/handle/impl/udp_talk_request_handler.dart
@@ -1,20 +1,15 @@
 import 'dart:convert';
 import 'dart:io';
 
-import 'package:flutter/services.dart';
-
-import 'package:flutter_local_notifications/flutter_local_notifications.dart';
 import 'package:get/get.dart';
 import 'package:star_lock/appRouters.dart';
 import 'package:star_lock/app_settings/app_settings.dart';
 import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
 import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
-import 'package:star_lock/talk/starChart/constant/talk_constant.dart';
 import 'package:star_lock/talk/starChart/constant/talk_status.dart';
 import 'package:star_lock/talk/starChart/entity/scp_message.dart';
 import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
 import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
-import 'package:star_lock/talk/starChart/proto/gateway_reset.pb.dart';
 import 'package:star_lock/talk/starChart/proto/generic.pb.dart';
 import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
 import 'package:star_lock/talk/starChart/proto/talk_request.pb.dart';
@@ -28,26 +23,10 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
   RxString currentLanguage = CurrentLocaleTool.getCurrentLocaleString().obs; // currently selected language
 
-  // Timestamp of the last handled request
-  int _lastRequestTime = 0;
-
   @override
   void handleReq(ScpMessage scpMessage) async {
-    final currentTime = DateTime.now().millisecondsSinceEpoch;
-    // Make sure at least 1 second has passed since the last request
-    if (currentTime - _lastRequestTime < 1000) {
-      // If the interval is under 1 second, reject the request outright
-      replyErrorMessage(scpMessage);
-      AppLog.log('Talk request too frequent, rejected');
-      return;
-    }
-
-    // Update the last handled time
-    _lastRequestTime = currentTime;
-
     // Check whether an account is logged in
     final loginData = await Storage.getLoginData();
-    // If the login account is not null, not in passive-answer state, and not in answered state
     if (loginData != null &&
         (talkStatus.status != TalkStatus.passiveCallWaitingAnswer ||
@@ -77,6 +56,8 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
       // Reply to the talk request was received
       startChartManage.FromPeerId = scpMessage.ToPeerId!;
       startChartManage.ToPeerId = scpMessage.FromPeerId!;
+      // Handle the expect data format
+      _handleResponseSendExpect();
       // Send the expect data
       startChartManage.startTalkExpectTimer();
       // Stop sending the talk request
@@ -99,7 +80,7 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
   // Handling of the incoming-call event
   void _talkRequestEvent({required String talkObjectName}) {
     // Send the expect data and notify the lock board that video data is needed
-    _handleSendExpect();
+    _handleRequestSendExpect();
     // Play the ringtone
     // test: use a custom ringtone
     playRingtone();
@@ -188,7 +169,8 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
     }
   }
 
-  void _handleSendExpect() {
+  /// Expect data sent after the app receives a talk request
+  void _handleRequestSendExpect() {
     final LockListInfoItemEntity currentKeyInfo =
         CommonDataManage().currentKeyInfo;
     final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
@@ -209,4 +191,27 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
       print('Lock supports neither H264 nor MJPEG, sending image video expect data by default');
     }
   }
+
+  /// Expect data sent after the app initiates the request and receives the reply
+  void _handleResponseSendExpect() {
+    final LockListInfoItemEntity currentKeyInfo =
+        CommonDataManage().currentKeyInfo;
+    final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
+    final isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
+
+    // Prefer H264, then MJPEG
+    if (isH264) {
+      // The lock supports H264: send the H264 video and G711 audio expect
+      startChartManage.sendH264VideoAndG711AudioTalkExpectData();
+      print('Lock supports H264, sending H264 video expect data');
+    } else if (isMJpeg) {
+      // The lock only supports MJPEG: send the image video and G711 audio expect
+      startChartManage.sendImageVideoAndG711AudioTalkExpectData();
+      print('Lock does not support H264 but supports MJPEG, sending MJPEG video expect data');
+    } else {
+      // Default to image video
+      startChartManage.sendImageVideoAndG711AudioTalkExpectData();
+      print('Lock supports neither H264 nor MJPEG, sending image video expect data by default');
+    }
+  }
 }
diff --git a/lib/talk/starChart/views/talkView/talk_view_logic.dart b/lib/talk/starChart/views/talkView/talk_view_logic.dart
index eaf051c1..cc959c0b 100644
--- a/lib/talk/starChart/views/talkView/talk_view_logic.dart
+++ b/lib/talk/starChart/views/talkView/talk_view_logic.dart
@@ -51,7 +51,6 @@ class TalkViewLogic extends BaseGetXController {
   int _startAudioTime = 0; // start-of-playback timestamp
   bool _isFirstFrame = true; // whether this is the first frame
 
-  // Define the audio frame buffer and the send function
   final List _bufferedAudioFrames = [];
@@ -65,6 +64,10 @@ class TalkViewLogic extends BaseGetXController {
   int _lastFpsUpdateTime = 0;
   Timer? _fpsTimer;
 
+  // Add the listening flag and the subscription reference
+  bool _isListening = false;
+  StreamSubscription? _streamSubscription;
+
   /// Initialize the audio player
   void _initFlutterPcmSound() {
     const int sampleRate = 8000;
@@ -97,7 +100,15 @@ class TalkViewLogic extends BaseGetXController {
 
   // Listen to the audio/video data stream
   void _startListenTalkData() {
-    state.talkDataRepository.talkDataStream
+    // Prevent duplicate listening
+    if (_isListening) {
+      AppLog.log("A data stream listener already exists, skipping duplicate listen");
+      return;
+    }
+
+    AppLog.log("==== Starting a new data stream listener ====");
+    _isListening = true;
+    _streamSubscription = state.talkDataRepository.talkDataStream
         .listen((TalkDataModel talkDataModel) async {
       final talkData = talkDataModel.talkData;
       final contentType = talkData!.contentType;
@@ -106,13 +117,13 @@ class TalkViewLogic extends BaseGetXController {
       // Determine the data type and dispatch accordingly
       switch (contentType) {
         case TalkData_ContentTypeE.G711:
-        // Record the start time when the first frame arrives
-        if (_isFirstAudioFrame) {
-          _startAudioTime = currentTime;
-          _isFirstAudioFrame = false;
-        }
+          // Record the start time when the first frame arrives
+          if (_isFirstAudioFrame) {
+            _startAudioTime = currentTime;
+            _isFirstAudioFrame = false;
+          }
 
-        // Compute the audio delay
+          // Compute the audio delay
           final expectedTime = _startAudioTime + talkData.durationMs;
           final audioDelay = currentTime - expectedTime;
@@ -384,7 +395,6 @@ class TalkViewLogic extends BaseGetXController {
     }
   }
 
-
   /// Get the permission status
   Future getPermissionStatus() async {
     final Permission permission = Permission.microphone;
@@ -504,6 +514,9 @@ class TalkViewLogic extends BaseGetXController {
     state.oneMinuteTimeTimer?.cancel(); // cancel the old timer
     state.oneMinuteTimeTimer = null; // cancel the old timer
     state.oneMinuteTime.value = 0;
+    // Cancel the data stream subscription
+    _streamSubscription?.cancel();
+    _isListening = false;
 
     super.onClose();
   }
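Note: the _isListening / _streamSubscription pair added in talk_view_logic.dart above (and again in h264_web_logic.dart below) implements a listen-once guard: subscribe only when no subscription exists, keep the StreamSubscription, and cancel it in onClose(). A minimal standalone sketch of that pattern, assuming only dart:async; the ListenOnceGuard name and the generic element type are illustrative and not part of this patch.

import 'dart:async';

/// Guards a stream against duplicate subscriptions: start() is a no-op while a
/// subscription is active, and stop() cancels it so onClose() can release it.
class ListenOnceGuard<T> {
  bool _isListening = false;
  StreamSubscription<T>? _subscription;

  void start(Stream<T> stream, void Function(T) onData) {
    if (_isListening) return; // a listener already exists, skip the duplicate
    _isListening = true;
    _subscription = stream.listen(onData);
  }

  Future<void> stop() async {
    await _subscription?.cancel(); // cancel the active subscription, if any
    _subscription = null;
    _isListening = false;
  }
}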
diff --git a/lib/talk/starChart/webView/h264_web_logic.dart b/lib/talk/starChart/webView/h264_web_logic.dart
index 4f5789c2..b7307383 100644
--- a/lib/talk/starChart/webView/h264_web_logic.dart
+++ b/lib/talk/starChart/webView/h264_web_logic.dart
@@ -54,6 +54,10 @@ class H264WebViewLogic extends BaseGetXController {
   final Queue<List<int>> _frameBuffer = Queue<List<int>>();
   static const int FRAME_BUFFER_SIZE = 25;
 
+  // Add the listening flag and the subscription reference
+  bool _isListening = false;
+  StreamSubscription? _streamSubscription;
+
   @override
   void onInit() {
     // Initialize the WebView controller
@@ -122,7 +126,15 @@ class H264WebViewLogic extends BaseGetXController {
   }
 
   void _createFramesStreamListen() async {
-    state.talkDataRepository.talkDataStream
+    // Prevent duplicate listening
+    if (_isListening) {
+      AppLog.log("A data stream listener already exists, skipping duplicate listen");
+      return;
+    }
+
+    AppLog.log("==== Starting a new data stream listener ====");
+    _isListening = true;
+    _streamSubscription = state.talkDataRepository.talkDataStream
         .listen((TalkDataModel talkDataModel) async {
       final talkData = talkDataModel.talkData;
       final contentType = talkData!.contentType;
@@ -131,30 +143,33 @@ class H264WebViewLogic extends BaseGetXController {
       // Determine the data type and dispatch accordingly
       switch (contentType) {
         case TalkData_ContentTypeE.G711:
-          // Record the start time when the first frame arrives
-          if (_isFirstAudioFrame) {
-            _startAudioTime = currentTime;
-            _isFirstAudioFrame = false;
-          }
-
-          // Compute the audio delay
-          final expectedTime = _startAudioTime + talkData.durationMs;
-          final audioDelay = currentTime - expectedTime;
-
-          // If the delay is too large, clear the buffer and play directly
-          if (audioDelay > 500) {
-            state.audioBuffer.clear();
-            if (state.isOpenVoice.value) {
-              _playAudioFrames();
+          if (state.isShowLoading.isFalse) {
+            // Record the start time when the first frame arrives
+            if (_isFirstAudioFrame) {
+              _startAudioTime = currentTime;
+              _isFirstAudioFrame = false;
             }
-            return;
+
+            // Compute the audio delay
+            final expectedTime = _startAudioTime + talkData.durationMs;
+            final audioDelay = currentTime - expectedTime;
+
+            // If the delay is too large, clear the buffer and play directly
+            if (audioDelay > 500) {
+              state.audioBuffer.clear();
+              if (state.isOpenVoice.value) {
+                _playAudioFrames();
+              }
+              return;
+            }
+            if (state.audioBuffer.length >= audioBufferSize) {
+              state.audioBuffer.removeAt(0); // drop the oldest data
+            }
+            state.audioBuffer.add(talkData); // add the new data
+            // Add audio playback logic, similar to the video path
+            _playAudioFrames();
           }
-          if (state.audioBuffer.length >= audioBufferSize) {
-            state.audioBuffer.removeAt(0); // drop the oldest data
-          }
-          state.audioBuffer.add(talkData); // add the new data
-          // Add audio playback logic, similar to the video path
-          _playAudioFrames();
+
           break;
         case TalkData_ContentTypeE.H264:
           // Add the new frame to the buffer
@@ -537,6 +552,39 @@ class H264WebViewLogic extends BaseGetXController {
     }
   }
 
+  /// Stop audio playback
+  void _stopPlayG711Data() async {
+    await FlutterPcmSound.pause();
+    await FlutterPcmSound.stop();
+    await FlutterPcmSound.clear();
+  }
+
+  @override
+  void onClose() {
+    _stopPlayG711Data(); // stop audio playback
+
+    state.audioBuffer.clear(); // clear the audio buffer
+
+    state.oneMinuteTimeTimer?.cancel();
+    state.oneMinuteTimeTimer = null;
+
+    // Stop audio playback
+    stopProcessingAudio();
+
+    state.oneMinuteTimeTimer?.cancel(); // cancel the old timer
+    state.oneMinuteTimeTimer = null; // cancel the old timer
+    state.oneMinuteTime.value = 0;
+
+    // Cancel the data stream subscription
+    _streamSubscription?.cancel();
+    _isListening = false;
+
+    // Reset the expect data
+    StartChartManage().reSetDefaultTalkExpect();
+
+    super.onClose();
+  }
+
   @override
   void dispose() {
     // _mockDataTimer?.cancel();
diff --git a/lib/talk/starChart/webView/h264_web_view.dart b/lib/talk/starChart/webView/h264_web_view.dart
index 51aa30d8..aa18aa3c 100644
--- a/lib/talk/starChart/webView/h264_web_view.dart
+++ b/lib/talk/starChart/webView/h264_web_view.dart
@@ -7,6 +7,7 @@ import 'package:flutter_screenutil/flutter_screenutil.dart';
 import 'package:get/get.dart';
 import 'package:star_lock/app_settings/app_colors.dart';
 import 'package:star_lock/app_settings/app_settings.dart';
+import 'package:star_lock/talk/call/callTalk.dart';
 import 'package:star_lock/talk/starChart/constant/talk_status.dart';
 import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
 import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
@@ -413,8 +414,10 @@ class _H264WebViewState extends State
   }
 
   @override
   void dispose() {
-    state.animationController.dispose(); // make sure the controller is released
-    super.dispose();
+    state.animationController.dispose();
+    CallTalk().finishAVData();
+    // UdpTalkDataHandler().resetDataRates();
+    super.dispose();
   }
 }
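Note: _handleRequestSendExpect and _handleResponseSendExpect in udp_talk_request_handler.dart above apply the same priority when choosing which expect data to send: H264 first, then MJPEG, otherwise plain image video. A minimal sketch of that selection rule; the enum and function names here are illustrative assumptions, not types from this codebase.

/// Illustrative only: mirrors the H264 > MJPEG > image fallback used when
/// deciding which talk-expect data to send to the lock.
enum TalkVideoExpect { h264, mjpeg, image }

TalkVideoExpect selectVideoExpect({required bool isH264, required bool isMJpeg}) {
  if (isH264) return TalkVideoExpect.h264; // lock supports H264
  if (isMJpeg) return TalkVideoExpect.mjpeg; // lock supports MJPEG only
  return TalkVideoExpect.image; // default: still-image video
}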