diff --git a/lib/talk/starChart/views/talkView/talk_view_logic.dart b/lib/talk/starChart/views/talkView/talk_view_logic.dart
index cc959c0b..02ed3f25 100644
--- a/lib/talk/starChart/views/talkView/talk_view_logic.dart
+++ b/lib/talk/starChart/views/talkView/talk_view_logic.dart
@@ -2,6 +2,7 @@
 import 'dart:async';
 import 'dart:io';
 import 'dart:ui' as ui;
 import 'dart:math'; // Import the math package to use sqrt
+import 'dart:ui' show decodeImageFromList;
 import 'package:flutter/foundation.dart';
 import 'package:flutter/rendering.dart';
@@ -38,36 +39,25 @@ class TalkViewLogic extends BaseGetXController {
   final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
 
-  final int minBufferSize = 2; // minimum buffer: 2 frames, ~166 ms
-  final int maxBufferSize = 20; // maximum buffer: 8 frames, ~666 ms
-  int bufferSize = 8; // initialized to the default size
-  // audio-related member variables
-  final int minAudioBufferSize = 1; // audio minimum buffer: 1 frame
-  final int maxAudioBufferSize = 3; // audio maximum buffer: 3 frames
+  int bufferSize = 8; // larger buffer; render only once it is full
+  int audioBufferSize = 2; // audio buffers 2 frames by default
   bool _isFirstAudioFrame = true; // whether this is the first frame
-  // record the start time
-  int _startTime = 0; // playback start timestamp
+  int _startAudioTime = 0; // playback start timestamp
 
-  bool _isFirstFrame = true; // whether this is the first frame
   // audio frame buffer and send function
   final List _bufferedAudioFrames = [];
-  final Map _imageCache = {};
-
-  // variable that records the timestamp of the previous frame
-  int _lastFrameTimestamp = 0; // initial value is 0
-
-  // frame-rate calculation variables
-  int _frameCount = 0;
-  int _lastFpsUpdateTime = 0;
-  Timer? _fpsTimer;
-
   // listening state and subscription reference
   bool _isListening = false;
   StreamSubscription? _streamSubscription;
 
+  Timer? videoRenderTimer; // video render timer
+
+  int _renderedFrameCount = 0;
+  int _lastFpsPrintTime = DateTime.now().millisecondsSinceEpoch;
+
   /// Initialize the audio player
   void _initFlutterPcmSound() {
     const int sampleRate = 8000;
@@ -143,79 +133,16 @@ class TalkViewLogic extends BaseGetXController {
           _playAudioFrames();
           break;
         case TalkData_ContentTypeE.Image:
-          // record the start time when the first frame arrives
-          if (_isFirstFrame) {
-            _startTime = currentTime;
-            _isFirstFrame = false;
-            // AppLog.log('Timestamp of the first frame: ${talkData.durationMs}');
-          }
-          // AppLog.log('Timestamp of a subsequent frame: ${talkData.durationMs}');
-          // compute the interval between frames
-          if (_lastFrameTimestamp != 0) {
-            final int frameInterval = talkData.durationMs - _lastFrameTimestamp;
-            _adjustBufferSize(frameInterval); // adjust the buffer based on the frame interval
-          }
-          _lastFrameTimestamp = talkData.durationMs; // update the previous frame's timestamp
-
-          // then add to the playback buffer
-          if (state.videoBuffer.length >= bufferSize) {
-            state.videoBuffer.removeAt(0);
-          }
+          // fixed-length buffer: keep at most bufferSize frames
           state.videoBuffer.add(talkData);
-          // decode and cache first
-          await _decodeAndCacheFrame(talkData);
-          // finally try to play
-          _playVideoFrames();
+          if (state.videoBuffer.length > bufferSize) {
+            state.videoBuffer.removeAt(0); // drop the oldest frame
+          }
           break;
       }
     });
   }
 
-  // Changed: video frame playback logic
-  void _playVideoFrames() {
-    // do not play if the buffer is empty or below the target size
-    if (state.videoBuffer.isEmpty || state.videoBuffer.length < bufferSize) {
-      // AppLog.log('📊 Buffering - current buffer size: ${state.videoBuffer.length}/${bufferSize}');
-      return;
-    }
-    // find the frame with the smallest timestamp (the oldest frame)
-    TalkData? oldestFrame;
-    int oldestIndex = -1;
-    for (int i = 0; i < state.videoBuffer.length; i++) {
-      if (oldestFrame == null ||
-          state.videoBuffer[i].durationMs < oldestFrame.durationMs) {
-        oldestFrame = state.videoBuffer[i];
-        oldestIndex = i;
-      }
-    }
-    // make sure a valid frame was found
-    if (oldestFrame != null && oldestIndex != -1) {
-      final cacheKey = oldestFrame.content.hashCode.toString();
-
-      // update the display with the cached decoded image
-      if (_imageCache.containsKey(cacheKey)) {
-        state.currentImage.value = _imageCache[cacheKey];
-        state.listData.value = Uint8List.fromList(oldestFrame.content);
-        state.videoBuffer.removeAt(oldestIndex); // remove the played frame
-
-        // // update the frame-rate calculation
-        // _frameCount++;
-        // final currentTime = DateTime.now().millisecondsSinceEpoch;
-        // final elapsed = currentTime - _lastFpsUpdateTime;
-        //
-        // if (elapsed >= 1000) {
-        //   // update once per second
-        //   state.fps.value = (_frameCount * 1000 / elapsed).round();
-        //   _frameCount = 0;
-        //   _lastFpsUpdateTime = currentTime;
-        // }
      } else {
-        // AppLog.log('⚠️ Frame not found in cache - Key: $cacheKey');
-        state.videoBuffer.removeAt(oldestIndex); // remove the unplayable frame
-      }
-    }
-  }
-
   // New: audio frame playback logic
   void _playAudioFrames() {
     // do not play if the buffer is empty or below the target size
@@ -246,50 +173,6 @@ class TalkViewLogic extends BaseGetXController {
     }
   }
 
-  // New: decode and cache a frame
-  Future _decodeAndCacheFrame(TalkData talkData) async {
-    try {
-      String cacheKey = talkData.content.hashCode.toString();
-
-      // decode and cache the frame if it has not been cached yet
-      if (!_imageCache.containsKey(cacheKey)) {
-        final Uint8List uint8Data = Uint8List.fromList(talkData.content);
-        final ui.Image image = await decodeImageFromList(uint8Data);
-
-        // manage the cache size
-        if (_imageCache.length >= bufferSize) {
-          _imageCache.remove(_imageCache.keys.first);
-        }
-
-        // add to the cache
-        _imageCache[cacheKey] = image;
-
-        // AppLog.log('📥 Cached a new frame - cache count: ${_imageCache.length}, Key: $cacheKey');
-      }
-    } catch (e) {
-      AppLog.log('❌ Frame decode error: $e');
-    }
-  }
-
-  // New: dynamically adjust the buffer size
-  void _adjustBufferSize(int frameInterval) {
-    const int frameDuration = 83; // assume each frame interval is 83 ms (12 fps)
-    const int delayThresholdHigh = frameDuration * 2; // high-latency threshold (2 frame intervals)
-    const int delayThresholdLow = frameDuration; // low-latency threshold (1 frame interval)
-    const int adjustInterval = 1; // adjust by 1 frame each time
-
-    if (frameInterval > delayThresholdHigh && bufferSize < maxBufferSize) {
-      // the frame interval is large: grow the buffer
-      bufferSize = min(bufferSize + adjustInterval, maxBufferSize);
-      AppLog.log('📈 Increased buffer - current size: $bufferSize, frame interval: ${frameInterval}ms');
-    } else if (frameInterval < delayThresholdLow &&
-        bufferSize > minBufferSize) {
-      // the frame interval is small: shrink the buffer
-      bufferSize = max(bufferSize - adjustInterval, minBufferSize);
-      AppLog.log('📉 Decreased buffer - current size: $bufferSize, frame interval: ${frameInterval}ms');
-    }
-  }
-
   /// Listen for talk status
   void _startListenTalkStatus() {
     state.startChartTalkStatus.statusStream.listen((talkStatus) {
@@ -496,6 +379,32 @@ class TalkViewLogic extends BaseGetXController {
     _initAudioRecorder();
     requestPermissions();
+
+    // start the video render timer (10 fps)
+    videoRenderTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
+      final int now = DateTime.now().millisecondsSinceEpoch;
+      if (state.videoBuffer.isNotEmpty) {
+        final TalkData oldestFrame = state.videoBuffer.removeAt(0);
+        if (oldestFrame.content.isNotEmpty) {
+          state.listData.value = Uint8List.fromList(oldestFrame.content); // keep a copy of the raw data
+          final int decodeStart = DateTime.now().millisecondsSinceEpoch;
+          decodeImageFromList(Uint8List.fromList(oldestFrame.content)).then((ui.Image img) {
+            final int decodeEnd = DateTime.now().millisecondsSinceEpoch;
+            state.currentImage.value = img;
+            _renderedFrameCount++;
+            // report fps once per second
+            if (now - _lastFpsPrintTime >= 1000) {
+              // print('actual rendered fps: $_renderedFrameCount');
+              _renderedFrameCount = 0;
+              _lastFpsPrintTime = now;
+            }
+          }).catchError((e) {
+            print('图片解码失败: $e');
+          });
+        }
+      }
+      // if the buffer is empty, do nothing and keep the previous content
+    });
   }
 
   @override
@@ -510,7 +419,7 @@ class TalkViewLogic extends BaseGetXController {
     stopProcessingAudio();
 
     // clear the image cache
-    _imageCache.clear();
+    // _imageCache.clear();
     state.oneMinuteTimeTimer?.cancel(); // cancel the old timer
     state.oneMinuteTimeTimer = null; // cancel the old timer
     state.oneMinuteTime.value = 0;
@@ -518,6 +427,10 @@
     _streamSubscription?.cancel();
     _isListening = false;
 
+    // release the video render timer
+    videoRenderTimer?.cancel();
+    videoRenderTimer = null;
+
     super.onClose();
   }
@@ -526,6 +439,9 @@
     stopProcessingAudio();
     // reset the expected data
     StartChartManage().reSetDefaultTalkExpect();
+    // release the video render timer
+    videoRenderTimer?.cancel();
+    videoRenderTimer = null;
     super.dispose();
   }
diff --git a/lib/talk/starChart/views/talkView/talk_view_page.dart b/lib/talk/starChart/views/talkView/talk_view_page.dart
index f9159cee..264e0252 100644
--- a/lib/talk/starChart/views/talkView/talk_view_page.dart
+++ b/lib/talk/starChart/views/talkView/talk_view_page.dart
@@ -135,16 +135,12 @@ class _TalkViewPageState extends State
             child: SizedBox.expand(
               child: RotatedBox(
                 quarterTurns: startChartManage.rotateAngle ~/ 90,
-                child: Obx(
-                  () => state.currentImage.value != null
-                      ? RawImage(
-                          image: state.currentImage.value,
-                          width: ScreenUtil().scaleWidth,
-                          height: ScreenUtil().scaleHeight,
-                          fit: BoxFit.cover,
-                          filterQuality: FilterQuality.high,
-                        )
-                      : Container(color: Colors.transparent),
+                child: RawImage(
+                  image: state.currentImage.value,
+                  width: ScreenUtil().scaleWidth,
+                  height: ScreenUtil().scaleHeight,
+                  fit: BoxFit.cover,
+                  filterQuality: FilterQuality.high,
                 ),
               ),
             ),
@@ -160,35 +156,33 @@ class _TalkViewPageState extends State
                         style: TextStyle(color: Colors.black, fontSize: 26.sp),
                       ))
                   : Container()),
-          Obx(
-            () => state.listData.value.isNotEmpty &&
-                    state.oneMinuteTime.value > 0
-                ? Positioned(
-                    top: ScreenUtil().statusBarHeight + 75.h,
-                    width: 1.sw,
-                    child: Obx(
-                      () {
-                        final String sec = (state.oneMinuteTime.value % 60)
-                            .toString()
-                            .padLeft(2, '0');
-                        final String min = (state.oneMinuteTime.value ~/ 60)
-                            .toString()
-                            .padLeft(2, '0');
-                        return Row(
-                          mainAxisAlignment: MainAxisAlignment.center,
-                          children: [
-                            Text(
-                              '$min:$sec',
-                              style: TextStyle(
-                                  fontSize: 26.sp, color: Colors.white),
-                            ),
-                          ],
-                        );
-                      },
-                    ),
-                  )
-                : Container(),
-          ),
+          Obx(() =>
+              state.listData.value.isNotEmpty && state.oneMinuteTime.value > 0
+                  ? Positioned(
+                      top: ScreenUtil().statusBarHeight + 75.h,
+                      width: 1.sw,
+                      child: Obx(
+                        () {
+                          final String sec = (state.oneMinuteTime.value % 60)
+                              .toString()
+                              .padLeft(2, '0');
+                          final String min = (state.oneMinuteTime.value ~/ 60)
+                              .toString()
+                              .padLeft(2, '0');
+                          return Row(
+                            mainAxisAlignment: MainAxisAlignment.center,
+                            children: [
+                              Text(
+                                '$min:$sec',
+                                style: TextStyle(
+                                    fontSize: 26.sp, color: Colors.white),
+                              ),
+                            ],
+                          );
+                        },
+                      ),
+                    )
+                  : Container()),
           Positioned(
             bottom: 10.w,
             child: Container(
@@ -458,7 +452,6 @@ class _TalkViewPageState extends State
               // if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
               //     state.listData.value.length > 0) {
               // logic.udpOpenDoorAction();
-              logic.remoteOpenLock();
               // }
               // if (UDPManage().remoteUnlock == 1) {
               //   logic.udpOpenDoorAction();
@@ -466,6 +459,7 @@
               // } else {
              //   logic.showToast('请在锁设置中开启远程开锁'.tr);
               // }
+              logic.remoteOpenLock();
             },
           )
         ]);
diff --git a/lib/talk/starChart/views/talkView/talk_view_state.dart b/lib/talk/starChart/views/talkView/talk_view_state.dart
index aafc8605..02f83efb 100644
--- a/lib/talk/starChart/views/talkView/talk_view_state.dart
+++ b/lib/talk/starChart/views/talkView/talk_view_state.dart
@@ -90,6 +90,5 @@ class TalkViewState {
   RxBool isLongPressing = false.obs; // rotation angle (in radians)
   RxBool hasAudioData = false.obs; // whether audio data has been received
   RxInt lastAudioTimestamp = 0.obs; // timestamp of the last received audio data
-  // image state variable
-  final Rx currentImage = Rx(null);
+  Rx currentImage = Rx(null);
 }