diff --git a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
index ba87d327..9a6cbcbf 100644
--- a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
+++ b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
@@ -49,13 +49,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   int _networkQualityScore = 5; // score 1-5, 5 is best
   int _frameDropCount = 0;
   int _totalFrameCount = 0;
-  // Device performance evaluation
-  int _devicePerformanceScore = 5; // score 1-5, 5 is best
-  int bufferSize = 5; // initialized to the default size
+  int bufferSize = 25; // initialized to the default size
   int audioBufferSize = 20; // audio buffers 2 frames by default
+  List? _cachedSps;
+  List? _cachedPps;
+  bool _waitingForCompleteIFrame = false;
+
   int _frameProcessCount = 0;
   int _lastFrameProcessTime = 0;
   double _actualFps = 0.0;
@@ -113,7 +115,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   }// Note: avoid raising the frame rate too aggressively, which can cause stuttering
   }
 
-  /// Dynamically adjust the buffer size based on network and device conditions
+  /// Dynamically adjust the buffer size based on network conditions
   void _adjustBufferSizeDynamically() {
     // Compute the network quality score
     final double dropRate = _totalFrameCount > 0
@@ -133,91 +135,27 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       _networkQualityScore = 5; // excellent
     }
 
-    // Combine device performance and network quality to compute the buffer size
-    final int calculatedBufferSize = _calculateOptimalBufferSize(
-      networkQuality: _networkQualityScore,
-      devicePerformance: _devicePerformanceScore
-    );
-
-    // Only adjust when the buffer size changes significantly
-    if ((state.maxFrameBufferSize - calculatedBufferSize).abs() > 3) {
-      bufferSize = calculatedBufferSize;
-      AppLog.log('Dynamically adjusted buffer size to: $calculatedBufferSize (network quality: $_networkQualityScore, device performance: $_devicePerformanceScore)');
-    }
-  }
-
-  /// Monitor network conditions
-  void _monitorNetworkCondition() {
-    // Frame processing time can serve as an indirect indicator of network conditions
-    // If frame processing takes too long, it may indicate high latency or packet loss
-
-    // Buffer length can also be used to judge network conditions
-    final int bufferLength = state.h264FrameBuffer.length;
-
-    // If the buffer is frequently near its maximum, consumption cannot keep up with production and the network is likely poor
-    if (bufferLength > state.maxFrameBufferSize * 0.8) {
-      // Network conditions may be degrading
-      if (_networkQualityScore > 1) {
-        _networkQualityScore--;
+    // Use a smaller buffer on iOS to reduce latency
+    if (Platform.isIOS) {
+      if (_networkQualityScore >= 4) {
+        state.maxFrameBufferSize = 8; // smallest buffer when the network is good
+      } else if (_networkQualityScore >= 3) {
+        state.maxFrameBufferSize = 12; // medium buffer for an average network
+      } else {
+        state.maxFrameBufferSize = 15; // slightly larger buffer for a poor network
       }
-    } else if (bufferLength < state.maxFrameBufferSize * 0.3) {
-      // Network conditions are good
-      if (_networkQualityScore < 5) {
-        _networkQualityScore++;
+    } else {
+      // Original logic for Android
+      if (_networkQualityScore <= 2) {
+        state.maxFrameBufferSize = 25;
+      } else if (_networkQualityScore >= 4) {
+        state.maxFrameBufferSize = 10;
+      } else {
+        state.maxFrameBufferSize = 15;
       }
     }
   }
 
-  /// Compute the optimal buffer size
-  int _calculateOptimalBufferSize({required int networkQuality, required int devicePerformance}) {
-    // The base buffer size depends on network quality
-    int baseBufferSize;
-    switch (networkQuality) {
-      case 1: // very poor network
-        baseBufferSize = 20;
-        break;
-      case 2: // poor network
-        baseBufferSize = 15;
-        break;
-      case 3: // average network
-        baseBufferSize = 10;
-        break;
-      case 4: // good network
-        baseBufferSize = 5;
-        break;
-      case 5: // excellent network
-        baseBufferSize = 3;
-        break;
-      default:
-        baseBufferSize = 10;
-    }
-
-    // Adjust according to device performance
-    double performanceFactor;
-    switch (devicePerformance) {
-      case 1: // very poor device performance
-        performanceFactor = 1.5;
-        break;
-      case 2: // poor device performance
-        performanceFactor = 1.2;
-        break;
-      case 3: // average device performance
-        performanceFactor = 1.0;
-        break;
-      case 4: // good device performance
-        performanceFactor = 0.8;
-        break;
-      case 5: // excellent device performance
-        performanceFactor = 0.6;
-        break;
-      default:
-        performanceFactor = 1.0;
-    }
-
-    // Final buffer size, clamped between 1 and 60
-    return (baseBufferSize * performanceFactor).round().clamp(1, 60);
-  }
-
   // Rollover threshold, adjusted dynamically; smaller when frameSeq is small
   int _getFrameSeqRolloverThreshold(int lastSeq) {
     if (lastSeq > 2000) {
@@ -243,10 +181,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   final List<List<int>> _preIFrameCache = [];
   bool _hasWrittenFirstIFrame = false;
 
-  // Added: SPS/PPS state tracking flags
-  bool hasSps = false;
-  bool hasPps = false;
-
   // Added: SPS/PPS cache
   List? spsCache;
   List? ppsCache;
@@ -281,9 +215,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // Record the start time
     final startTime = DateTime.now().millisecondsSinceEpoch;
 
-    // Use a timeout to avoid waiting too long
     // Set a 500ms timeout to avoid an excessive wait
-    final timeoutFuture = Future.delayed(Duration(milliseconds: 500), () => null);
+    final timeoutMs = Platform.isIOS ? 300 : 500;
+    final timeoutFuture = Future.delayed(Duration(milliseconds: timeoutMs), () => null);
     final decoderFuture = VideoDecodePlugin.initDecoder(config);
 
     // Initialize the decoder and obtain the textureId
@@ -313,8 +247,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       state.isLoading.value = false;
     }
     // Start the timer that sends frame data
-    // Delay starting the frame processing timer so the UI feels faster
-    Future.delayed(Duration(milliseconds: 50), () {
+    // Use a shorter startup delay on iOS
+    final delayMs = Platform.isIOS ? 30 : 50;
+    Future.delayed(Duration(milliseconds: delayMs), () {
       _startFrameProcessTimer();
     });
   } catch (e) {
@@ -433,6 +368,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // Smarter buffer management
     if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
       _manageBufferOverflow(frameType);
+      // Apply the smart frame-dropping strategy
+      _implementSmartFrameDropping();
     }
 
     // If the buffer still exceeds the maximum size, remove the oldest frame
@@ -445,6 +382,54 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     _invalidateFrameIndex();
   }
 
+  /// Smart frame-dropping strategy to keep playback smooth
+  void _implementSmartFrameDropping() {
+    final bufferLength = state.h264FrameBuffer.length;
+
+    // More aggressive frame dropping on iOS
+    if (Platform.isIOS && bufferLength > 30) {
+      _dropOldFramesAggressively();
+    } else if (bufferLength > 40) {
+      // Original logic for other platforms
+      _dropOldPFrame();
+    } else if (bufferLength > 30) {
+      _dropSomeBFrame();
+    }
+  }
+
+  void _dropOldFramesAggressively() {
+    // Find and remove an older frame to reduce latency
+    final oldFrameIndex = state.h264FrameBuffer.indexWhere((frame) =>
+        frame['frameSeq'] < (_lastFrameSeq ?? 0) - 20);
+
+    if (oldFrameIndex != -1 && oldFrameIndex < state.h264FrameBuffer.length) {
+      state.h264FrameBuffer.removeAt(oldFrameIndex);
+      _invalidateFrameIndex();
+    }
+  }
+
+  void _dropOldPFrame() {
+    // Find and remove an older P-frame
+    final pFrameIndex = state.h264FrameBuffer.indexWhere((frame) =>
+        frame['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
+        frame['frameSeq'] < (_lastFrameSeq ?? 0) - 50);
+
+    if (pFrameIndex != -1) {
+      state.h264FrameBuffer.removeAt(pFrameIndex);
+      _invalidateFrameIndex();
+    }
+  }
+
+  void _dropSomeBFrame() {
+    // If B-frames exist, drop one of the older non-I frames
+    final bFrameIndex = state.h264FrameBuffer.indexWhere((frame) =>
+        frame['frameType'] != TalkDataH264Frame_FrameTypeE.I &&
+        frame['frameSeq'] < (_lastFrameSeq ?? 0) - 30);
+
+    if (bFrameIndex != -1) {
+      state.h264FrameBuffer.removeAt(bFrameIndex);
+      _invalidateFrameIndex();
+    }
+  }
+
   /// Start the frame processing timer
   void _startFrameProcessTimer() {
     // Cancel any existing timer
@@ -496,16 +481,18 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
           f['frameType'] != TalkDataH264Frame_FrameTypeE.I &&
          f['frameSeq'] < (_lastFrameSeq ?? 0) - 100); // only remove sufficiently old frames
 
-      if (pbIndex != -1) {
+      if (pbIndex != -1 && pbIndex < state.h264FrameBuffer.length) {
         state.h264FrameBuffer.removeAt(pbIndex);
-      } else {
+      } else if (state.h264FrameBuffer.isNotEmpty) {
         // If no suitable old frame is found, remove the oldest frame
         state.h264FrameBuffer.removeAt(0);
       }
     } else {
       // The new frame is a P- or B-frame; remove the oldest frame
-      state.h264FrameBuffer.removeAt(0);
-      _invalidateFrameIndex();
+      if (state.h264FrameBuffer.isNotEmpty) {
+        state.h264FrameBuffer.removeAt(0);
+        _invalidateFrameIndex();
+      }
     }
   }
@@ -548,16 +535,35 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     }
     return -1;
   }
+
+  int _findBestFrameIndex() {
+    // Check whether the buffer is empty
+    if (state.h264FrameBuffer.isEmpty) {
+      return -1;
+    }
+
+    // On iOS, prefer the newest I-frame to reduce latency
+    if (Platform.isIOS) {
+      final iFrameIndexes = _buildFrameIndex(TalkDataH264Frame_FrameTypeE.I);
+      if (iFrameIndexes.isNotEmpty) {
+        // Return the index of the newest I-frame
+        return iFrameIndexes.last.value;
+      }
+    }
 
     // Prefer P-frames related to the most recently decoded I-frame
     if (lastDecodedIFrameSeq != null) {
       final pFrameIndex = _findRelatedPFrame(lastDecodedIFrameSeq!);
-      if (pFrameIndex >= 0) return pFrameIndex;
+      // Add a bounds check
+      if (pFrameIndex >= 0 && pFrameIndex < state.h264FrameBuffer.length) {
+        return pFrameIndex;
+      }
     }
 
     // Find the earliest I-frame
     final iFrameIndex = _findEarliestIFrame();
-    if (iFrameIndex >= 0) return iFrameIndex;
+    // Add a bounds check
+    if (iFrameIndex >= 0 && iFrameIndex < state.h264FrameBuffer.length) {
+      return iFrameIndex;
+    }
 
     // If there is no I-frame, process the earliest frame
     return state.h264FrameBuffer.isNotEmpty ? 0 : -1;
@@ -588,6 +594,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     try {
       state.isProcessingFrame = true;
 
+      // Apply the smart frame-dropping strategy
+      _implementSmartFrameDropping();
+
       // Dynamically adjust the processing strategy based on the buffer length
       final bufferLength = state.h264FrameBuffer.length;
@@ -614,7 +623,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       final frameIndex = _findBestFrameIndex();
 
       // Process the selected frame
-      if (frameIndex >= 0) {
+      if (frameIndex >= 0 && frameIndex < state.h264FrameBuffer.length) {
+        // Check the bounds again to be safe
+        if (frameIndex >= state.h264FrameBuffer.length) {
+          return;
+        }
         final Map frameMap = state.h264FrameBuffer.removeAt(frameIndex);
 
         final List? frameData = frameMap['frameData'];
@@ -623,7 +636,53 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         final int? frameSeqI = frameMap['frameSeqI'];
        final int? pts = frameMap['pts'];
 
-        if (frameData != null &&
+        // If waiting for a complete I-frame, check whether this is an I-frame carrying SPS/PPS
+        if (_waitingForCompleteIFrame && frameType == TalkDataH264Frame_FrameTypeE.I) {
+          // Build the complete frame data including SPS/PPS
+          List completeFrameData = [];
+
+          // Add the SPS (if present)
+          if (_cachedSps != null && _cachedSps!.isNotEmpty) {
+            completeFrameData.addAll(_cachedSps!);
+          }
+
+          // Add the PPS (if present)
+          if (_cachedPps != null && _cachedPps!.isNotEmpty) {
+            completeFrameData.addAll(_cachedPps!);
+          }
+
+          // Add the original frame data
+          if (frameData != null) {
+            completeFrameData.addAll(frameData);
+          }
+
+          // Send the complete frame
+          if (completeFrameData.isNotEmpty && state.textureId.value != null) {
+            final int pluginFrameType = 0; // I-frame
+
+            try {
+              await VideoDecodePlugin.sendFrame(
+                frameData: completeFrameData,
+                frameType: pluginFrameType,
+                frameSeq: frameSeq!,
+                timestamp: pts!,
+                splitNalFromIFrame: true,
+                refIFrameSeq: frameSeqI!,
+              ).timeout(Duration(milliseconds: 25));
+
+              // On success, clear the waiting state
+              _waitingForCompleteIFrame = false;
+              lastDecodedIFrameSeq = frameSeq;
+            } catch (e) {
+              AppLog.log('Failed to send the complete I-frame: $e');
+              if (e is TimeoutException) {
+                _frameDropCount++;
+              }
+            }
+          }
+        }
+        // Handle other frames normally
+        else if (frameData != null &&
             frameType != null &&
             frameSeq != null &&
             frameSeqI != null &&
@@ -634,8 +693,21 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
           // Send the frame asynchronously with timeout handling
           try {
-            // Use a shorter timeout on iOS
-            final timeoutMs = Platform.isIOS ? 15 : 20;
+            // Adjust the timeout dynamically based on network quality
+            int timeoutMs;
+            switch (_networkQualityScore) {
+              case 1: // very poor network
+                timeoutMs = 50;
+                break;
+              case 2: // poor network
+                timeoutMs = 40;
+                break;
+              case 3: // average network
+                timeoutMs = 30;
+                break;
+              default:
+                timeoutMs = Platform.isIOS ? 25 : 30;
+            }
             await VideoDecodePlugin.sendFrame(
               frameData: frameData,
               frameType: pluginFrameType,
@@ -648,9 +720,14 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
             // Update the sequence number of the last decoded I-frame
             if (frameType == TalkDataH264Frame_FrameTypeE.I) {
               lastDecodedIFrameSeq = frameSeq;
+              _waitingForCompleteIFrame = false; // any received I-frame clears the waiting state
             }
           } catch (e) {
             AppLog.log('Sending frame data timed out or failed: $e');
+            // Decide from the error type whether to bump the dropped-frame counter
+            if (e is TimeoutException) {
+              _frameDropCount++;
+            }
           }
         }
       }
@@ -851,34 +928,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   Future stopRecording() async {}
 
-  /// Evaluate device performance
-  Future _evaluateDevicePerformance() async {
-    final stopwatch = Stopwatch()..start();
-
-    // Run a compute-intensive task to estimate performance
-    for (int i = 0; i < 100000; i++) {
-      sqrt(i);
-    }
-
-    stopwatch.stop();
-    final int elapsedMs = stopwatch.elapsedMilliseconds;
-
-    // Rate device performance by the elapsed computation time
-    if (elapsedMs < 50) {
-      _devicePerformanceScore = 5; // excellent
-    } else if (elapsedMs < 100) {
-      _devicePerformanceScore = 4; // good
-    } else if (elapsedMs < 200) {
-      _devicePerformanceScore = 3; // average
-    } else if (elapsedMs < 400) {
-      _devicePerformanceScore = 2; // poor
-    } else {
-      _devicePerformanceScore = 1; // very poor
-    }
-
-    AppLog.log('Device performance evaluation finished: $_devicePerformanceScore (took ${elapsedMs}ms)');
-  }
-
   Future _checkRequiredPermissions() async {
     // Check whether the required permissions have been granted
     var storageStatus = await Permission.storage.status;
@@ -901,11 +950,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   void onInit() {
     super.onInit();
 
-    // Evaluate device performance asynchronously
-    WidgetsBinding.instance.addPostFrameCallback((_) {
-      _evaluateDevicePerformance();
-    });
-
     // Start listening for the talk status
     _startListenTalkStatus();
     // Assign the status once before the listener is active
@@ -1287,14 +1331,19 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     _pendingResetWidth = width;
     _pendingResetHeight = height;
 
-    // Run both operations in parallel for efficiency --- use a shorter overall timeout
-    await Future.wait([
-      // Reset the decoder immediately
-      _resetDecoderForNewStream(width, height),
-      // Update the expected data to send
-      Future.microtask(() =>
-          StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq))
-    ]).timeout(const Duration(milliseconds: 1500)); // overall timeout
+    try {
+      // Run both operations in parallel for efficiency, with a shorter overall timeout
+      await Future.wait([
+        // Reset the decoder immediately
+        _resetDecoderForNewStream(width, height),
+        // Update the expected data to send
+        Future.microtask(() =>
+            StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq))
+      ]).timeout(const Duration(milliseconds: 1500)); // overall timeout
+    } catch (e) {
+      AppLog.log('Switching video quality timed out or failed: $e');
+      state.isLoading.value = false;
+    }
   }
 
   void _initHdOptions() {
@@ -1310,31 +1359,22 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   // Added: method to reset the decoder
   Future _resetDecoderForNewStream(int width, int height) async {
     try {
-      // Show the loading state
       state.isLoading.value = true;
-      // Stop the frame processing timer first
       _stopFrameProcessTimer();
-
-      // Clear the buffer quickly for a faster response
       _clearFrameBufferQuickly();
 
-      // Release the old decoder - use the shortest timeout - avoid unnecessary delay
+      // Release the old decoder
       if (state.textureId.value != null) {
         try {
-          // Very short timeout to avoid blocking
           await VideoDecodePlugin.releaseDecoder().timeout(Duration(milliseconds: 100));
           state.textureId.value = null;
         } catch (e) {
           AppLog.log('Releasing the decoder timed out or failed: $e');
-          // Continue even if the release fails
           state.textureId.value = null;
         }
       }
 
-      // Minimize the waiting time
-      await Future.delayed(Duration(milliseconds: 0));
-
-      // Create the new decoder configuration
+      // Create the optimized decoder configuration
       final config = VideoDecoderConfig(
         width: width,
         height: height,
@@ -1343,34 +1383,30 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       // Initialize the new decoder
       try {
-        // Use a shorter timeout
         final textureId = await VideoDecodePlugin.initDecoder(config)
-            .timeout(Duration(milliseconds: 500));
-
+            .timeout(Duration(milliseconds: 300));
         if (textureId != null) {
-          state.textureId.value = textureId;
+          Future.microtask(() => state.textureId.value = textureId);
           AppLog.log('Decoder initialized successfully: textureId=$textureId');
 
-          // Set up the frame-rendered listener
           VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
-            AppLog.log('Rendering started =======');
-            // Only dismiss the loading state once the first frame has actually rendered
-            state.isLoading.value = false;
+            // Respond quickly to the first rendered frame
+            Future.microtask(() {
+              state.isLoading.value = false;
+            });
           });
 
-          // Restart the frame processing timer
-          _startFrameProcessTimer();
-
           // Reset related state
           _decodedIFrames.clear();
           state.h264FrameBuffer.clear();
           state.isProcessingFrame = false;
           _lastFrameSeq = null;
           lastDecodedIFrameSeq = null;
-          hasSps = false;
-          hasPps = false;
-          spsCache = null;
-          ppsCache = null;
+
+          // Important: mark that we must wait for an I-frame carrying complete parameter info
+          _waitingForCompleteIFrame = true;
+
+          _startFrameProcessTimer();
         } else {
           AppLog.log('Decoder initialization failed');
           state.isLoading.value = false;
@@ -1385,6 +1421,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     }
   }
 
+
   void _processFrame(TalkDataModel talkDataModel) {
     final talkData = talkDataModel.talkData;
     final talkDataH264Frame = talkDataModel.talkDataH264Frame;
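
The buffer-sizing change above replaces the old device-performance factor with a pure network-quality mapping. As a reading aid, that mapping can be expressed as a small standalone function; this is an illustrative sketch rather than the production class, the function name is hypothetical, and isIOS is passed in instead of reading Platform.isIOS so the sketch stays testable.

/// Illustrative sketch of the maxFrameBufferSize policy from the patch.
/// Thresholds and sizes mirror _adjustBufferSizeDynamically(); the helper
/// itself is hypothetical and not part of the codebase.
int targetFrameBufferSize({required int networkQualityScore, required bool isIOS}) {
  if (isIOS) {
    // iOS keeps the buffer small to minimize latency.
    if (networkQualityScore >= 4) return 8;  // good or excellent network
    if (networkQualityScore >= 3) return 12; // average network
    return 15;                               // poor network
  }
  // Android keeps the original, more conservative sizing.
  if (networkQualityScore <= 2) return 25;   // poor network
  if (networkQualityScore >= 4) return 10;   // good or excellent network
  return 15;                                 // average network
}

void main() {
  print(targetFrameBufferSize(networkQualityScore: 5, isIOS: true));  // 8
  print(targetFrameBufferSize(networkQualityScore: 2, isIOS: false)); // 25
}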
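
The thresholds behind _implementSmartFrameDropping() can likewise be read in isolation. The sketch below uses simplified stand-in types (the real buffer holds maps keyed by 'frameType' and 'frameSeq'), and it assumes the >30 branch is meant to skip I-frames, which matches the helper's name and comment.

// Illustrative sketch of the frame-dropping policy, with assumed stand-in types.
enum FrameType { i, p, b }

class BufferedFrame {
  final FrameType type;
  final int seq;
  const BufferedFrame(this.type, this.seq);
}

/// Returns the index of the frame to drop, or -1 to keep everything.
int pickFrameToDrop(List<BufferedFrame> buffer, int lastSeq, {required bool isIOS}) {
  if (isIOS && buffer.length > 30) {
    // iOS: any frame lagging the newest sequence number by more than 20.
    return buffer.indexWhere((f) => f.seq < lastSeq - 20);
  }
  if (buffer.length > 40) {
    // Other platforms: stale P-frames go first.
    return buffer.indexWhere((f) => f.type == FrameType.p && f.seq < lastSeq - 50);
  }
  if (buffer.length > 30) {
    // Then other stale non-I frames.
    return buffer.indexWhere((f) => f.type != FrameType.i && f.seq < lastSeq - 30);
  }
  return -1;
}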
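
The per-frame send timeout now scales with the same network quality score: a worse network gets a longer budget before the send is abandoned and counted against _frameDropCount. One way to read that switch is as a small lookup; the values come from the diff, the function name is illustrative.

/// Illustrative lookup for the sendFrame timeout chosen in the patch.
Duration sendFrameTimeout({required int networkQualityScore, required bool isIOS}) {
  switch (networkQualityScore) {
    case 1:
      return const Duration(milliseconds: 50); // very poor network
    case 2:
      return const Duration(milliseconds: 40); // poor network
    case 3:
      return const Duration(milliseconds: 30); // average network
    default:
      return Duration(milliseconds: isIOS ? 25 : 30); // good or excellent network
  }
}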
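
After _resetDecoderForNewStream() the logic waits for a "complete" I-frame and prepends the cached SPS/PPS before sending it to the freshly created decoder. The concatenation step, taken on its own, looks roughly like the sketch below; it assumes the cached SPS/PPS and the frame payload are byte lists (for example Annex-B NAL units with start codes), which the patch does not show explicitly, and the helper name is hypothetical.

/// Illustrative sketch: build the "complete" I-frame payload that is sent
/// while _waitingForCompleteIFrame is set. Assumes all inputs are byte lists.
List<int> assembleCompleteIFrame({
  List<int>? cachedSps,
  List<int>? cachedPps,
  required List<int> iFrameData,
}) {
  final List<int> complete = [];
  if (cachedSps != null && cachedSps.isNotEmpty) {
    complete.addAll(cachedSps); // SPS first, so the new decoder can configure itself
  }
  if (cachedPps != null && cachedPps.isNotEmpty) {
    complete.addAll(cachedPps); // then PPS
  }
  complete.addAll(iFrameData);  // finally the I-frame payload itself
  return complete;
}

Sending this assembled payload once and then clearing the waiting flag is what lets the recreated decoder start from a clean keyframe instead of stalling on P-frames that reference a discarded I-frame.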