diff --git a/lib/main/lockDetail/lockDetail/lockDetail_state.dart b/lib/main/lockDetail/lockDetail/lockDetail_state.dart
index 8aa1c1d3..da67a7b3 100755
--- a/lib/main/lockDetail/lockDetail/lockDetail_state.dart
+++ b/lib/main/lockDetail/lockDetail/lockDetail_state.dart
@@ -17,9 +17,6 @@ class LockDetailState {
   StreamSubscription? DetailLockInfo;
   StreamSubscription? SuccessfulDistributionNetworkEvent;
 
-  // Network quality state variables
-  final RxInt networkQualityScore = 1.obs; // Score 1-5, 5 is best
-  final RxString networkQualityMessage = ''.obs; // Network quality hint message
   String lockNetToken = '0';
   int differentialTime = 0; // Difference between server time and local time
   bool isHaveNetwork = true;
diff --git a/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_logic.dart b/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_logic.dart
index 2b47048d..fbb8b966 100644
--- a/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_logic.dart
+++ b/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_logic.dart
@@ -408,6 +408,9 @@ class LockMonitoringLogic extends BaseGetXController {
     _getUDPStatusRefreshUIAction();
     initRecorder();
+
+    // Automatically initiate the answer request when the page loads
+    initiateUdpAnswerAction();
   }
 
   @override
diff --git a/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_state.dart b/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_state.dart
index c0f07e83..163b6d35 100644
--- a/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_state.dart
+++ b/lib/main/lockDetail/monitoring/monitoring/lockMonitoring_state.dart
@@ -47,7 +47,7 @@ class LockMonitoringState {
   // If an answer command was sent and no reply is received, resend it every second, up to 10 times
   late Timer answerTimer = Timer(const Duration(seconds: 1), () {}); // Answer-command timer
  RxInt answerSeconds = 0.obs;
-  RxBool isClickAnswer = false.obs; // Whether the answer button was tapped
+  RxBool isClickAnswer = true.obs; // Whether the answer button was tapped
   late Timer hangUpTimer = Timer(const Duration(seconds: 1), () {}); // Hang-up-command timer
   RxInt hangUpSeconds = 0.obs;
diff --git a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
index 41bd34ac..c5da8f64 100644
--- a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
+++ b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
@@ -1,5 +1,6 @@
 import 'dart:async';
 import 'dart:io';
+import 'dart:math';
 import 'dart:ui' as ui;
 
 import 'package:flutter/foundation.dart';
@@ -37,9 +38,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
 
-  int bufferSize = 25; // Initialized to the default size
+  int bufferSize = 2; // Default size; reduced to lower latency
 
-  int audioBufferSize = 20; // Audio buffers 2 frames by default
+  int audioBufferSize = 3; // Audio buffers 3 frames by default to reduce audio latency
 
   // Rollover threshold, adjusted dynamically; smaller when frameSeq is small
   int _getFrameSeqRolloverThreshold(int lastSeq) {
@@ -88,6 +89,44 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   bool _waitingForIFrame = false;
   int? lastDecodedIFrameSeq;
+
+  // New: variables for handling a full buffer
+  int _consecutiveFullBufferCount = 0; // Consecutive times the buffer was full
+  int _maxConsecutiveFullBufferCount = 3; // Max consecutive full-buffer count before action is taken; lowered to 3 for faster response
+  bool _isAdjustingForBufferFull = false; // Whether an adjustment for a full buffer is in progress
+
+  // New: time at which the most recent video frame started rendering
+  DateTime? _lastVideoRenderTime;
+
+  // New: adaptive buffer size variables
+  int _currentBufferSize = 3; // Current buffer size
+  int _lastBufferSizeAdjustmentTime = 0; // Time of the last buffer size adjustment
+  int _bufferSizeAdjustmentCooldown = 2000; // Cooldown between buffer size adjustments (ms)
+
+  // New: network condition assessment variables
+  int _lastNetworkQualityCheckTime = 0; // Time of the last network quality check
+  int _framesProcessedSinceLastCheck = 0; // Frames processed since the last check
+  int _framesDroppedSinceLastCheck = 0; // Frames dropped since the last check
+  double _currentNetworkQualityScore = 1.0; // Current network quality score (0.0-1.0, 1.0 is best)
+
+  // Network quality assessment variables
+  List<int> _frameReceiveTimes = []; // Frame receive timestamps
+  List<int> _frameSeqList = []; // Frame sequence numbers used to compute loss
+  int _totalFramesReceived = 0; // Total frames received
+  int _lostFrames = 0; // Lost frames
+  int _lastFrameSeqNum = -1; // Sequence number of the previous frame
+  DateTime? _testStartTime; // Assessment start time
+  Timer? _networkQualityTestTimer; // Network quality assessment timer
+
+  // Packet, frame, and key-frame statistics
+  int _totalPacketsReceived = 0; // Total packets received
+  int _totalFramesReceivedCount = 0; // Total frames received
+  int _iFramesReceived = 0; // Key frames received (I-frames)
+  int _pFramesReceived = 0; // Predicted frames received (P-frames)
+  int _processedFrames = 0; // Frames processed
+  int _droppedFrames = 0; // Frames dropped
+  int _framesInBuffer = 0; // Frames added to the buffer
+  int _bufferSize = 0; // Buffer size
 
   // Initialize the video decoder
   Future<void> _initVideoDecoder() async {
@@ -128,16 +167,20 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         height: height,
         codecType: 'h264',
       );
-      AppLog.log('Decoder configured size: ${config.width}x${config.height}');
 
       // Initialize the decoder and get the textureId
       final textureId = await VideoDecodePlugin.initDecoder(config);
       if (textureId != null) {
         Future.microtask(() => state.textureId.value = textureId);
-        AppLog.log('Video decoder initialized: textureId=$textureId');
 
         VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
           AppLog.log('Rendering has started =======');
+          // Record when video rendering started
+          _lastVideoRenderTime = DateTime.now();
           // Only dismiss the loading state once the first frame has actually been rendered
-          Future.microtask(() => state.isLoading.value = false);
+          if (state.isLoading.value) {
+            Future.microtask(() => state.isLoading.value = false);
+          } else {
+            AppLog.log('Video is already rendering; keeping the current state');
+          }
         });
       } else {
         AppLog.log('Video decoder initialization failed');
@@ -243,6 +286,110 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       }
       _lastFrameSeq = frameSeq;
     }
+
+    // Record the received frame
+    recordFrameReceived(frameSeq, frameType);
+
+    // Optimization: in low-latency scenarios, keep only the newest frames to reduce delay
+    bool isVideoRendering = state.textureId.value != null &&
+        (state.isLoading.isFalse || _lastVideoRenderTime != null);
+
+    if (isVideoRendering) {
+      // If video is already rendering, prioritize the newest I-frame and its associated P-frames
+      if (frameType == TalkDataH264Frame_FrameTypeE.I) {
+        // For a new I-frame, remove old I-frames and their associated P-frames
+        _removeOldFramesForIFrame(frameSeq);
+
+        // When a new I-frame arrives, adapt the buffer size to current network conditions
+        _adjustBufferSizeForNetworkCondition();
+      } else {
+        // For a P-frame, make sure it is associated with a valid I-frame
+        _cleanOldPFrameForCurrentIFrame(frameSeq, frameSeqI);
+      }
+    } else {
+      // Video is not rendering yet; fall back to the original handling
+      if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
+        // Check whether video rendering has already started
+        bool isVideoRendering = state.textureId.value != null &&
+            (state.isLoading.isFalse || _lastVideoRenderTime != null);
+
+        // If video is already rendering, use a smarter buffer-cleanup strategy
+        if (isVideoRendering) {
+          _consecutiveFullBufferCount++;
+
+          // When the buffer has been full several times in a row, take special action
+          if (_consecutiveFullBufferCount >= _maxConsecutiveFullBufferCount) {
+
+            // Evaluate current network quality and adjust the buffer size
+            _evaluateCurrentNetworkQuality();
+
+            // Collect all non-key frames (non-I-frames) for cleanup
+            final framesToRemove = <int>[];
+            for (int i = 0; i < state.h264FrameBuffer.length; i++) {
+              if (state.h264FrameBuffer[i]['frameType'] != TalkDataH264Frame_FrameTypeE.I) {
+                framesToRemove.add(i);
+              }
+            }
+
+            // Delete back-to-front to avoid index shifting
+            framesToRemove.reversed.forEach((index) {
+              state.h264FrameBuffer.removeAt(index);
+              recordDroppedFrame();
+            });
+
+            // If there are still too many frames, keep the newest I-frame and its related P-frames
+            if (state.h264FrameBuffer.length > _currentBufferSize ~/ 2) {
+              // Find the position of the last I-frame
+              int lastIFrameIndex = -1;
+              for (int i = state.h264FrameBuffer.length - 1; i >= 0; i--) {
+                if (state.h264FrameBuffer[i]['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
+                  lastIFrameIndex = i;
+                  break;
+                }
+              }
+
+              // If an I-frame was found, keep only it and the P-frames after it; drop everything before
+              if (lastIFrameIndex > 0) {
+                for (int i = 0; i < lastIFrameIndex; i++) {
+                  state.h264FrameBuffer.removeAt(0);
+                  recordDroppedFrame();
+                }
+              } else {
+                // If there is no I-frame, keep only the newest half of the frames
+                while (state.h264FrameBuffer.length > _currentBufferSize ~/ 2) {
+                  state.h264FrameBuffer.removeAt(0);
+                  recordDroppedFrame();
+                }
+              }
+            }
+
+            _consecutiveFullBufferCount = 0; // Reset the counter
+            _isAdjustingForBufferFull = false;
+          } else {
+            // The buffer is not consecutively full; use a conservative cleanup strategy
+            int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
+            if (pbIndex != -1) {
+              state.h264FrameBuffer.removeAt(pbIndex);
+              recordDroppedFrame(); // Record the dropped frame
+            } else {
+              state.h264FrameBuffer.removeAt(0);
+              recordDroppedFrame(); // Record the dropped frame
+            }
+          }
+        } else {
+          // Video is not rendering yet; use the original strategy
+          int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
+          if (pbIndex != -1) {
+            state.h264FrameBuffer.removeAt(pbIndex);
+            recordDroppedFrame(); // Record the dropped frame
+          } else {
+            state.h264FrameBuffer.removeAt(0);
+            recordDroppedFrame(); // Record the dropped frame
+          }
+        }
+      }
+    }
+
     // Create a Map containing the frame data and type
     final Map<String, dynamic> frameMap = {
       'frameData': frameData,
@@ -253,18 +400,169 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       'scpMessage': scpMessage,
     };
 
-    // If the buffer exceeds its maximum size, drop P/B frames first
-    while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
-      int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
-      if (pbIndex != -1) {
-        state.h264FrameBuffer.removeAt(pbIndex);
-      } else {
-        state.h264FrameBuffer.removeAt(0);
-      }
-    }
-
     // Add the frame to the buffer
     state.h264FrameBuffer.add(frameMap);
+    recordFrameInBuffer(); // Record the frame entering the buffer
+
+    // Reset the consecutive-full count (only when the buffer is not full)
+    if (state.h264FrameBuffer.length < state.maxFrameBufferSize) {
+      _consecutiveFullBufferCount = 0;
+    }
+  }
+
+  // Helper: remove old I-frames and their associated P-frames
+  void _removeOldFramesForIFrame(int newIFrameSeq) {
+    // Keep the newest I-frame and its related P-frames; remove older ones
+    final List<Map<String, dynamic>> framesToKeep = [];
+
+    // Collect I-frames and P-frames separately
+    final List<Map<String, dynamic>> iFrames = [];
+    final List<Map<String, dynamic>> pFrames = [];
+
+    for (var frame in state.h264FrameBuffer) {
+      if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
+        iFrames.add(frame);
+      } else {
+        pFrames.add(frame);
+      }
+    }
+
+    // Sort I-frames by frameSeq
+    iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
+
+    // Keep the newest I-frames (including the new I-frame currently being added)
+    final int maxIFramesToKeep = 2; // Keep at most 2 I-frames to ensure a smooth transition
+    final int startIdx = max(0, iFrames.length - maxIFramesToKeep);
+
+    for (int i = startIdx; i < iFrames.length; i++) {
+      framesToKeep.add(iFrames[i]);
+    }
+
+    // For P-frames, keep only those associated with a retained I-frame
+    for (var pFrame in pFrames) {
+      int refIFrameSeq = pFrame['frameSeqI'];
+      bool shouldKeep = false;
+
+      // Check whether the I-frame referenced by this P-frame was kept
+      for (var keptIFrame in framesToKeep) {
+        if (keptIFrame['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
+            keptIFrame['frameSeq'] == refIFrameSeq) {
+          shouldKeep = true;
+          break;
+        }
+      }
+
+      if (shouldKeep) {
+        framesToKeep.add(pFrame);
+      } else {
+        recordDroppedFrame(); // Record the dropped frame
+      }
+    }
+
+    // Replace the buffer with the filtered frames
+    state.h264FrameBuffer.clear();
+    state.h264FrameBuffer.addAll(framesToKeep);
+  }
+
+  // Helper: clean up old P-frames of the current I-frame
+  void _cleanOldPFrameForCurrentIFrame(int frameSeq, int frameSeqI) {
+    // Keep the newest P-frames for the current I-frame and remove stale ones
+    final List<Map<String, dynamic>> framesToKeep = [];
+    final List<Map<String, dynamic>> framesToRemove = [];
+
+    // First, keep all frames that do not reference the current I-frame
+    for (var frame in state.h264FrameBuffer) {
+      if (frame['frameSeqI'] != frameSeqI || frame['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
+        framesToKeep.add(frame);
+      }
+    }
+
+    // Collect the P-frames referencing the current I-frame
+    final List<Map<String, dynamic>> currentIFramePFrames = [];
+    for (var frame in state.h264FrameBuffer) {
+      if (frame['frameSeqI'] == frameSeqI && frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
+        currentIFramePFrames.add(frame);
+      }
+    }
+
+    // Sort P-frames by frameSeq
+    currentIFramePFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
+
+    // Keep the newest P-frames (e.g. the most recent 5)
+    final int maxPFramesToKeep = 5;
+    final int startIdx = max(0, currentIFramePFrames.length - maxPFramesToKeep);
+
+    for (int i = startIdx; i < currentIFramePFrames.length; i++) {
+      framesToKeep.add(currentIFramePFrames[i]);
+    }
+
+    // Record the dropped P-frames
+    for (int i = 0; i < startIdx; i++) {
+      recordDroppedFrame();
+    }
+
+    // Replace the buffer with the filtered frames
+    state.h264FrameBuffer.clear();
+    state.h264FrameBuffer.addAll(framesToKeep);
+  }
+
+  /// Dynamically adjust the buffer size based on network conditions
+  void _adjustBufferSizeForNetworkCondition() {
+    // Get the current time
+    int currentTime = DateTime.now().millisecondsSinceEpoch;
+
+    // Check whether we are still inside the cooldown window
+    if (currentTime - _lastBufferSizeAdjustmentTime < _bufferSizeAdjustmentCooldown) {
+      return;
+    }
+
+    // Update the last adjustment time
+    _lastBufferSizeAdjustmentTime = currentTime;
+
+    // Adjust the buffer size based on the current network quality score
+    if (_currentNetworkQualityScore > 0.7) {
+      // Good network: use a small buffer to reduce latency
+      _currentBufferSize = state.adaptiveBufferSizeMin;
+    } else if (_currentNetworkQualityScore > 0.4) {
+      // Average network: use a medium buffer to balance latency and stability
+      _currentBufferSize = (state.adaptiveBufferSizeMin + state.adaptiveBufferSizeMax) ~/ 2;
+    } else {
+      // Poor network: use a larger buffer to reduce stuttering
+      _currentBufferSize = state.adaptiveBufferSizeMax;
+    }
+
+    AppLog.log('Adjusted buffer size for network conditions: ${_currentBufferSize} (current network quality score: ${_currentNetworkQualityScore.toStringAsFixed(2)})');
+  }
+
+  /// Evaluate the current network quality
+  void _evaluateCurrentNetworkQuality() {
+    int currentTime = DateTime.now().millisecondsSinceEpoch;
+
+    // Check whether the quality-check interval has elapsed
+    if (currentTime - _lastNetworkQualityCheckTime < state.networkQualityCheckIntervalMs) {
+      return;
+    }
+
+    _lastNetworkQualityCheckTime = currentTime;
+
+    // Compute the frame drop rate
+    double dropRate = 0.0;
+    int totalProcessed = _framesProcessedSinceLastCheck + _framesDroppedSinceLastCheck;
+    if (totalProcessed > 0) {
+      dropRate = _framesDroppedSinceLastCheck / totalProcessed;
+    }
+
+    // Update the network quality score (based on drop rate; higher means a better network)
+    _currentNetworkQualityScore = 1.0 - dropRate;
+    if (_currentNetworkQualityScore < 0) {
+      _currentNetworkQualityScore = 0.0;
+    }
+
+    AppLog.log('Network quality assessment: dropRate=${dropRate.toStringAsFixed(2)}, score=${_currentNetworkQualityScore.toStringAsFixed(2)}');
+
+    // Reset the counters
+    _framesProcessedSinceLastCheck = 0;
+    _framesDroppedSinceLastCheck = 0;
   }
 
   /// Start the frame-processing timer
@@ -272,8 +570,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // Cancel any existing timer
     state.frameProcessTimer?.cancel();
 
-    // Compute the timer interval so frames are processed at the target frame rate
-    final int intervalMs = (1000 / state.targetFps).round();
+    // Use the configured interval
+    final int intervalMs = state.frameProcessIntervalMs;
 
     // Create a new timer
     state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
@@ -984,4 +1282,215 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         break;
     }
   }
+
+  /// Record a received frame
+  void recordFrameReceived(int frameSeq, TalkDataH264Frame_FrameTypeE frameType) {
+    _totalFramesReceivedCount++;
+
+    // Update counters by frame type
+    if (frameType == TalkDataH264Frame_FrameTypeE.I) {
+      _iFramesReceived++;
+    } else if (frameType == TalkDataH264Frame_FrameTypeE.P) {
+      _pFramesReceived++;
+    }
+
+    // Loss calculation - fix: cap the allowed frame-sequence gap to avoid abnormally high loss counts
+    if (_lastFrameSeqNum != -1) {
+      if (frameSeq > _lastFrameSeqNum + 1) {
+        // Count the missing frames in between, but cap the maximum to guard against outliers
+        int gap = frameSeq - _lastFrameSeqNum - 1;
+        // Maximum allowed gap, guarding against miscounts caused by sequence rollover or anomalies
+        int maxAllowedGap = 50; // Tune as needed
+        if (gap > maxAllowedGap) {
+          // A gap this large is likely a sequence rollover or another anomaly, so do not count it as loss
+          AppLog.log('Abnormal frame sequence jump detected: gap=$gap, current frameSeq=$frameSeq, previous frameSeq=$_lastFrameSeqNum');
+        } else {
+          _lostFrames += gap;
+        }
+      } else if (frameSeq <= _lastFrameSeqNum && frameSeq < 100 && _lastFrameSeqNum > 1000) {
+        // Frame sequence rollover detected (a large value dropping back to a small one); reset the reference
+        // This usually happens when a new video stream starts or the stream restarts
+        AppLog.log('Frame sequence rollover detected: from $_lastFrameSeqNum back to $frameSeq');
+        _lastFrameSeqNum = frameSeq;
+      }
+    }
+    _lastFrameSeqNum = frameSeq;
+
+    // Record the receive time
+    _frameReceiveTimes.add(DateTime.now().millisecondsSinceEpoch);
+    _frameSeqList.add(frameSeq);
+
+    // Keep only the most recent 100 frame records
+    if (_frameReceiveTimes.length > 100) {
+      _frameReceiveTimes.removeAt(0);
+      _frameSeqList.removeAt(0);
+    }
+  }
+
+  /// Record a received packet
+  void recordPacketReceived() {
+    _totalPacketsReceived++;
+  }
+
+  /// Record a processed frame
+  void recordProcessedFrame() {
+    _processedFrames++;
+    _framesProcessedSinceLastCheck++; // Used for network quality assessment
+  }
+
+  /// Record a dropped frame
+  void recordDroppedFrame() {
+    _droppedFrames++;
+    _framesDroppedSinceLastCheck++; // Used for network quality assessment
+  }
+
+  /// Record a frame added to the buffer
+  void recordFrameInBuffer() {
+    _framesInBuffer++;
+  }
+
+  /// Start the network quality assessment
+  void startNetworkQualityAssessment() {
+    resetNetworkQualityAssessmentVariables();
+    _testStartTime = DateTime.now();
+
+    // Initialize the statistics counters
+    _bufferSize = state.maxFrameBufferSize; // Record the buffer size
+
+    // Start a timer that evaluates network quality once per second
+    _networkQualityTestTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
+      _evaluateNetworkQuality();
+    });
+  }
+
+  /// Stop the network quality assessment
+  void stopNetworkQualityAssessment() {
+    _networkQualityTestTimer?.cancel();
+    _networkQualityTestTimer = null;
+  }
+
+  /// Reset the network quality assessment variables
+  void resetNetworkQualityAssessmentVariables() {
+    _totalFramesReceived = 0;
+    _lostFrames = 0;
+    _lastFrameSeqNum = -1;
+    _frameReceiveTimes.clear();
+    _frameSeqList.clear();
+    _testStartTime = null;
+
+    // Reset the statistics counters
+    _totalPacketsReceived = 0;
+    _totalFramesReceivedCount = 0;
+    _iFramesReceived = 0;
+    _pFramesReceived = 0;
+    _processedFrames = 0;
+    _droppedFrames = 0;
+    _framesInBuffer = 0;
+  }
+
+  /// Evaluate network quality
+  void _evaluateNetworkQuality() {
+    if (_testStartTime == null) return;
+
+    final elapsed = DateTime.now().difference(_testStartTime!).inSeconds;
+    if (elapsed == 0) return;
+
+    // Compute the loss rate - make sure it never exceeds 100%
+    double lossRate = 0.0;
+    if (_totalFramesReceivedCount > 0) {
+      lossRate = (_lostFrames / _totalFramesReceivedCount) * 100;
+      // Cap the loss rate at 100%
+      if (lossRate > 100.0) {
+        lossRate = 100.0;
+      }
+    }
+
+    // Compute jitter (based on variation of the intervals between adjacent frames)
+    if (_frameReceiveTimes.length >= 2) {
+      List<int> intervals = [];
+      for (int i = 1; i < _frameReceiveTimes.length; i++) {
+        intervals.add(_frameReceiveTimes[i] - _frameReceiveTimes[i - 1]);
+      }
+    }
+
+    // Dynamically adjust the buffer size based on network quality
+    _adjustBufferSizeBasedOnNetworkQuality();
+  }
+
+  /// Determine the network quality level from loss rate, frame rate, and jitter
+  String _getNetworkQualityLevel(double lossRate, double frameRate, double jitter) {
+    if (lossRate < 1.0 && frameRate > 15.0 && jitter < 50.0) {
+      return "Excellent";
+    } else if (lossRate < 3.0 && frameRate > 10.0 && jitter < 100.0) {
+      return "Good";
+    } else if (lossRate < 5.0 && frameRate > 5.0 && jitter < 200.0) {
+      return "Fair";
+    } else {
+      return "Poor";
+    }
+  }
+
+  /// Predict the probability of video stuttering
+  String _predictStutterProbability(double lossRate, double frameRate, double jitter) {
+    if (lossRate < 2.0 && frameRate > 10.0 && jitter < 100.0) {
+      return "Low risk - video should be smooth";
+    } else if (lossRate <= 5.0 && frameRate >= 5.0 && jitter <= 200.0) {
+      return "Medium risk - slight stuttering possible";
+    } else {
+      return "High risk - severe stuttering possible";
+    }
+  }
+
+  /// Dynamically adjust the buffer size based on network quality
+  void _adjustBufferSizeBasedOnNetworkQuality() {
+    if (_testStartTime == null) return;
+
+    final elapsed = DateTime.now().difference(_testStartTime!).inSeconds;
+    if (elapsed == 0) return;
+
+    // Compute the loss rate
+    double lossRate = 0.0;
+    if (_totalFramesReceivedCount > 0) {
+      lossRate = (_lostFrames / _totalFramesReceivedCount) * 100;
+      if (lossRate > 100.0) lossRate = 100.0;
+    }
+
+    // Compute the average receive frame rate
+    final avgFrameRate = _totalFramesReceivedCount / elapsed;
+
+    // Compute jitter
+    double jitter = 0.0;
+    if (_frameReceiveTimes.length >= 2) {
+      List<int> intervals = [];
+      for (int i = 1; i < _frameReceiveTimes.length; i++) {
+        intervals.add(_frameReceiveTimes[i] - _frameReceiveTimes[i - 1]);
+      }
+
+      if (intervals.length > 1) {
+        double mean = intervals.reduce((a, b) => a + b) / intervals.length;
+        double variance = 0.0;
+        for (int interval in intervals) {
+          variance += pow(interval - mean, 2).toDouble();
+        }
+        variance /= intervals.length;
+        jitter = sqrt(variance);
+      }
+    }
+
+    // Adjust the buffer size based on network quality
+    if (lossRate < 2.0 && avgFrameRate > 15.0 && jitter < 50.0) {
+      // Excellent network - use the smallest buffer to reduce latency
+      _currentBufferSize = state.adaptiveBufferSizeMin;
+    } else if (lossRate < 5.0 && avgFrameRate > 10.0 && jitter < 100.0) {
+      // Good network - use a small-to-medium buffer
+      _currentBufferSize = (state.adaptiveBufferSizeMin + state.adaptiveBufferSizeMax) ~/ 3;
+    } else if (lossRate < 10.0 && avgFrameRate > 5.0 && jitter < 200.0) {
+      // Fair network - use a medium-to-large buffer
+      _currentBufferSize = ((state.adaptiveBufferSizeMin + state.adaptiveBufferSizeMax) ~/ 2 + state.adaptiveBufferSizeMax) ~/ 2;
+    } else {
+      // Poor network - use the largest buffer to reduce stuttering
+      _currentBufferSize = state.adaptiveBufferSizeMax;
+    }
+  }
 }
\ No newline at end of file
diff --git a/lib/talk/starChart/views/native/talk_view_native_decode_state.dart b/lib/talk/starChart/views/native/talk_view_native_decode_state.dart
index 6186f832..5ec53618 100644
--- a/lib/talk/starChart/views/native/talk_view_native_decode_state.dart
+++ b/lib/talk/starChart/views/native/talk_view_native_decode_state.dart
@@ -109,8 +109,12 @@ class TalkViewNativeDecodeState {
 
   // H264 frame buffer
   final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264 frame buffer storing frame data and type
-  int maxFrameBufferSize = 25; // Maximum buffer size
+  int maxFrameBufferSize = 3; // Maximum buffer size; reduced to lower latency
   final int targetFps = 25; // Target decode frame rate, only used to fill the native buffer quickly
+  final int adaptiveBufferSizeMin = 2; // Minimum adaptive buffer size
+  final int adaptiveBufferSizeMax = 6; // Maximum adaptive buffer size
+  final int networkQualityCheckIntervalMs = 2000; // Network quality check interval (ms)
+  final int frameProcessIntervalMs = 10; // Frame processing interval (ms); shorter for faster response
   Timer? frameProcessTimer; // Frame processing timer
   bool isProcessingFrame = false; // Whether a frame is currently being processed
   int lastProcessedTimestamp = 0; // Timestamp of the last processed frame
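Reviewer note (not part of the diff): the jitter used by _adjustBufferSizeBasedOnNetworkQuality and _getNetworkQualityLevel above is the standard deviation, in milliseconds, of the gaps between consecutive frame arrival timestamps. A minimal standalone Dart sketch of that calculation, using hypothetical arrival times rather than real stream data:

import 'dart:math';

/// Jitter (standard deviation, in ms) of the gaps between consecutive
/// arrival timestamps; returns 0 when there are too few samples.
double jitterOf(List<int> arrivalTimesMs) {
  if (arrivalTimesMs.length < 3) return 0.0;
  final intervals = <int>[
    for (int i = 1; i < arrivalTimesMs.length; i++)
      arrivalTimesMs[i] - arrivalTimesMs[i - 1],
  ];
  final mean = intervals.reduce((a, b) => a + b) / intervals.length;
  var variance = 0.0;
  for (final interval in intervals) {
    variance += pow(interval - mean, 2).toDouble();
  }
  variance /= intervals.length;
  return sqrt(variance);
}

void main() {
  // Hypothetical arrival times (ms), roughly 40 ms apart with some wobble.
  final times = [0, 42, 80, 125, 160, 205];
  print(jitterOf(times).toStringAsFixed(1)); // ≈ 3.9 ms
}

Against the thresholds used in the diff (jitter below 50 / 100 / 200 ms), a stream with this little wobble would fall into the best bucket and keep the buffer at adaptiveBufferSizeMin.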