视频对讲---排查卡顿

This commit is contained in:
sky.min 2026-01-23 11:57:09 +08:00
parent 113a6a345e
commit 47e4b86086
2 changed files with 424 additions and 143 deletions

View File

@ -28,13 +28,12 @@ import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/baseGetXController.dart';
import 'package:star_lock/tools/callkit_handler.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
import '../../../../tools/baseGetXController.dart';
class TalkViewNativeDecodeLogic extends BaseGetXController {
final TalkViewNativeDecodeState state = TalkViewNativeDecodeState();
@ -44,35 +43,48 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int audioBufferSize = 30; // 2030
//
// -------m
double networkQualityScore = 1.0; // 1.0
//
// -------m
int _lastBufferSizeAdjustmentTime = 0;
//
// -------m
int _frameCount = 0;
int _lastFpsCalculationTime = 0;
Timer? _fpsUpdateTimer;
//
// -------m
int _consecutiveFullBufferCount = 0;
static const int _maxConsecutiveFullBuffer = 5; //
//
// -------m
int _lastSlowDecodeTime = 0;
int _slowDecodeCount = 0;
static const int _slowDecodeThreshold = 50; // ()
static const int _slowDecodeAdjustmentThreshold = 3; //
//
// -------m 使
static const double _bufferUsageThreshold = 0.8; // 使80%
int _lastBufferClearTime = 0;
static const int _minClearInterval = 500; // 500ms
// -------m
int? _firstFrameReceivedTime;
int? _firstFrameRenderedTime;
//
// -------m
bool _isAdjustingBuffer = false;
static const int _minAdjustmentInterval = 3000; // 3
// -------m
int _lastFrameProcessTime = 0; //
int _lastFrameCount = 0; //
int _stutterDetectionInterval = 1000; //
double _expectedFps = 25.0; // FPS
int _frameDropThreshold = 5; //
Timer? _stutterDetectionTimer; //
// frameSeq较小时阈值也小
int _getFrameSeqRolloverThreshold(int lastSeq) {
if (lastSeq > 2000) {
@ -121,6 +133,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int? lastDecodedIFrameSeq;
// -------m
final bool _isRenderingStarted = false;
final int _lastRenderStartTime = 0;
static const int _renderStartBufferThreshold = 5; //
// -------m
final bool _isConsumingForRender = false;
static const int _consumeForRenderDuration = 2000; // 2
//
Future<void> _initVideoDecoder() async {
try {
@ -147,22 +168,22 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (_firstFrameReceivedTime != null) {
final renderTime = _firstFrameRenderedTime! - _firstFrameReceivedTime!;
AppLog.log('首帧渲染耗时: ${renderTime}ms (${renderTime/1000.0}s)');
//
_startNetworkQualityMonitor();
//
_startStutterDetection();
//
_startFpsCalculation();
}
// loading
Future.microtask(() => state.isLoading.value = false);
//
_startFpsCalculation();
});
} else {
AppLog.log('视频解码器初始化失败');
}
//
_startFrameProcessTimer();
//
_startNetworkQualityMonitor();
} catch (e) {
AppLog.log('初始化视频解码器错误: $e');
//
@ -174,7 +195,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
///
// -------m
void _startFpsCalculation() {
_fpsUpdateTimer?.cancel();
_fpsUpdateTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
@ -182,7 +203,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
});
}
///
// -------m
void _updateFps() {
final currentTime = DateTime.now().millisecondsSinceEpoch;
final timeDiff = currentTime - _lastFpsCalculationTime;
@ -199,7 +220,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
///
// -------m
void _startNetworkQualityMonitor() {
Timer.periodic(Duration(milliseconds: state.networkQualityCheckIntervalMs), (timer) {
_calculateNetworkQuality();
@ -207,7 +228,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
});
}
///
// -------m
void _calculateNetworkQuality() {
//
//
@ -220,12 +241,43 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
networkQualityScore = (lossRateImpact * 0.6 + fpsImpact * 0.4).clamp(0.0, 1.0);
AppLog.log('网络质量评分: ${networkQualityScore.toStringAsFixed(2)}, 丢包率: ${state.packetLossRate.value.toStringAsFixed(2)}, FPS: ${state.decoderFps.value.toStringAsFixed(2)}');
//
_checkAndReportStutter();
// FPS是每秒显示的帧数
// AppLog.log('网络质量评分: ${networkQualityScore.toStringAsFixed(2)}, 分包丢失率: ${state.packetLossRate.value.toStringAsFixed(2)}, FPS: ${state.decoderFps.value.toStringAsFixed(2)}');
}
///
void _adjustBufferSizeBasedOnNetworkQuality() async {
final currentTime = DateTime.now().millisecondsSinceEpoch;
// -------m
/// Inspects the current decode metrics and logs stutter diagnostics.
///
/// Emits one log entry per tripped condition — low FPS, buffer overload,
/// high packet loss — plus a combined "severe stutter" warning. Logging
/// only; no state is mutated here.
void _checkAndReportStutter() {
  final double usageRatio = state.h264FrameBuffer.length / state.maxFrameBufferSize;
  final double fps = state.decoderFps.value;
  final double lossRate = state.packetLossRate.value;
  // Decoder FPS fell below 50% of the target frame rate.
  if (fps < state.targetFps * 0.5) {
    AppLog.log('视频卡顿检测: FPS过低! 目标FPS: ${state.targetFps}, 实际FPS: ${fps.toStringAsFixed(2)}, 丢包率: ${lossRate.toStringAsFixed(2)}, 网络质量: ${networkQualityScore.toStringAsFixed(2)}');
  }
  // Frame buffer occupancy exceeded 90% of capacity.
  if (usageRatio > 0.9) {
    AppLog.log('视频卡顿检测: 缓冲区严重过载! 占用率: ${(usageRatio * 100).toStringAsFixed(1)}% (${state.h264FrameBuffer.length}/${state.maxFrameBufferSize})');
  }
  // Packet loss exceeded 10%.
  if (lossRate > 0.1) {
    AppLog.log('视频卡顿检测: 丢包率过高! 丢包率: ${(lossRate * 100).toStringAsFixed(1)}%, 目标FPS: ${state.targetFps}, 实际FPS: ${fps.toStringAsFixed(2)}');
  }
  // Severe stutter: any single one of these thresholds is enough.
  final bool severeStutter = networkQualityScore < 0.4 ||
      fps < state.targetFps * 0.4 ||
      usageRatio > 0.85 ||
      lossRate > 0.15;
  if (severeStutter) {
    AppLog.log('严重卡顿警告: 网络质量=${networkQualityScore.toStringAsFixed(2)}, FPS=${fps.toStringAsFixed(2)}/${state.targetFps}, 丢包率=${(lossRate * 100).toStringAsFixed(1)}%, 缓冲区=${(usageRatio * 100).toStringAsFixed(1)}%');
  }
}
// -------m
Future<void> _adjustBufferSizeBasedOnNetworkQuality() async {
final int currentTime = DateTime.now().millisecondsSinceEpoch;
//
if (currentTime - _lastBufferSizeAdjustmentTime < _minAdjustmentInterval || _isAdjustingBuffer) {
@ -259,6 +311,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('缓冲区大小调整为: $newBufferSize (网络质量评分: ${networkQualityScore.toStringAsFixed(2)})');
}
//
if (networkQualityScore < 0.3) {
AppLog.log('网络质量严重不佳,可能导致视频卡顿: 网络质量评分 ${networkQualityScore.toStringAsFixed(2)}, 目标FPS: ${state.targetFps}, 实际FPS: ${state.decoderFps.value.toStringAsFixed(2)}, 丢包率: ${state.packetLossRate.value.toStringAsFixed(2)}');
}
_lastBufferSizeAdjustmentTime = currentTime;
await Future.delayed(const Duration(milliseconds: 100)); //
_isAdjustingBuffer = false;
@ -302,6 +359,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int frameSeqI,
ScpMessage scpMessage,
) {
//
_optimizeBufferForRender();
// frameSeq较小时阈值也小
if (!_pendingStreamReset && _lastFrameSeq != null && frameType == TalkDataH264Frame_FrameTypeE.I && frameSeq < _lastFrameSeq!) {
int dynamicThreshold = _getFrameSeqRolloverThreshold(_lastFrameSeq!);
@ -373,6 +432,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_consecutiveFullBufferCount = 0;
}
// -------m 使
final double bufferUsage = state.h264FrameBuffer.length / state.maxFrameBufferSize;
if (bufferUsage >= _bufferUsageThreshold) {
final int currentTime = DateTime.now().millisecondsSinceEpoch;
if (currentTime - _lastBufferClearTime > _minClearInterval) {
_performPreemptiveCleanup();
_lastBufferClearTime = currentTime;
}
}
// P帧
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
@ -388,41 +457,159 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.h264FrameBuffer.add(frameMap);
}
///
// -------m
void _handleConsecutiveFullBuffer() {
AppLog.log('缓冲区连续满载 $_consecutiveFullBufferCount 次,执行紧急清理');
// I帧及关联的P帧P帧
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
if (iFrames.length > 1) {
// I帧
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// I帧
for (int i = 0; i < iFrames.length - 1; i++) {
state.h264FrameBuffer.remove(iFrames[i]);
//
if (_isRenderingStarted && _isConsumingForRender) {
// I帧和其关联的P帧
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
if (iFrames.isNotEmpty) {
// I帧
final latestIFrame = iFrames.reduce((a, b) => (a['frameSeq'] as int) > (b['frameSeq'] as int) ? a : b);
final int latestIFrameSeq = latestIFrame['frameSeq'];
// I帧关联的P帧
final validPFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == latestIFrameSeq)
.toList();
// I帧和其关联的P帧
state.h264FrameBuffer.clear();
state.h264FrameBuffer.add(latestIFrame);
state.h264FrameBuffer.addAll(validPFrames);
}
}
//
int targetSize = state.maxFrameBufferSize ~/ 2;
while (state.h264FrameBuffer.length > targetSize) {
// P帧I帧
int pIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pIndex != -1) {
state.h264FrameBuffer.removeAt(pIndex);
continue;
} else {
// I帧及关联的P帧P帧
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
if (iFrames.length > 1) {
// I帧
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// I帧
for (int i = 0; i < iFrames.length - 1; i++) {
state.h264FrameBuffer.remove(iFrames[i]);
}
}
// P帧
if (state.h264FrameBuffer.length > targetSize) {
state.h264FrameBuffer.removeAt(0);
//
int targetSize = state.maxFrameBufferSize ~/ 2;
while (state.h264FrameBuffer.length > targetSize) {
// P帧I帧
int pIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pIndex != -1) {
state.h264FrameBuffer.removeAt(pIndex);
continue;
}
// P帧
if (state.h264FrameBuffer.length > targetSize) {
state.h264FrameBuffer.removeAt(0);
}
}
}
AppLog.log('紧急清理完成,当前缓冲区大小: ${state.h264FrameBuffer.length}');
}
// -------m
/// Periodic buffer hygiene: drops buffered P-frames whose reference I-frame
/// is far older than the most recently decoded I-frame, since such frames
/// can no longer be decoded into a useful picture.
void _periodicBufferOptimization() {
  // Nothing to prune until at least one I-frame has been decoded.
  if (lastDecodedIFrameSeq != null) {
    // P-frames referencing an I-frame more than 10 sequence numbers behind
    // the last decoded I-frame are treated as stale.
    // NOTE(review): this plain `<` comparison ignores frameSeq rollover;
    // other code in this file uses _getFrameSeqRolloverThreshold — confirm
    // whether wraparound needs handling here too.
    int threshold = lastDecodedIFrameSeq! - 10;
    // Iterate backwards so removeAt(i) does not shift unvisited indices.
    for (int i = state.h264FrameBuffer.length - 1; i >= 0; i--) {
      final frame = state.h264FrameBuffer[i];
      if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
        int refIFrameSeq = frame['frameSeqI'];
        if (refIFrameSeq < threshold) {
          // The I-frame this P-frame depends on is too old — discard it.
          state.h264FrameBuffer.removeAt(i);
          AppLog.log('清理过期P帧seq: ${frame['frameSeq']}, 引用的I帧seq: $refIFrameSeq');
        }
      }
    }
  }
}
// -------m 使
/// Adapts the frame-processing cadence — and, in extremes, the buffer
/// capacity — to the current frame-buffer occupancy.
void _adjustProcessingStrategyByBufferUsage() {
  double bufferUsage = state.h264FrameBuffer.length / state.maxFrameBufferSize;
  // Fast path while actively consuming frames for render: shortest interval.
  // NOTE(review): _isRenderingStarted and _isConsumingForRender are declared
  // `final ... = false` elsewhere in this diff, which would make this branch
  // unreachable — confirm whether they were meant to be mutable.
  if (_isRenderingStarted && _isConsumingForRender) {
    state.frameProcessIntervalMs = 2; // drain as fast as possible
    return;
  }
  // Occupancy above 80%: speed up draining.
  if (bufferUsage > 0.8) {
    // NOTE(review): this guard requires the current interval to be >= 10,
    // but the only values assigned in view are 5 (default) and 2, so the
    // speedup may never fire — verify the intended threshold.
    if (state.frameProcessIntervalMs >= 10) {
      state.frameProcessIntervalMs = 2; // accelerated pacing
      AppLog.log('缓冲区使用率高(${(bufferUsage * 100).toStringAsFixed(1)}%),加快帧处理速度');
    }
  } else if (bufferUsage < 0.3 && state.frameProcessIntervalMs == 2) {
    // Occupancy back to normal: restore the regular cadence.
    state.frameProcessIntervalMs = 5; // default pacing
    AppLog.log('缓冲区使用率正常(${(bufferUsage * 100).toStringAsFixed(1)}%),恢复正常帧处理速度');
  }
  // Occupancy above 95%: temporarily grow the buffer instead of dropping
  // frames, bounded by the configured adaptive maximum.
  if (bufferUsage > 0.95 && state.maxFrameBufferSize < state.adaptiveBufferSizeMax) {
    int newBufferSize = state.maxFrameBufferSize + 2;
    if (newBufferSize <= state.adaptiveBufferSizeMax) {
      state.maxFrameBufferSize = newBufferSize;
      AppLog.log('缓冲区使用率极高,临时扩大缓冲区到 $newBufferSize');
    }
  }
}
// -------m
/// Preemptively shrinks the frame buffer to half capacity before it
/// overflows, discarding the least valuable frames first.
///
/// Removal priority: (1) P-frames whose reference I-frame is older than the
/// last decoded I-frame, (2) any remaining P-frames, (3) the oldest frames
/// of any type, until the target size is reached.
void _performPreemptiveCleanup() {
  // Target: 50% of the configured capacity.
  int targetSize = state.maxFrameBufferSize ~/ 2;
  int framesToRemove = state.h264FrameBuffer.length - targetSize;
  if (framesToRemove <= 0) return;
  int removedCount = 0;
  // Pass 1: drop stale P-frames (their reference I-frame was superseded).
  // Backward iteration keeps removeAt(i) from disturbing unvisited indices.
  for (int i = state.h264FrameBuffer.length - 1; i >= 0 && removedCount < framesToRemove; i--) {
    final frame = state.h264FrameBuffer[i];
    if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
      // Check whether this P-frame references an already-outdated I-frame.
      int refIFrameSeq = frame['frameSeqI'];
      if (lastDecodedIFrameSeq != null && refIFrameSeq < lastDecodedIFrameSeq!) {
        state.h264FrameBuffer.removeAt(i);
        removedCount++;
      }
    }
  }
  // Pass 2: still over budget — drop any P-frames, newest first.
  if (removedCount < framesToRemove) {
    for (int i = state.h264FrameBuffer.length - 1; i >= 0 && removedCount < framesToRemove; i--) {
      if (state.h264FrameBuffer[i]['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
        state.h264FrameBuffer.removeAt(i);
        removedCount++;
      }
    }
  }
  // Pass 3: last resort — evict the oldest frames regardless of type.
  while (removedCount < framesToRemove && state.h264FrameBuffer.length > targetSize) {
    state.h264FrameBuffer.removeAt(0); // oldest entry sits at the head
    removedCount++;
  }
}
///
void _startFrameProcessTimer() {
//
@ -453,15 +640,122 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
// 使
_adjustProcessingStrategyByBufferUsage();
try {
// I帧frameSeq最小的I帧消费
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
if (iFrames.isNotEmpty) {
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
//
bool isBufferHalfFull = state.h264FrameBuffer.length >= (state.maxFrameBufferSize ~/ 2);
//
bool isEarlyRenderPhase = _isRenderingStarted &&
(DateTime.now().millisecondsSinceEpoch - _lastRenderStartTime < 3000);
if (isEarlyRenderPhase || isBufferHalfFull) {
await _processLatestFrame(isBufferHalfFull);
}
} finally {
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// > 5ms
if (durationMs > 5) {
debugPrint('[_processNextFrameFromBuffer] 耗时: ${durationMs.toStringAsFixed(2)} ms');
// 使
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
}
}
}
///
Future<void> _processLatestFrame(bool isBufferHalfFull) async {
// I帧
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
final latestIFrame = iFrames.first;
final latestIFrameSeq = latestIFrame['frameSeq'];
//final targetIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
// frameSeq排序I帧
final targetIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == latestIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = latestIFrameSeq;
//
final int decodeStartTime = DateTime.now().millisecondsSinceEpoch;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
//
final decodeEndTime = DateTime.now().millisecondsSinceEpoch;
// -------m
final decodeDuration = decodeEndTime - decodeStartTime;
if (decodeDuration > _slowDecodeThreshold) {
_slowDecodeCount++;
if (decodeEndTime - _lastSlowDecodeTime < 5000) { // 5
if (_slowDecodeCount >= _slowDecodeAdjustmentThreshold) {
//
_handleSlowDecodePerformance();
_slowDecodeCount = 0; //
}
} else {
_slowDecodeCount = 1; //
}
_lastSlowDecodeTime = decodeEndTime;
} else {
//
if (decodeEndTime - _lastSlowDecodeTime > 10000) { // 10
_slowDecodeCount = 0;
}
}
state.isProcessingFrame = false;
_frameCount++; //
//
if (state.h264FrameBuffer.length > state.maxFrameBufferSize * 0.8) {
AppLog.log('视频卡顿警告: 缓冲区占用过高 (${state.h264FrameBuffer.length}/${state.maxFrameBufferSize}), 可能出现卡顿');
}
return;
}
// I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
if (lastDecodedIFrameSeq != null) {
final validPFrames =
state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) => (b['frameSeq'] as int).compareTo(a['frameSeq'] as int));
final latestPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == latestPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
@ -482,20 +776,23 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;
//
final decodeStartTime = DateTime.now().millisecondsSinceEpoch;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
//
final decodeEndTime = DateTime.now().millisecondsSinceEpoch;
//
// -------m
final decodeDuration = decodeEndTime - decodeStartTime;
if (decodeDuration > _slowDecodeThreshold) {
_slowDecodeCount++;
@ -517,92 +814,22 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
state.isProcessingFrame = false;
_frameCount++; //
// -------m
_frameCount++;
// -------m
if (state.h264FrameBuffer.length > state.maxFrameBufferSize * 0.8) {
AppLog.log('视频卡顿警告: 缓冲区占用过高 (${state.h264FrameBuffer.length}/${state.maxFrameBufferSize}), 可能出现卡顿');
}
return;
}
// I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
if (lastDecodedIFrameSeq != null) {
final validPFrames =
state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
final decodeStartTime = DateTime.now().millisecondsSinceEpoch;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
final decodeEndTime = DateTime.now().millisecondsSinceEpoch;
//
final decodeDuration = decodeEndTime - decodeStartTime;
if (decodeDuration > _slowDecodeThreshold) {
_slowDecodeCount++;
if (decodeEndTime - _lastSlowDecodeTime < 5000) { // 5
if (_slowDecodeCount >= _slowDecodeAdjustmentThreshold) {
//
_handleSlowDecodePerformance();
_slowDecodeCount = 0; //
}
} else {
_slowDecodeCount = 1; //
}
_lastSlowDecodeTime = decodeEndTime;
} else {
//
if (decodeEndTime - _lastSlowDecodeTime > 10000) { // 10
_slowDecodeCount = 0;
}
}
state.isProcessingFrame = false;
_frameCount++; //
return;
}
}
// I帧到来
} finally {
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// > 10ms
if (durationMs > 10) {
debugPrint('[_processNextFrameFromBuffer] 耗时: ${durationMs.toStringAsFixed(2)} ms');
// 使
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
}
}
// I帧到来
}
///
// -------m
void _handleSlowDecodePerformance() {
AppLog.log('检测到连续慢解码,触发性能优化');
@ -613,6 +840,60 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
// -------m
/// (Re)arms the periodic stutter-detection timer.
///
/// Any previously scheduled timer is cancelled first so at most one
/// detection loop runs at a time.
void _startStutterDetection() {
  _stutterDetectionTimer?.cancel();
  final period = Duration(milliseconds: _stutterDetectionInterval);
  _stutterDetectionTimer = Timer.periodic(period, (_) => _checkForStutter());
}
// -------m
/// Compares frames processed since the previous tick against the expected
/// frame rate and logs stutter / frame-drop warnings.
///
/// Also accumulates the (positive) estimated dropped-frame count into
/// state.droppedFrames when a stutter is detected.
void _checkForStutter() {
  final currentTime = DateTime.now().millisecondsSinceEpoch;
  final currentFrameCount = state.totalFrames.value;
  // Skip the very first tick — no baseline to diff against yet.
  // NOTE(review): the `_lastFrameCount != 0` guard also skips any tick whose
  // previous sample was legitimately 0 frames — confirm that is intended.
  if (_lastFrameProcessTime != 0 && _lastFrameCount != 0) {
    final timeDiff = currentTime - _lastFrameProcessTime;
    final frameDiff = currentFrameCount - _lastFrameCount;
    // Observed FPS over the elapsed window (0.0 if no time passed).
    final actualFps = timeDiff > 0 ? (frameDiff / (timeDiff / 1000.0)) : 0.0;
    // Frames we would expect at the target rate over the same window; the
    // shortfall (truncated toward zero) is the estimated drop count.
    final expectedFrameCount = _expectedFps * (timeDiff / 1000.0);
    final droppedFrameCount = (expectedFrameCount - frameDiff).toInt();
    // Stutter: observed FPS below 60% of the expected FPS.
    if (actualFps < _expectedFps * 0.6) {
      AppLog.log('视频卡顿检测: 期望FPS: ${_expectedFps.toStringAsFixed(2)}, 实际FPS: ${actualFps.toStringAsFixed(2)}, 时间间隔: ${timeDiff}ms, 期望帧数: ${expectedFrameCount.toStringAsFixed(0)}, 实际帧数: ${frameDiff}, 丢弃帧数: ${droppedFrameCount}');
      // Only count a positive shortfall; a surplus is not a drop.
      state.droppedFrames.value += droppedFrameCount > 0 ? droppedFrameCount : 0;
    }
    // Separate warning once the shortfall crosses the drop threshold.
    if (droppedFrameCount > _frameDropThreshold) {
      AppLog.log('视频丢帧警告: 期望帧数: ${expectedFrameCount.toStringAsFixed(0)}, 实际帧数: ${frameDiff}, 丢弃帧数: ${droppedFrameCount}');
    }
  }
  // Record this tick as the baseline for the next comparison.
  _lastFrameProcessTime = currentTime;
  _lastFrameCount = currentFrameCount;
}
// -------m
/// Logs when the pre-render frame queue reaches its low-water mark.
///
/// Fires only before rendering has started, once more than three frames are
/// buffered and at least one of them is an I-frame. Logging only — the
/// buffer itself is not modified.
void _optimizeBufferForRender() {
  // Nothing to report once rendering is underway or the queue is still short.
  if (_isRenderingStarted || state.h264FrameBuffer.length <= 3) return;
  final bool hasIFrame =
      state.h264FrameBuffer.any((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I);
  if (hasIFrame) {
    AppLog.log('[Render] 渲染启动outputFrameQueue已达低水位: ${state.h264FrameBuffer.length}');
  }
}
///
void _stopFrameProcessTimer() {
state.frameProcessTimer?.cancel();

View File

@ -109,12 +109,12 @@ class TalkViewNativeDecodeState {
// H264帧缓冲区相关
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区
int maxFrameBufferSize = 8; // 28
int maxFrameBufferSize = 6; // 86
final int targetFps = 25; // ,native的缓冲区
final int adaptiveBufferSizeMin = 2; //
final int adaptiveBufferSizeMax = 8; // 68
final int adaptiveBufferSizeMax = 6; // 86
final int networkQualityCheckIntervalMs = 2000; // ()
int frameProcessIntervalMs = 5; // ()105
int frameProcessIntervalMs = 5; // ()5ms平衡性能和流畅度
Timer? frameProcessTimer; //
bool isProcessingFrame = false; //
int lastProcessedTimestamp = 0; //