1.视频对讲优化--优化帧处理定时器启动逻辑和优化缓冲区动态调整

2.排查bug并优化
This commit is contained in:
sky_min 2025-11-21 17:29:45 +08:00
parent cddf11485f
commit 42a4e88c73

View File

@ -49,13 +49,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int _networkQualityScore = 5; // 1-55 int _networkQualityScore = 5; // 1-55
int _frameDropCount = 0; int _frameDropCount = 0;
int _totalFrameCount = 0; int _totalFrameCount = 0;
//
int _devicePerformanceScore = 5; // 1-55
int bufferSize = 5; // int bufferSize = 25; //
int audioBufferSize = 20; // 2 int audioBufferSize = 20; // 2
List<int>? _cachedSps;
List<int>? _cachedPps;
bool _waitingForCompleteIFrame = false;
int _frameProcessCount = 0; int _frameProcessCount = 0;
int _lastFrameProcessTime = 0; int _lastFrameProcessTime = 0;
double _actualFps = 0.0; double _actualFps = 0.0;
@ -113,7 +115,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}// }//
} }
/// ///
void _adjustBufferSizeDynamically() { void _adjustBufferSizeDynamically() {
// //
final double dropRate = _totalFrameCount > 0 final double dropRate = _totalFrameCount > 0
@ -133,89 +135,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_networkQualityScore = 5; // _networkQualityScore = 5; //
} }
// // iOS平台使用更小的缓冲区以降低延迟
final int calculatedBufferSize = _calculateOptimalBufferSize( if (Platform.isIOS) {
networkQuality: _networkQualityScore, if (_networkQualityScore >= 4) {
devicePerformance: _devicePerformanceScore state.maxFrameBufferSize = 8; //
); } else if (_networkQualityScore >= 3) {
state.maxFrameBufferSize = 12; //
// } else {
if ((state.maxFrameBufferSize - calculatedBufferSize).abs() > 3) { state.maxFrameBufferSize = 15; //
bufferSize = calculatedBufferSize; }
AppLog.log('动态调整缓冲区大小至: $calculatedBufferSize (网络质量: $_networkQualityScore, 设备性能: $_devicePerformanceScore)'); } else {
// Android平台原有逻辑
if (_networkQualityScore <= 2) {
state.maxFrameBufferSize = 25;
} else if (_networkQualityScore >= 4) {
state.maxFrameBufferSize = 10;
} else {
state.maxFrameBufferSize = 15;
} }
} }
/// Estimates network quality from H.264 frame-buffer occupancy.
///
/// Uses the buffer fill level as a proxy for network health: a buffer
/// filling past 80% of its maximum suggests frames are arriving faster
/// than they can be consumed (or decode is stalling), so the quality
/// score is lowered; a mostly-empty buffer (below 30%) nudges the score
/// back up. The score stays within the 1..5 range used by this class
/// (5 = best).
void _monitorNetworkCondition() {
// Current number of buffered H.264 frames.
final int bufferLength = state.h264FrameBuffer.length;
// Buffer more than 80% full: degrade the quality score (floor 1).
if (bufferLength > state.maxFrameBufferSize * 0.8) {
if (_networkQualityScore > 1) {
_networkQualityScore--;
}
} else if (bufferLength < state.maxFrameBufferSize * 0.3) {
// Buffer less than 30% full: recover the quality score (ceiling 5).
if (_networkQualityScore < 5) {
_networkQualityScore++;
}
}
}
/// Computes a target frame-buffer size from a network-quality score and a
/// device-performance score (both on a 1..5 scale, 5 = best).
///
/// Worse networks get a larger base buffer to absorb jitter; weaker
/// devices scale the buffer up (they drain it more slowly) while stronger
/// devices scale it down to reduce latency. The result is clamped to
/// 1..60 frames.
int _calculateOptimalBufferSize({required int networkQuality, required int devicePerformance}) {
// Base size chosen by network quality: worse network -> bigger buffer.
int baseBufferSize;
switch (networkQuality) {
case 1: // very poor network
baseBufferSize = 20;
break;
case 2: // poor network
baseBufferSize = 15;
break;
case 3: // average network
baseBufferSize = 10;
break;
case 4: // good network
baseBufferSize = 5;
break;
case 5: // excellent network
baseBufferSize = 3;
break;
default:
baseBufferSize = 10;
}
// Multiplier by device performance: weaker device -> larger factor.
double performanceFactor;
switch (devicePerformance) {
case 1: // very weak device
performanceFactor = 1.5;
break;
case 2: // weak device
performanceFactor = 1.2;
break;
case 3: // average device
performanceFactor = 1.0;
break;
case 4: // strong device
performanceFactor = 0.8;
break;
case 5: // very strong device
performanceFactor = 0.6;
break;
default:
performanceFactor = 1.0;
}
// Clamp the final size to the supported 1..60 frame range.
return (baseBufferSize * performanceFactor).round().clamp(1, 60);
} }
// frameSeq较小时阈值也小 // frameSeq较小时阈值也小
@ -243,10 +181,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final List<List<int>> _preIFrameCache = []; final List<List<int>> _preIFrameCache = [];
bool _hasWrittenFirstIFrame = false; bool _hasWrittenFirstIFrame = false;
// SPS/PPS状态追踪变量
bool hasSps = false;
bool hasPps = false;
// SPS/PPS缓存 // SPS/PPS缓存
List<int>? spsCache; List<int>? spsCache;
List<int>? ppsCache; List<int>? ppsCache;
@ -281,9 +215,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// //
final startTime = DateTime.now().millisecondsSinceEpoch; final startTime = DateTime.now().millisecondsSinceEpoch;
// 使
// 500ms // 500ms
final timeoutFuture = Future.delayed(Duration(milliseconds: 500), () => null); final timeoutMs = Platform.isIOS ? 300 : 500;
final timeoutFuture = Future.delayed(Duration(milliseconds: timeoutMs), () => null);
final decoderFuture = VideoDecodePlugin.initDecoder(config); final decoderFuture = VideoDecodePlugin.initDecoder(config);
// textureId // textureId
@ -313,8 +247,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.isLoading.value = false; state.isLoading.value = false;
} }
// //
// // iOS平台使用更短的延迟启动
Future.delayed(Duration(milliseconds: 50), () { final delayMs = Platform.isIOS ? 30 : 50;
Future.delayed(Duration(milliseconds: delayMs), () {
_startFrameProcessTimer(); _startFrameProcessTimer();
}); });
} catch (e) { } catch (e) {
@ -433,6 +368,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// //
if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) { if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
_manageBufferOverflow(frameType); _manageBufferOverflow(frameType);
//
_implementSmartFrameDropping();
} }
// //
@ -445,6 +382,54 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_invalidateFrameIndex(); _invalidateFrameIndex();
} }
/// Chooses a frame-dropping strategy based on platform and buffer depth.
///
/// On iOS, frames are dropped aggressively once more than 30 frames are
/// queued (latency is prioritized there). On other platforms a stale
/// P frame is dropped above 40 queued frames, with a lighter drop pass
/// between 30 and 40.
void _implementSmartFrameDropping() {
  final queued = state.h264FrameBuffer.length;
  if (Platform.isIOS) {
    if (queued > 30) {
      _dropOldFramesAggressively();
    }
    return;
  }
  if (queued > 40) {
    _dropOldPFrame();
  } else if (queued > 30) {
    _dropSomeBFrame();
  }
}
/// Evicts a single frame whose sequence number lags the newest received
/// sequence by more than 20 (used by the aggressive iOS drop path).
///
/// At most one frame is removed per call, after which the cached frame
/// index is invalidated.
void _dropOldFramesAggressively() {
  final buffer = state.h264FrameBuffer;
  final staleBefore = (_lastFrameSeq ?? 0) - 20;
  final stale = buffer.indexWhere((f) => f['frameSeq'] < staleBefore);
  // indexWhere returns -1 or an in-range index; the length check is kept
  // as an extra guard mirroring the original logic.
  if (stale != -1 && stale < buffer.length) {
    buffer.removeAt(stale);
    _invalidateFrameIndex();
  }
}
/// Evicts a single stale P frame from the H.264 buffer.
///
/// A P frame counts as stale once its sequence number falls more than 50
/// behind the last received sequence. At most one frame is removed per
/// call; the cached frame index is invalidated afterwards.
void _dropOldPFrame() {
  final staleBefore = (_lastFrameSeq ?? 0) - 50;
  var target = -1;
  for (var i = 0; i < state.h264FrameBuffer.length; i++) {
    final frame = state.h264FrameBuffer[i];
    if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
        frame['frameSeq'] < staleBefore) {
      target = i;
      break;
    }
  }
  if (target != -1) {
    state.h264FrameBuffer.removeAt(target);
    _invalidateFrameIndex();
  }
}
/// Evicts one old frame during moderate buffer congestion (30-40 queued).
///
/// NOTE(review): the method name (and the original B-frame comment) say
/// this drops B frames, yet the filter matches
/// `frameType == TalkDataH264Frame_FrameTypeE.I`, i.e. it removes stale
/// I frames whose sequence is more than 30 behind the newest. Dropping a
/// reference I frame can break decoding of P frames that depend on it —
/// confirm whether `TalkDataH264Frame_FrameTypeE.B` was intended here.
void _dropSomeBFrame() {
// Find the first I frame lagging the newest sequence by more than 30.
final bFrameIndex = state.h264FrameBuffer.indexWhere((frame) =>
frame['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
frame['frameSeq'] < (_lastFrameSeq ?? 0) - 30);
if (bFrameIndex != -1) {
state.h264FrameBuffer.removeAt(bFrameIndex);
_invalidateFrameIndex();
}
}
/// ///
void _startFrameProcessTimer() { void _startFrameProcessTimer() {
// //
@ -496,18 +481,20 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
f['frameType'] != TalkDataH264Frame_FrameTypeE.I && f['frameType'] != TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] < (_lastFrameSeq ?? 0) - 100); // f['frameSeq'] < (_lastFrameSeq ?? 0) - 100); //
if (pbIndex != -1) { if (pbIndex != -1 && pbIndex < state.h264FrameBuffer.length) {
state.h264FrameBuffer.removeAt(pbIndex); state.h264FrameBuffer.removeAt(pbIndex);
} else { } else if (state.h264FrameBuffer.isNotEmpty) {
// //
state.h264FrameBuffer.removeAt(0); state.h264FrameBuffer.removeAt(0);
} }
} else { } else {
// P帧或B帧 // P帧或B帧
if (state.h264FrameBuffer.isNotEmpty) {
state.h264FrameBuffer.removeAt(0); state.h264FrameBuffer.removeAt(0);
_invalidateFrameIndex(); _invalidateFrameIndex();
} }
} }
}
// //
void _invalidateFrameIndex() { void _invalidateFrameIndex() {
@ -548,16 +535,35 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
} }
return -1; return -1;
} }
int _findBestFrameIndex() { int _findBestFrameIndex() {
//
if (state.h264FrameBuffer.isEmpty) {
return -1;
}
// iOS平台优先处理最新的I帧以降低延迟
if (Platform.isIOS) {
final iFrameIndexes = _buildFrameIndex(TalkDataH264Frame_FrameTypeE.I);
if (iFrameIndexes.isNotEmpty) {
// I帧索引
return iFrameIndexes.last.value;
}
}
// I帧相关的P帧 // I帧相关的P帧
if (lastDecodedIFrameSeq != null) { if (lastDecodedIFrameSeq != null) {
final pFrameIndex = _findRelatedPFrame(lastDecodedIFrameSeq!); final pFrameIndex = _findRelatedPFrame(lastDecodedIFrameSeq!);
if (pFrameIndex >= 0) return pFrameIndex; //
if (pFrameIndex >= 0 && pFrameIndex < state.h264FrameBuffer.length) {
return pFrameIndex;
}
} }
// I帧 // I帧
final iFrameIndex = _findEarliestIFrame(); final iFrameIndex = _findEarliestIFrame();
if (iFrameIndex >= 0) return iFrameIndex; //
if (iFrameIndex >= 0 && iFrameIndex < state.h264FrameBuffer.length) {
return iFrameIndex;
}
// I帧 // I帧
return state.h264FrameBuffer.isNotEmpty ? 0 : -1; return state.h264FrameBuffer.isNotEmpty ? 0 : -1;
@ -588,6 +594,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
try { try {
state.isProcessingFrame = true; state.isProcessingFrame = true;
//
_implementSmartFrameDropping();
// //
final bufferLength = state.h264FrameBuffer.length; final bufferLength = state.h264FrameBuffer.length;
@ -614,7 +623,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final frameIndex = _findBestFrameIndex(); final frameIndex = _findBestFrameIndex();
// //
if (frameIndex >= 0) { if (frameIndex >= 0 && frameIndex < state.h264FrameBuffer.length) {
//
if (frameIndex >= state.h264FrameBuffer.length) {
return;
}
final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(frameIndex); final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(frameIndex);
final List<int>? frameData = frameMap['frameData']; final List<int>? frameData = frameMap['frameData'];
@ -623,7 +636,53 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final int? frameSeqI = frameMap['frameSeqI']; final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts']; final int? pts = frameMap['pts'];
if (frameData != null && // I帧状态SPS/PPS的I帧
if (_waitingForCompleteIFrame && frameType == TalkDataH264Frame_FrameTypeE.I) {
// SPS/PPS的完整帧数据
List<int> completeFrameData = [];
// SPS
if (_cachedSps != null && _cachedSps!.isNotEmpty) {
completeFrameData.addAll(_cachedSps!);
}
// PPS
if (_cachedPps != null && _cachedPps!.isNotEmpty) {
completeFrameData.addAll(_cachedPps!);
}
//
if (frameData != null) {
completeFrameData.addAll(frameData);
}
//
if (completeFrameData.isNotEmpty && state.textureId.value != null) {
final int pluginFrameType = 0; // I帧
try {
await VideoDecodePlugin.sendFrame(
frameData: completeFrameData,
frameType: pluginFrameType,
frameSeq: frameSeq!,
timestamp: pts!,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI!,
).timeout(Duration(milliseconds: 25));
//
_waitingForCompleteIFrame = false;
lastDecodedIFrameSeq = frameSeq;
} catch (e) {
AppLog.log('发送完整I帧失败: $e');
if (e is TimeoutException) {
_frameDropCount++;
}
}
}
}
//
else if (frameData != null &&
frameType != null && frameType != null &&
frameSeq != null && frameSeq != null &&
frameSeqI != null && frameSeqI != null &&
@ -634,8 +693,21 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// //
try { try {
// iOS平台使用更短的超时时间 //
final timeoutMs = Platform.isIOS ? 15 : 20; int timeoutMs;
switch (_networkQualityScore) {
case 1: //
timeoutMs = 50;
break;
case 2: //
timeoutMs = 40;
break;
case 3: //
timeoutMs = 30;
break;
default:
timeoutMs = Platform.isIOS ? 25 : 30;
}
await VideoDecodePlugin.sendFrame( await VideoDecodePlugin.sendFrame(
frameData: frameData, frameData: frameData,
frameType: pluginFrameType, frameType: pluginFrameType,
@ -648,9 +720,14 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// I帧序号 // I帧序号
if (frameType == TalkDataH264Frame_FrameTypeE.I) { if (frameType == TalkDataH264Frame_FrameTypeE.I) {
lastDecodedIFrameSeq = frameSeq; lastDecodedIFrameSeq = frameSeq;
_waitingForCompleteIFrame = false; // I帧都取消等待
} }
} catch (e) { } catch (e) {
AppLog.log('发送帧数据超时或失败: $e'); AppLog.log('发送帧数据超时或失败: $e');
//
if (e is TimeoutException) {
_frameDropCount++;
}
} }
} }
} }
@ -851,34 +928,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
Future<void> stopRecording() async {} Future<void> stopRecording() async {}
/// Benchmarks the device and stores a 1..5 score in
/// [_devicePerformanceScore] (5 = fastest).
///
/// Runs a fixed CPU workload (100k square roots), times it, and maps the
/// elapsed milliseconds onto buckets: <50ms→5, <100ms→4, <200ms→3,
/// <400ms→2, otherwise 1.
Future<void> _evaluateDevicePerformance() async {
  final watch = Stopwatch()..start();
  // Fixed micro-benchmark workload.
  for (var i = 0; i < 100000; i++) {
    sqrt(i);
  }
  watch.stop();
  final int elapsedMs = watch.elapsedMilliseconds;
  // Upper bounds (ms) for scores 5, 4, 3, 2; anything slower scores 1.
  const thresholds = [50, 100, 200, 400];
  var score = 1;
  for (var i = 0; i < thresholds.length; i++) {
    if (elapsedMs < thresholds[i]) {
      score = 5 - i;
      break;
    }
  }
  _devicePerformanceScore = score;
  AppLog.log('设备性能评估完成: $_devicePerformanceScore (耗时: ${elapsedMs}ms)');
}
Future<void> _checkRequiredPermissions() async { Future<void> _checkRequiredPermissions() async {
// //
var storageStatus = await Permission.storage.status; var storageStatus = await Permission.storage.status;
@ -901,11 +950,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
void onInit() { void onInit() {
super.onInit(); super.onInit();
//
WidgetsBinding.instance.addPostFrameCallback((_) {
_evaluateDevicePerformance();
});
// //
_startListenTalkStatus(); _startListenTalkStatus();
// //
@ -1287,7 +1331,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_pendingResetWidth = width; _pendingResetWidth = width;
_pendingResetHeight = height; _pendingResetHeight = height;
// ---使 try {
//
await Future.wait([ await Future.wait([
// //
_resetDecoderForNewStream(width, height), _resetDecoderForNewStream(width, height),
@ -1295,6 +1340,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
Future.microtask(() => Future.microtask(() =>
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq)) StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq))
]).timeout(const Duration(milliseconds: 1500)); // ]).timeout(const Duration(milliseconds: 1500)); //
} catch (e) {
AppLog.log('切换清晰度超时或失败: $e');
state.isLoading.value = false;
}
} }
void _initHdOptions() { void _initHdOptions() {
@ -1310,31 +1359,22 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// //
Future<void> _resetDecoderForNewStream(int width, int height) async { Future<void> _resetDecoderForNewStream(int width, int height) async {
try { try {
//
state.isLoading.value = true; state.isLoading.value = true;
//
_stopFrameProcessTimer(); _stopFrameProcessTimer();
//
_clearFrameBufferQuickly(); _clearFrameBufferQuickly();
// - 使 - //
if (state.textureId.value != null) { if (state.textureId.value != null) {
try { try {
//
await VideoDecodePlugin.releaseDecoder().timeout(Duration(milliseconds: 100)); await VideoDecodePlugin.releaseDecoder().timeout(Duration(milliseconds: 100));
state.textureId.value = null; state.textureId.value = null;
} catch (e) { } catch (e) {
AppLog.log('释放解码器超时或失败: $e'); AppLog.log('释放解码器超时或失败: $e');
// 使
state.textureId.value = null; state.textureId.value = null;
} }
} }
// //
await Future.delayed(Duration(milliseconds: 0));
//
final config = VideoDecoderConfig( final config = VideoDecoderConfig(
width: width, width: width,
height: height, height: height,
@ -1343,23 +1383,18 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// //
try { try {
// 使
final textureId = await VideoDecodePlugin.initDecoder(config) final textureId = await VideoDecodePlugin.initDecoder(config)
.timeout(Duration(milliseconds: 500)); .timeout(Duration(milliseconds: 300));
if (textureId != null) { if (textureId != null) {
state.textureId.value = textureId; Future.microtask(() => state.textureId.value = textureId);
AppLog.log('解码器初始化成功textureId=$textureId'); AppLog.log('解码器初始化成功textureId=$textureId');
//
VideoDecodePlugin.setOnFrameRenderedListener((textureId) { VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('开始渲染======='); //
// loading Future.microtask(() {
state.isLoading.value = false; state.isLoading.value = false;
}); });
});
//
_startFrameProcessTimer();
// //
_decodedIFrames.clear(); _decodedIFrames.clear();
@ -1367,10 +1402,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.isProcessingFrame = false; state.isProcessingFrame = false;
_lastFrameSeq = null; _lastFrameSeq = null;
lastDecodedIFrameSeq = null; lastDecodedIFrameSeq = null;
hasSps = false;
hasPps = false; // I帧
spsCache = null; _waitingForCompleteIFrame = true;
ppsCache = null;
_startFrameProcessTimer();
} else { } else {
AppLog.log('解码器初始化失败'); AppLog.log('解码器初始化失败');
state.isLoading.value = false; state.isLoading.value = false;
@ -1385,6 +1421,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
} }
} }
void _processFrame(TalkDataModel talkDataModel) { void _processFrame(TalkDataModel talkDataModel) {
final talkData = talkDataModel.talkData; final talkData = talkDataModel.talkData;
final talkDataH264Frame = talkDataModel.talkDataH264Frame; final talkDataH264Frame = talkDataModel.talkDataH264Frame;