优化视频bug

This commit is contained in:
sky_min 2025-11-10 16:50:45 +08:00
parent f9ee17de21
commit 6b6a754cf9
2 changed files with 273 additions and 245 deletions

View File

@ -56,16 +56,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_frameProcessCount = 0;
_lastFrameProcessTime = now;
//
if (_actualFps < state.targetFps * 0.7) {
//
state.targetFps = (state.targetFps * 0.9).clamp(15.0, 60.0) as int;
//
final targetFps = state.targetFps.toDouble();
final actualRatio = _actualFps / targetFps;
//
if (actualRatio < 0.4) {
//
state.targetFps = (targetFps * 0.6).round().clamp(15, 60);
_startFrameProcessTimer();
} else if (_actualFps > state.targetFps * 1.2 && state.targetFps < 30.0) {
} else if (actualRatio < 0.6) {
//
state.targetFps = (targetFps * 0.8).round().clamp(15, 60);
_startFrameProcessTimer();
} else if (actualRatio > 1.8 && targetFps < 25) {
//
state.targetFps = (state.targetFps * 1.1).clamp(15.0, 30.0) as int;
state.targetFps = (targetFps * 1.15).round().clamp(15, 30);
_startFrameProcessTimer();
}
//
}
}
@ -128,25 +137,33 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
height: StartChartManage().videoHeight,
codecType: 'h264',
);
// 使
final timeoutFuture = Future.delayed(const Duration(seconds: 3), () => null);
final decoderFuture = VideoDecodePlugin.initDecoder(config);
// textureId
final textureId = await VideoDecodePlugin.initDecoder(config);
final textureId = await Future.any([decoderFuture, timeoutFuture]);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
AppLog.log('视频解码器初始化成功textureId=$textureId');
//
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('已经开始渲染=======');
// loading
Future.microtask(() => state.isLoading.value = false);
});
} else {
AppLog.log('视频解码器初始化失败');
AppLog.log('视频解码器初始化失败或超时');
state.isLoading.value = false;
}
//
_startFrameProcessTimer();
} catch (e) {
AppLog.log('初始化视频解码器错误: $e');
state.isLoading.value = false;
//
await Future.delayed(const Duration(seconds: 2));
await Future.delayed(const Duration(seconds: 1));
if (!Get.isRegistered<TalkViewNativeDecodeLogic>()) {
return; //
}
@ -185,13 +202,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
/// H264帧到缓冲区
void _addFrameToBuffer(
List<int> frameData,
TalkDataH264Frame_FrameTypeE frameType,
int pts,
int frameSeq,
int frameSeqI,
ScpMessage scpMessage,
) {
List<int> frameData,
TalkDataH264Frame_FrameTypeE frameType,
int pts,
int frameSeq,
int frameSeqI,
ScpMessage scpMessage,
) {
// frameSeq较小时阈值也小
if (!_pendingStreamReset && _lastFrameSeq != null && frameType == TalkDataH264Frame_FrameTypeE.I && frameSeq < _lastFrameSeq!) {
int dynamicThreshold = _getFrameSeqRolloverThreshold(_lastFrameSeq!);
@ -237,8 +254,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
} else {
//
if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
//
if ((_lastFrameSeq! - frameSeq).abs() > 50) { //
AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
}
}
_lastFrameSeq = frameSeq;
}
@ -252,33 +272,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
'scpMessage': scpMessage,
};
// P/B帧
//
if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
_manageBufferOverflow(frameType);
}
//
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
state.h264FrameBuffer.removeAt(0);
}
state.h264FrameBuffer.removeAt(0);
}
//
if (state.h264FrameBuffer.length > state.maxFrameBufferSize * 0.8) {
//
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize * 0.9) {
// P帧
int pbIndex = state.h264FrameBuffer.indexWhere((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] < frameSeq - 100);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
// P帧
state.h264FrameBuffer.removeAt(0);
}
}
}
//
state.h264FrameBuffer.add(frameMap);
}
@ -286,10 +289,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
///
void _startFrameProcessTimer() {
//
state.frameProcessTimer?.cancel();
_stopFrameProcessTimer();
//
final int intervalMs = (1000 / state.targetFps).round();
final int intervalMs = max(16, min(40, (1000 / state.targetFps).round()));
//
state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
@ -298,23 +301,56 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
}
/// Quickly trims the H.264 frame buffer and resets frame-tracking state.
///
/// Keeps only the 3 most recent buffered frames so playback can resume
/// promptly after a reset, then clears the sequence trackers, the decoded
/// I-frame cache, and the processing flag.
void _clearFrameBufferQuickly() {
  final buffer = state.h264FrameBuffer;
  // Drop everything except the newest 3 frames, if we hold more than 3.
  final excess = buffer.length - 3;
  if (excess > 0) {
    buffer.removeRange(0, excess);
  }
  // Reset sequence tracking so the next incoming frames are accepted cleanly.
  _lastFrameSeq = null;
  lastDecodedIFrameSeq = null;
  _decodedIFrames.clear();
  state.isProcessingFrame = false;
}
/// Stops the periodic frame-processing timer, if one is running.
///
/// Deliberately does NOT clear the H.264 frame buffer: buffered frames are
/// kept so processing can resume without waiting for a fresh I-frame.
void _stopFrameProcessTimer() {
  final timer = state.frameProcessTimer;
  if (timer != null) {
    timer.cancel();
    state.frameProcessTimer = null;
  }
  // Buffer clearing was intentionally removed from this teardown path:
  // state.h264FrameBuffer.clear();
  state.isProcessingFrame = false;
}
/// Evicts exactly one frame from the H.264 buffer when it is full.
///
/// When the incoming frame is an I-frame, a stale non-I frame (one whose
/// frameSeq trails the last accepted sequence by more than 100) is the
/// preferred eviction target so key frames are preserved; otherwise — and
/// for incoming P/B frames — the oldest buffered frame is dropped.
/// Callers must only invoke this when the buffer is non-empty (overflow).
void _manageBufferOverflow(TalkDataH264Frame_FrameTypeE newFrameType) {
  final buffer = state.h264FrameBuffer;
  if (newFrameType != TalkDataH264Frame_FrameTypeE.I) {
    // Incoming P/B frame: simply drop the oldest entry.
    buffer.removeAt(0);
    return;
  }
  // Incoming I-frame: try to sacrifice an old P/B frame first.
  final staleCutoff = (_lastFrameSeq ?? 0) - 100;
  final staleIndex = buffer.indexWhere((f) =>
      f['frameType'] != TalkDataH264Frame_FrameTypeE.I &&
      f['frameSeq'] < staleCutoff);
  // Fall back to the oldest frame when no stale P/B frame exists.
  buffer.removeAt(staleIndex != -1 ? staleIndex : 0);
}
///
void _processNextFrameFromBuffer() async {
_monitorFrameProcessingPerformance();
final startTime = DateTime.now().microsecondsSinceEpoch;
//
final bufferLength = state.h264FrameBuffer.length;
//
if (bufferLength > 30 && state.targetFps < 60) {
_adjustFrameProcessFrequency(state.targetFps * 1.5);
}
//
else if (bufferLength < 10 && state.targetFps > 25) {
_adjustFrameProcessFrequency(state.targetFps * 0.8);
}
//
if (state.isProcessingFrame) {
return;
@ -326,166 +362,131 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
try {
// I帧frameSeq最小的I帧消费
final iFrames = state.h264FrameBuffer.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) =>
(a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(
targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
state.isProcessingFrame = true;
// 使
Map<String, dynamic>? frameToProcess;
//
final bufferLength = state.h264FrameBuffer.length;
//
if (bufferLength > 50) {
//
_temporarilyIncreaseProcessFrequency();
} else if (bufferLength < 5) {
//
_adjustFrameProcessFrequency((state.targetFps * 0.9).toDouble());
}
//
int frameIndex = -1;
// I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
// I帧相关的P帧
if (lastDecodedIFrameSeq != null) {
// I帧关联的P帧
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
frame['frameSeqI'] == lastDecodedIFrameSeq) {
frameToProcess = frame;
frameIndex = i;
break;
}
}
final validPFrames =
state.h264FrameBuffer.where((f) =>
// I帧关联的P帧
final validPFrames = state.h264FrameBuffer
.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == lastDecodedIFrameSeq).toList();
f['frameSeqI'] == lastDecodedIFrameSeq)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) =>
(a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
frameIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] == minPFrame['frameSeq'] &&
f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(
targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
}
// P帧I帧
if (frameToProcess == null) {
int earliestIframeIndex = -1;
int earliestIframeSeq = 999999;
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
final frameSeq = frame['frameSeq'] as int;
if (frameSeq < earliestIframeSeq) {
earliestIframeSeq = frameSeq;
frameToProcess = frame;
earliestIframeIndex = i;
// P帧I帧
if (frameIndex == -1) {
final iFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
frameIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrame['frameSeq'],
);
}
}
//
if (frameIndex >= 0) {
final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(frameIndex);
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData != null &&
frameType != null &&
frameSeq != null &&
frameSeqI != null &&
pts != null &&
state.textureId.value != null) {
final int pluginFrameType = frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1;
//
try {
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: pluginFrameType,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
).timeout(const Duration(milliseconds: 20)); //
// I帧序号
if (frameType == TalkDataH264Frame_FrameTypeE.I) {
lastDecodedIFrameSeq = frameSeq;
}
} catch (e) {
AppLog.log('发送帧数据超时或失败: $e');
}
}
if (frameToProcess != null) {
frameIndex = earliestIframeIndex;
}
// I帧到来
} //
if (frameToProcess != null && frameIndex >= 0) {
final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(
frameIndex);
// ...
}
} finally {
state.isProcessingFrame = false;
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// > 5ms
if (durationMs > 5) {
debugPrint('[_processNextFrameFromBuffer] 耗时: ${durationMs.toStringAsFixed(2)} ms');
// 使
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
// -
if (durationMs > 30) {
AppLog.log('帧处理耗时过长: ${durationMs.toStringAsFixed(2)} ms, 缓冲区长度: ${state.h264FrameBuffer.length}');
}
}
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
}
//
if (durationMs > 16.67) { // (60fps)
AppLog.log('帧处理耗时过长: ${durationMs.toStringAsFixed(2)} ms, 缓冲区长度: ${state.h264FrameBuffer.length}');
}
// One-shot timer guarding against overlapping temporary FPS boosts.
Timer? _tempAdjustTimer;

/// Temporarily boosts the frame-processing frequency to drain a backlog.
///
/// Raises the target FPS by 50% (capped at 60), then schedules a restore of
/// the previous rate after 2 seconds. Calls made while a boost is already
/// active are ignored.
void _temporarilyIncreaseProcessFrequency() {
  if (_tempAdjustTimer != null) {
    // A boost is already in flight; do not stack another one.
    return;
  }
  final previousFps = state.targetFps;
  final boostedFps = min(previousFps * 1.5, 60.0).toInt();
  _adjustFrameProcessFrequency(boostedFps.toDouble());
  // Restore the original rate once the backlog has had time to drain.
  _tempAdjustTimer = Timer(const Duration(seconds: 2), () {
    _adjustFrameProcessFrequency(previousFps.toDouble());
    _tempAdjustTimer = null;
  });
}
void _adjustFrameProcessFrequency(double newFps) {
@ -493,14 +494,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_startFrameProcessTimer(); //
}
///
void _stopFrameProcessTimer() {
state.frameProcessTimer?.cancel();
state.frameProcessTimer = null;
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
}
//
void initiateAnswerCommand() {
StartChartManage().startTalkAcceptTimer();
@ -690,7 +683,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
@override
void onClose() {
// _closeH264File();
//
_stopFrameProcessTimer();
@ -708,11 +700,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.oneMinuteTimeTimer = null; //
state.oneMinuteTime.value = 0;
//
if (state.textureId.value != null) {
VideoDecodePlugin.releaseDecoder();
Future.microtask(() => state.textureId.value = null);
}
//
_releaseVideoDecoderAsync();
//
_streamSubscription?.cancel();
@ -721,7 +710,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
StartChartManage().reSetDefaultTalkExpect();
StartChartManage().stopTalkExpectMessageTimer();
VideoDecodePlugin.releaseDecoder();
//
_batchProcessTimer?.cancel();
@ -734,6 +722,20 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_bufferedAudioFrames.clear();
super.onClose();
}
/// Releases the native video decoder without letting teardown hang.
///
/// The platform release call is bounded by a 500 ms timeout; whether it
/// succeeds, times out, or throws, the texture id is cleared afterwards so
/// the UI stops referencing the (possibly dead) texture. The clear is done
/// on a microtask — presumably to defer the reactive update out of the
/// current synchronous frame; TODO(review): confirm that intent.
Future<void> _releaseVideoDecoderAsync() async {
  try {
    if (state.textureId.value != null) {
      // Bound the native release so onClose cannot stall indefinitely.
      await VideoDecodePlugin.releaseDecoder().timeout(const Duration(milliseconds: 500));
      Future.microtask(() => state.textureId.value = null);
    }
  } catch (e) {
    AppLog.log('异步释放视频解码器失败: $e');
    // Even when the release timed out or failed, still drop the texture id
    // so nothing keeps rendering against a released decoder.
    Future.microtask(() => state.textureId.value = null);
  }
}
///
void _handleInvalidTalkStatus() {
@ -945,10 +947,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.currentQuality.value = quality;
TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
final audioType = talkExpectReq.audioType;
// loading状态
state.isLoading.value = true;
int width = 864;
int height = 480;
switch (quality) {
case '高清':
talkExpectReq = TalkExpectReq(
@ -971,16 +976,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
_pendingResetWidth = width;
_pendingResetHeight = height;
//
await _resetDecoderForNewStream(width, height);
///
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq);
// loadingframeSeq回绕检测
// frameSeq回绕检测标志
// _pendingStreamReset = false;
// _pendingResetWidth = width;
// _pendingResetHeight = height;
//
await Future.wait([
//
_resetDecoderForNewStream(width, height),
//
Future.microtask(() =>
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq))
]).timeout(const Duration(seconds: 2)); //
}
void _initHdOptions() {
@ -1001,14 +1005,24 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
_stopFrameProcessTimer();
//
//
_clearFrameBufferQuickly();
// - 使
if (state.textureId.value != null) {
await VideoDecodePlugin.releaseDecoder();
state.textureId.value = null;
try {
//
await VideoDecodePlugin.releaseDecoder().timeout(const Duration(milliseconds: 300));
state.textureId.value = null;
} catch (e) {
AppLog.log('释放解码器超时或失败: $e');
// 使
state.textureId.value = null;
}
}
//
await Future.delayed(Duration(milliseconds: 50));
//
await Future.delayed(Duration(milliseconds: 2));
//
final config = VideoDecoderConfig(
@ -1018,36 +1032,45 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
);
//
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
state.textureId.value = textureId;
AppLog.log('frameSeq回绕后解码器初始化成功textureId=$textureId');
try {
// 使
final textureId = await VideoDecodePlugin.initDecoder(config)
.timeout(const Duration(milliseconds: 1500));
//
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('已经开始渲染=======');
// loading
if (textureId != null) {
state.textureId.value = textureId;
AppLog.log('解码器初始化成功textureId=$textureId');
//
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('开始渲染=======');
// loading
state.isLoading.value = false;
});
//
_startFrameProcessTimer();
//
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
_lastFrameSeq = null;
lastDecodedIFrameSeq = null;
hasSps = false;
hasPps = false;
spsCache = null;
ppsCache = null;
} else {
AppLog.log('解码器初始化失败');
state.isLoading.value = false;
});
//
_startFrameProcessTimer();
//
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
_lastFrameSeq = null;
hasSps = false;
hasPps = false;
spsCache = null;
ppsCache = null;
} else {
AppLog.log('frameSeq回绕后解码器初始化失败');
}
} catch (e) {
AppLog.log('解码器初始化超时或错误: $e');
state.isLoading.value = false;
}
} catch (e) {
AppLog.log('frameSeq回绕时解码器初始化错误: $e');
AppLog.log('解码器操作错误: $e');
state.isLoading.value = false;
}
}

View File

@ -47,6 +47,11 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage> wit
);
state.animationController.repeat();
//
SchedulerBinding.instance.addPostFrameCallback((_) {
//
state.animationController.duration = const Duration(milliseconds: 500);
});
//StatusListener
state.animationController.addStatusListener((AnimationStatus status) {
if (status == AnimationStatus.completed) {
@ -101,12 +106,12 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage> wit
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
filterQuality: FilterQuality.low,
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
filterQuality: FilterQuality.low,
),
),
),