周期性卡顿 优化

This commit is contained in:
sky_min 2025-11-03 15:49:18 +08:00
parent e16c7fa935
commit 2f1135193b
2 changed files with 357 additions and 96 deletions

View File

@ -40,7 +40,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
int bufferSize = 25; //
int bufferSize = 50; //
int audioBufferSize = 20; // 2
@ -231,10 +231,30 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// P/B帧
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
// int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
// if (pbIndex != -1) {
// state.h264FrameBuffer.removeAt(pbIndex);
// } else {
// state.h264FrameBuffer.removeAt(0);
// }
// P帧
int oldestPFrameIndex = -1;
int minPts = pts; //
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
if (oldestPFrameIndex == -1 || frame['pts'] < minPts) {
oldestPFrameIndex = i;
minPts = frame['pts'];
}
}
}
if (oldestPFrameIndex != -1) {
state.h264FrameBuffer.removeAt(oldestPFrameIndex);
} else {
// P帧
state.h264FrameBuffer.removeAt(0);
}
}
@ -244,21 +264,146 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
///
// void _startFrameProcessTimer() {
// //
// state.frameProcessTimer?.cancel();
//
// //
// final int intervalMs = (1000 / state.targetFps).round();
//
// //
// state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
// _processNextFrameFromBuffer();
// });
// AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
// }
/// Starts the periodic frame-processing timer.
///
/// Cancels any previously running timer first so at most one timer is ever
/// active, then schedules `_processNextFrameFromBuffer` at the target frame
/// rate stored in `state.targetFps`.
///
/// NOTE(review): the rendered diff registered `Timer.periodic` twice (the
/// pre-refactor call plus the new one), which would leak an untracked second
/// timer; only a single registration is kept here.
void _startFrameProcessTimer() {
  // Stop any earlier timer before creating a new one, otherwise two timers
  // would drain the frame buffer concurrently.
  _stopFrameProcessTimer();
  // Tick interval derived from the desired output frame rate.
  final int intervalMs = (1000 / state.targetFps).round();
  state.frameProcessTimer = Timer.periodic(
    Duration(milliseconds: intervalMs),
    (timer) {
      // Defer the (async) frame processing to a microtask so the timer
      // callback itself returns immediately and never blocks the tick.
      Future.microtask(_processNextFrameFromBuffer);
    },
  );
  AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
}
///
// void _processNextFrameFromBuffer() async {
// //
// if (state.isProcessingFrame) {
// return;
// }
//
// //
// if (state.h264FrameBuffer.isEmpty) {
// return;
// }
//
// state.isProcessingFrame = true;
//
// // I帧frameSeq最小的I帧消费
// final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
// iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
//
// if (iFrames.isNotEmpty) {
// // I帧I帧frameSeq
// final minIFrame = iFrames.first;
// final minIFrameSeq = minIFrame['frameSeq'];
// final targetIndex = state.h264FrameBuffer.indexWhere(
// (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
// );
// state.isProcessingFrame = true;
// final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
// if (frameMap == null) {
// state.isProcessingFrame = false;
// return;
// }
// final List<int>? frameData = frameMap['frameData'];
// final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
// final int? frameSeq = frameMap['frameSeq'];
// final int? frameSeqI = frameMap['frameSeqI'];
// final int? pts = frameMap['pts'];
// final ScpMessage? scpMessage = frameMap['scpMessage'];
// if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
// state.isProcessingFrame = false;
// return;
// }
// if (state.textureId.value == null) {
// state.isProcessingFrame = false;
// return;
// }
// lastDecodedIFrameSeq = minIFrameSeq;
// AppLog.log('送入解码器的P帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
// await VideoDecodePlugin.sendFrame(
// frameData: frameData,
// frameType: 0,
// frameSeq: frameSeq,
// timestamp: pts,
// splitNalFromIFrame: true,
// refIFrameSeq: frameSeqI,
// );
// state.isProcessingFrame = false;
// return;
// }
//
// // I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
// if (lastDecodedIFrameSeq != null) {
// final validPFrames =
// state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
// if (validPFrames.isNotEmpty) {
// validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// final minPFrame = validPFrames.first;
// final targetIndex = state.h264FrameBuffer.indexWhere(
// (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
// );
// state.isProcessingFrame = true;
// final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
// if (frameMap == null) {
// state.isProcessingFrame = false;
// return;
// }
// final List<int>? frameData = frameMap['frameData'];
// final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
// final int? frameSeq = frameMap['frameSeq'];
// final int? frameSeqI = frameMap['frameSeqI'];
// final int? pts = frameMap['pts'];
// final ScpMessage? scpMessage = frameMap['scpMessage'];
// if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
// state.isProcessingFrame = false;
// return;
// }
// if (state.textureId.value == null) {
// state.isProcessingFrame = false;
// return;
// }
// // AppLog.log('送入解码器的I帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// // 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
//
// await VideoDecodePlugin.sendFrame(
// frameData: frameData,
// frameType: 1,
// frameSeq: frameSeq,
// timestamp: pts,
// splitNalFromIFrame: true,
// refIFrameSeq: frameSeqI,
// );
// state.isProcessingFrame = false;
// return;
// }
// }
// // I帧到来
// }
/// -
/// Pulls the next decodable frame(s) from the H.264 buffer and forwards
/// them to the native decoder.
///
/// Consumption order: the oldest buffered I-frame (smallest `frameSeq`)
/// wins; only when no I-frame is buffered are P-frames consumed — and only
/// those referencing the last decoded I-frame — at most 3 per tick so the
/// buffer can catch up without monopolising a tick.
Future<void> _processNextFrameFromBuffer() async {
  // Re-entrancy guard: skip this tick if the previous one is still decoding.
  if (state.isProcessingFrame) {
    return;
  }
  if (state.h264FrameBuffer.isEmpty) {
    return;
  }
  state.isProcessingFrame = true;
  try {
    // Prefer the I-frame with the smallest frameSeq (the oldest one).
    final iFrames = state.h264FrameBuffer
        .where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
        .toList()
      ..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
    if (iFrames.isNotEmpty) {
      final minIFrame = iFrames.first;
      final targetIndex = state.h264FrameBuffer.indexWhere(
        (f) =>
            f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
            f['frameSeq'] == minIFrame['frameSeq'],
      );
      if (targetIndex != -1) {
        final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
        await _decodeFrame(frameMap);
        // Remember which I-frame subsequent P-frames must reference.
        lastDecodedIFrameSeq = minIFrame['frameSeq'] as int;
        return;
      }
    }
    // No I-frame buffered: consume P-frames referencing the last decoded
    // I-frame, in frameSeq order, batching a few per tick for smoothness.
    if (lastDecodedIFrameSeq != null) {
      final validPFrames = state.h264FrameBuffer
          .where((f) =>
              f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
              f['frameSeqI'] == lastDecodedIFrameSeq)
          .toList()
        ..sort(
            (a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
      // Process at most 3 P-frames per tick.
      final int framesToProcess = min(3, validPFrames.length);
      for (int i = 0; i < framesToProcess; i++) {
        if (state.h264FrameBuffer.isNotEmpty) {
          final pFrame = validPFrames[i];
          final targetIndex = state.h264FrameBuffer.indexWhere(
            (f) =>
                f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
                f['frameSeq'] == pFrame['frameSeq'] &&
                f['frameSeqI'] == lastDecodedIFrameSeq,
          );
          if (targetIndex != -1) {
            final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
            await _decodeFrame(frameMap);
          }
        }
      }
    }
    // Otherwise: nothing decodable yet, wait for the next I-frame.
  } catch (e) {
    AppLog.log('帧处理错误: $e');
    // Escalate: repeated failures eventually trigger a decoder reset.
    _handleDecodeError();
  } finally {
    // Always release the guard — the catch block previously cleared it too,
    // which was redundant with this finally.
    state.isProcessingFrame = false;
  }
}
// Number of decode failures observed since the last successful reset.
int _decodeErrorCount = 0;
// Failure threshold that triggers a full decoder reset.
static const int MAX_DECODE_ERRORS = 5;

/// Records one decode failure and, once [MAX_DECODE_ERRORS] failures have
/// accumulated, resets the decoder for the current stream dimensions and
/// clears the counter.
void _handleDecodeError() {
  _decodeErrorCount++;
  if (_decodeErrorCount < MAX_DECODE_ERRORS) {
    return;
  }
  AppLog.log('解码错误过多,重置解码器');
  _resetDecoderForNewStream(
    StartChartManage().videoWidth,
    StartChartManage().videoHeight,
  );
  _decodeErrorCount = 0;
}
//
/// Validates a buffered frame map and forwards it to the native decoder.
///
/// Expects the map shape used by the H.264 frame buffer, with keys
/// `frameData`, `frameType`, `frameSeq`, `frameSeqI`, `pts` and
/// `scpMessage`. Silently drops frames with missing fields or frames that
/// arrive before the render texture exists.
///
/// NOTE(review): the rendered diff interleaved the removed pre-refactor
/// code here (a shadowing local `frameMap` and a second, unconditional
/// `sendFrame(frameType: 1, …)` call); only the new single-send path with
/// the conditional frame type is kept.
Future<void> _decodeFrame(Map<String, dynamic> frameMap) async {
  final List<int>? frameData = frameMap['frameData'];
  final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
  final int? frameSeq = frameMap['frameSeq'];
  final int? frameSeqI = frameMap['frameSeqI'];
  final int? pts = frameMap['pts'];
  final ScpMessage? scpMessage = frameMap['scpMessage'];
  // Drop incomplete frames, and frames arriving before the texture exists.
  if (frameData == null ||
      frameType == null ||
      frameSeq == null ||
      frameSeqI == null ||
      pts == null ||
      state.textureId.value == null) {
    return;
  }
  AppLog.log('送入解码器的帧数据frameSeq:$frameSeq,frameSeqI:$frameSeqI,'
      'frameType:$frameType,messageId:${scpMessage?.MessageId}');
  await VideoDecodePlugin.sendFrame(
    frameData: frameData,
    // Native side expects 0 for I-frames and 1 for P-frames.
    frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
    frameSeq: frameSeq,
    timestamp: pts,
    splitNalFromIFrame: true,
    refIFrameSeq: frameSeqI,
  );
}
///
@ -557,6 +712,20 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
_startListenTalkData();
//
state.targetFps = _getOptimalFps();
_startFrameProcessTimer();
}
//
/// Returns the frame-processing rate to target on this platform.
///
/// Android devices are driven at 25 fps; every other platform (iOS) gets a
/// more conservative 20 fps.
int _getOptimalFps() =>
    defaultTargetPlatform == TargetPlatform.android ? 25 : 20;
@override
@ -953,6 +1122,98 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('无法处理H264帧textureId为空');
}
break;
case TalkData_ContentTypeE.H264:
if (state.textureId.value != null && talkDataH264Frame != null) {
// SPS/PPS
if (talkDataH264Frame.frameType == TalkDataH264Frame_FrameTypeE.I) {
_cacheSPSPPSIfNeeded(talkData.content);
}
_addFrameToBuffer(
talkData.content,
talkDataH264Frame.frameType,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
scpMessage!,
);
} else {
AppLog.log('无法处理H264帧textureId为空或帧数据无效');
}
break;
}
}
/// Scans an Annex-B H.264 access unit for SPS/PPS NAL units and caches
/// them (start code included) for later decoder (re)initialisation.
///
/// Handles both the 4-byte (0x00000001) and 3-byte (0x000001) start codes.
/// The previous implementation only recognised the 4-byte form — even
/// though its own comment mentioned 0x000001 — so parameter sets preceded
/// by a short start code were silently missed; its loop bound
/// (`offset < length - 4`) could also skip a trailing start code.
void _cacheSPSPPSIfNeeded(List<int> frameData) {
  try {
    // Returns the start-code length at index [i] (4, 3, or 0 for none).
    int startCodeLenAt(int i) {
      if (i + 3 < frameData.length &&
          frameData[i] == 0 &&
          frameData[i + 1] == 0 &&
          frameData[i + 2] == 0 &&
          frameData[i + 3] == 1) {
        return 4;
      }
      if (i + 2 < frameData.length &&
          frameData[i] == 0 &&
          frameData[i + 1] == 0 &&
          frameData[i + 2] == 1) {
        return 3;
      }
      return 0;
    }

    // Index of the next start code at or after [from], or -1 if none.
    int nextStartCode(int from) {
      for (int i = from; i < frameData.length; i++) {
        if (startCodeLenAt(i) > 0) return i;
      }
      return -1;
    }

    int offset = 0;
    while (offset < frameData.length) {
      final int scLen = startCodeLenAt(offset);
      if (scLen == 0) {
        offset++;
        continue;
      }
      final int nalStart = offset + scLen;
      if (nalStart >= frameData.length) break;
      // nal_unit_type lives in the low 5 bits of the first NAL byte
      // (forbidden_zero_bit(1) + nal_ref_idc(2) + nal_unit_type(5)).
      final int nalType = frameData[nalStart] & 0x1F;
      // Type 7 = SPS (Sequence Parameter Set), 8 = PPS (Picture Parameter Set).
      if (nalType == 7 || nalType == 8) {
        int nalEnd = nextStartCode(nalStart);
        if (nalEnd == -1) nalEnd = frameData.length;
        if (nalType == 7) {
          // Cache the whole SPS NAL unit, start code included.
          spsCache = frameData.sublist(offset, nalEnd);
          hasSps = true;
          AppLog.log('检测到并缓存SPS数据, 长度: ${spsCache!.length}');
        } else {
          // Cache the whole PPS NAL unit, start code included.
          ppsCache = frameData.sublist(offset, nalEnd);
          hasPps = true;
          AppLog.log('检测到并缓存PPS数据, 长度: ${ppsCache!.length}');
        }
      }
      // Continue scanning from inside this NAL unit.
      offset = nalStart + 1;
    }
  } catch (e) {
    AppLog.log('SPS/PPS检测错误: $e');
  }
}
//
/// Returns the index of the next Annex-B start code at or after
/// [fromIndex], or -1 when none is found.
///
/// Recognises both the 4-byte (0x00000001) and 3-byte (0x000001) forms.
/// The previous loop bound (`i < data.length - 4`) also skipped a 4-byte
/// start code ending exactly at the final byte; the bounds below fix that
/// off-by-one.
int _findNextStartCode(List<int> data, int fromIndex) {
  for (int i = fromIndex; i + 2 < data.length; i++) {
    if (data[i] == 0 && data[i + 1] == 0) {
      if (data[i + 2] == 1) {
        return i; // 3-byte start code.
      }
      if (i + 3 < data.length && data[i + 2] == 0 && data[i + 3] == 1) {
        return i; // 4-byte start code.
      }
    }
  }
  return -1; // No further start code.
}
// 使SPS/PPS数据
/// Logs readiness once both SPS and PPS parameter sets have been cached.
///
/// Placeholder hook: this is where the cached SPS/PPS data could be pushed
/// to the decoder when (re)initialisation is required.
void _ensureSPSPPSAvailable() {
  final bool ready =
      hasSps && hasPps && spsCache != null && ppsCache != null;
  if (!ready) return;
  AppLog.log('SPS和PPS数据已就绪可用于解码器初始化');
}
}

View File

@ -110,7 +110,7 @@ class TalkViewNativeDecodeState {
// H264帧缓冲区相关
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区
final int maxFrameBufferSize = 50; //
final int targetFps = 60; // ,native的缓冲区
int targetFps = 25; // ,native的缓冲区
Timer? frameProcessTimer; //
bool isProcessingFrame = false; //
int lastProcessedTimestamp = 0; //