Optimize periodic stuttering
parent e16c7fa935
commit 2f1135193b
@@ -40,7 +40,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
int bufferSize = 25; // initialized to the default size
int bufferSize = 50; // initialized to the default size
int audioBufferSize = 20; // audio buffers 2 frames by default
@@ -231,10 +231,30 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// If the buffer exceeds its maximum size, drop P/B frames first
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
// int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
// if (pbIndex != -1) {
// state.h264FrameBuffer.removeAt(pbIndex);
// } else {
// state.h264FrameBuffer.removeAt(0);
// }
// Prefer dropping an older P frame instead of simply dropping the first element
int oldestPFrameIndex = -1;
int minPts = pts; // use the current frame's timestamp as the comparison baseline
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
if (oldestPFrameIndex == -1 || frame['pts'] < minPts) {
oldestPFrameIndex = i;
minPts = frame['pts'];
}
}
}
if (oldestPFrameIndex != -1) {
state.h264FrameBuffer.removeAt(oldestPFrameIndex);
} else {
// If there is no P frame, drop the oldest frame
state.h264FrameBuffer.removeAt(0);
}
}
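For reference, a minimal self-contained sketch of the drop policy above (illustrative only, not part of the commit): it assumes buffer entries are maps carrying 'frameType' and 'pts' keys as in this diff, and substitutes a local FrameType enum for TalkDataH264Frame_FrameTypeE.

enum FrameType { i, p }

/// Drops the oldest P frame (smallest pts) if one exists, otherwise the
/// oldest frame overall. Returns the removed entry, or null if the buffer
/// is empty.
Map<String, dynamic>? dropOneFrame(List<Map<String, dynamic>> buffer) {
  if (buffer.isEmpty) return null;
  int oldestPFrameIndex = -1;
  int? minPts;
  for (int i = 0; i < buffer.length; i++) {
    final frame = buffer[i];
    if (frame['frameType'] == FrameType.p) {
      final int pts = frame['pts'] as int;
      if (oldestPFrameIndex == -1 || pts < minPts!) {
        oldestPFrameIndex = i;
        minPts = pts;
      }
    }
  }
  // No P frame found: fall back to the oldest frame in the buffer.
  return buffer.removeAt(oldestPFrameIndex != -1 ? oldestPFrameIndex : 0);
}

void main() {
  final buffer = <Map<String, dynamic>>[
    {'frameType': FrameType.i, 'pts': 100},
    {'frameType': FrameType.p, 'pts': 140},
    {'frameType': FrameType.p, 'pts': 120},
  ];
  final dropped = dropOneFrame(buffer);
  print('dropped pts=${dropped?['pts']}'); // dropped pts=120
}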
@@ -244,21 +264,146 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
/// Starts the frame-processing timer
// void _startFrameProcessTimer() {
// // Cancel any existing timer
// state.frameProcessTimer?.cancel();
//
// // Compute the timer interval so frames are processed at the target frame rate
// final int intervalMs = (1000 / state.targetFps).round();
//
// // Create a new timer
// state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
// _processNextFrameFromBuffer();
// });
// AppLog.log('Started frame-processing timer, target fps: ${state.targetFps}fps, interval: ${intervalMs}ms');
// }
void _startFrameProcessTimer() {
// Cancel any existing timer
state.frameProcessTimer?.cancel();
_stopFrameProcessTimer();
// Compute the timer interval so frames are processed at the target frame rate
// Use more precise frame-rate control
final int intervalMs = (1000 / state.targetFps).round();
// Create a new timer
state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
_processNextFrameFromBuffer();
});
// Use the microtask queue to keep processing timely
state.frameProcessTimer = Timer.periodic(
Duration(milliseconds: intervalMs),
(timer) {
// Use Future.microtask so the timer callback is not blocked
Future.microtask(_processNextFrameFromBuffer);
}
);
AppLog.log('Started frame-processing timer, target fps: ${state.targetFps}fps, interval: ${intervalMs}ms');
}
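For reference, a minimal self-contained sketch of the timer-plus-guard pattern used by _startFrameProcessTimer and _processNextFrameFromBuffer (illustrative only; FramePump and processNextFrame are hypothetical names, not part of the commit): a periodic Timer schedules work through Future.microtask, and a boolean flag skips a tick while the previous one is still running.

import 'dart:async';

class FramePump {
  final int targetFps;
  final Future<void> Function() processNextFrame; // hypothetical callback
  Timer? _timer;
  bool _isProcessing = false;

  FramePump({required this.targetFps, required this.processNextFrame});

  void start() {
    _timer?.cancel();
    final intervalMs = (1000 / targetFps).round(); // e.g. 25 fps -> 40 ms
    _timer = Timer.periodic(Duration(milliseconds: intervalMs), (_) {
      // Schedule on the microtask queue so the timer tick itself stays cheap.
      Future.microtask(() async {
        if (_isProcessing) return; // skip this tick if the last one is still running
        _isProcessing = true;
        try {
          await processNextFrame();
        } finally {
          _isProcessing = false;
        }
      });
    });
  }

  void stop() {
    _timer?.cancel();
    _timer = null;
  }
}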
/// Processes the next frame from the buffer
// void _processNextFrameFromBuffer() async {
// // Avoid re-entrant processing
// if (state.isProcessingFrame) {
// return;
// }
//
// // Skip if the buffer is empty
// if (state.h264FrameBuffer.isEmpty) {
// return;
// }
//
// state.isProcessingFrame = true;
//
// // Look for I frames first and consume the one with the smallest frameSeq
// final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
// iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
//
// if (iFrames.isNotEmpty) {
// // There is an I frame: consume the smallest one and record its frameSeq
// final minIFrame = iFrames.first;
// final minIFrameSeq = minIFrame['frameSeq'];
// final targetIndex = state.h264FrameBuffer.indexWhere(
// (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
// );
// state.isProcessingFrame = true;
// final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
// if (frameMap == null) {
// state.isProcessingFrame = false;
// return;
// }
// final List<int>? frameData = frameMap['frameData'];
// final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
// final int? frameSeq = frameMap['frameSeq'];
// final int? frameSeqI = frameMap['frameSeqI'];
// final int? pts = frameMap['pts'];
// final ScpMessage? scpMessage = frameMap['scpMessage'];
// if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
// state.isProcessingFrame = false;
// return;
// }
// if (state.textureId.value == null) {
// state.isProcessingFrame = false;
// return;
// }
// lastDecodedIFrameSeq = minIFrameSeq;
// AppLog.log('P-frame data sent to decoder, frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
// await VideoDecodePlugin.sendFrame(
// frameData: frameData,
// frameType: 0,
// frameSeq: frameSeq,
// timestamp: pts,
// splitNalFromIFrame: true,
// refIFrameSeq: frameSeqI,
// );
// state.isProcessingFrame = false;
// return;
// }
//
// // When there is no I frame, only consume P frames whose refIFrameSeq equals lastDecodedIFrameSeq
// if (lastDecodedIFrameSeq != null) {
// final validPFrames =
// state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
// if (validPFrames.isNotEmpty) {
// validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// final minPFrame = validPFrames.first;
// final targetIndex = state.h264FrameBuffer.indexWhere(
// (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
// );
// state.isProcessingFrame = true;
// final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
// if (frameMap == null) {
// state.isProcessingFrame = false;
// return;
// }
// final List<int>? frameData = frameMap['frameData'];
// final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
// final int? frameSeq = frameMap['frameSeq'];
// final int? frameSeqI = frameMap['frameSeqI'];
// final int? pts = frameMap['pts'];
// final ScpMessage? scpMessage = frameMap['scpMessage'];
// if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
// state.isProcessingFrame = false;
// return;
// }
// if (state.textureId.value == null) {
// state.isProcessingFrame = false;
// return;
// }
// // AppLog.log('I-frame data sent to decoder, frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// // 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
//
// await VideoDecodePlugin.sendFrame(
// frameData: frameData,
// frameType: 1,
// frameSeq: frameSeq,
// timestamp: pts,
// splitNalFromIFrame: true,
// refIFrameSeq: frameSeqI,
// );
// state.isProcessingFrame = false;
// return;
// }
// }
// // Otherwise do not consume; wait for the next I frame to arrive
// }
/// Processes the next frame from the buffer - optimized version
void _processNextFrameFromBuffer() async {
// Avoid re-entrant processing
if (state.isProcessingFrame) {
@@ -270,98 +415,108 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
// Look for I frames first and consume the one with the smallest frameSeq
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
state.isProcessingFrame = true;
if (iFrames.isNotEmpty) {
// There is an I frame: consume the smallest one and record its frameSeq
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
try {
// Handle I frames first
final iFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrame['frameSeq'],
);
if (targetIndex != -1) {
final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
await _decodeFrame(frameMap);
lastDecodedIFrameSeq = minIFrame['frameSeq'] as int;
return;
}
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
// Handle P frames - allow processing multiple P frames to improve smoothness
if (lastDecodedIFrameSeq != null) {
final validPFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == lastDecodedIFrameSeq)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// Process at most 3 P frames per pass to improve decoding efficiency
int framesToProcess = min(3, validPFrames.length);
for (int i = 0; i < framesToProcess; i++) {
if (state.h264FrameBuffer.isNotEmpty) {
final pFrame = validPFrames[i];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] == pFrame['frameSeq'] &&
f['frameSeqI'] == lastDecodedIFrameSeq,
);
if (targetIndex != -1) {
final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
await _decodeFrame(frameMap);
}
}
}
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;
AppLog.log('P-frame data sent to decoder, frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
'frameType:${frameType},messageId:${scpMessage!.MessageId}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
} catch (e) {
AppLog.log('Frame processing error: $e');
// Clean up state on error to avoid getting stuck
state.isProcessingFrame = false;
// If errors keep occurring, consider resetting the decoder
_handleDecodeError();
} finally {
state.isProcessingFrame = false;
}
}
int _decodeErrorCount = 0;
static const int MAX_DECODE_ERRORS = 5;
void _handleDecodeError() {
_decodeErrorCount++;
if (_decodeErrorCount >= MAX_DECODE_ERRORS) {
AppLog.log('Too many decode errors, resetting the decoder');
_resetDecoderForNewStream(
StartChartManage().videoWidth,
StartChartManage().videoHeight
);
_decodeErrorCount = 0;
}
}
// Decoding logic extracted into a standalone method
Future<void> _decodeFrame(Map<String, dynamic> frameMap) async {
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null || state.textureId.value == null) {
return;
}
// When there is no I frame, only consume P frames whose refIFrameSeq equals lastDecodedIFrameSeq
if (lastDecodedIFrameSeq != null) {
final validPFrames =
state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
// AppLog.log('I-frame data sent to decoder, frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
AppLog.log('Frame data sent to decoder, frameSeq:$frameSeq,frameSeqI:$frameSeqI,'
'frameType:$frameType,messageId:${scpMessage?.MessageId}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
}
// Otherwise do not consume; wait for the next I frame
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
}
/// Stops the frame-processing timer
@@ -557,6 +712,20 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// Start listening to the audio/video data stream
_startListenTalkData();
// Adjust the target frame rate based on device performance
state.targetFps = _getOptimalFps();
_startFrameProcessTimer();
}
// Determine the optimal frame rate based on device performance
int _getOptimalFps() {
// Simple implementation; can be refined further using device info
if (defaultTargetPlatform == TargetPlatform.android) {
return 25; // Android devices usually perform well
} else {
return 20; // be more conservative on iOS
}
}
@override
@@ -953,6 +1122,98 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('Cannot process H264 frame: textureId is null');
}
break;
case TalkData_ContentTypeE.H264:
if (state.textureId.value != null && talkDataH264Frame != null) {
// Check whether it contains SPS/PPS
if (talkDataH264Frame.frameType == TalkDataH264Frame_FrameTypeE.I) {
_cacheSPSPPSIfNeeded(talkData.content);
}
_addFrameToBuffer(
talkData.content,
talkDataH264Frame.frameType,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
scpMessage!,
);
} else {
AppLog.log('Cannot process H264 frame: textureId is null or frame data is invalid');
}
break;
}
}
void _cacheSPSPPSIfNeeded(List<int> frameData) {
try {
// An H.264 frame normally starts with NAL units, so the NAL header has to be parsed
int offset = 0;
// Look for the NAL unit delimiter (0x00000001 or 0x000001)
while (offset < frameData.length - 4) {
// Look for the start code 0x00000001
if (frameData[offset] == 0 && frameData[offset + 1] == 0 &&
frameData[offset + 2] == 0 && frameData[offset + 3] == 1) {
// Skip the start code (4 bytes)
int nalStart = offset + 4;
// Make sure there is enough data to read the NAL header
if (nalStart >= frameData.length) break;
// The first byte of the NAL header carries the NAL type
// bit 0-7: forbidden_zero_bit(1) + nal_ref_idc(2) + nal_unit_type(5)
int nalHeader = frameData[nalStart];
int nalType = nalHeader & 0x1F; // take the low 5 bits
// H.264 NAL unit types:
// 7 = SPS (Sequence Parameter Set)
// 8 = PPS (Picture Parameter Set)
if (nalType == 7) {
// Found SPS - cache the whole NAL unit (including start code and NAL header)
int nalEnd = _findNextStartCode(frameData, nalStart);
if (nalEnd == -1) nalEnd = frameData.length;
spsCache = frameData.sublist(offset, nalEnd);
hasSps = true;
AppLog.log('Detected and cached SPS data, length: ${spsCache!.length}');
} else if (nalType == 8) {
// Found PPS - cache the whole NAL unit
int nalEnd = _findNextStartCode(frameData, nalStart);
if (nalEnd == -1) nalEnd = frameData.length;
ppsCache = frameData.sublist(offset, nalEnd);
hasPps = true;
AppLog.log('Detected and cached PPS data, length: ${ppsCache!.length}');
}
// Move on to the next possible NAL unit
offset = nalStart + 1;
} else {
offset++;
}
}
} catch (e) {
AppLog.log('SPS/PPS detection error: $e');
}
}
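For reference, a small standalone check of the NAL parsing rule used above: the byte after the 4-byte 0x00000001 start code carries the NAL unit type in its low 5 bits, with 7 = SPS and 8 = PPS (nalTypeAfterStartCode is a hypothetical helper, not part of the commit).

int? nalTypeAfterStartCode(List<int> data, int offset) {
  // Expects a 0x00000001 start code at [offset]; returns the NAL unit type
  // (low 5 bits of the following byte), or null if the data is too short
  // or no start code is present.
  if (offset + 4 >= data.length) return null;
  if (data[offset] != 0 || data[offset + 1] != 0 || data[offset + 2] != 0 || data[offset + 3] != 1) {
    return null;
  }
  return data[offset + 4] & 0x1F;
}

void main() {
  final sps = [0, 0, 0, 1, 0x67, 0x42, 0x00, 0x1F]; // 0x67 & 0x1F == 7 (SPS)
  final pps = [0, 0, 0, 1, 0x68, 0xCE, 0x38, 0x80]; // 0x68 & 0x1F == 8 (PPS)
  print(nalTypeAfterStartCode(sps, 0)); // 7
  print(nalTypeAfterStartCode(pps, 0)); // 8
}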
// Helper: find the position of the next start code
int _findNextStartCode(List<int> data, int fromIndex) {
for (int i = fromIndex; i < data.length - 4; i++) {
if (data[i] == 0 && data[i + 1] == 0 &&
data[i + 2] == 0 && data[i + 3] == 1) {
return i; // position of the next start code
}
}
return -1; // not found
}
// The cached SPS/PPS data can be used wherever it is needed
void _ensureSPSPPSAvailable() {
if (hasSps && hasPps && spsCache != null && ppsCache != null) {
// The SPS/PPS data could be sent to the decoder here
// typically needed when the decoder is initialized or the stream is reset
AppLog.log('SPS and PPS data are ready and can be used for decoder initialization');
}
}
}
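The comment in _ensureSPSPPSAvailable only notes that the cached parameter sets can be handed to the decoder. One common way to use such a cache, shown here as an assumption rather than what this commit does, is to prepend the cached SPS and PPS NAL units (each already including its Annex-B start code) to the next IDR frame before it is sent:

List<int> prependParameterSets(List<int> idrFrame, List<int>? sps, List<int>? pps) {
  // Hypothetical helper: if both parameter sets are cached, emit SPS + PPS + IDR;
  // otherwise pass the frame through unchanged.
  if (sps == null || pps == null) return idrFrame;
  return <int>[...sps, ...pps, ...idrFrame];
}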
@@ -110,7 +110,7 @@ class TalkViewNativeDecodeState {
// H264 frame buffer
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264 frame buffer, stores frame data and frame type
final int maxFrameBufferSize = 50; // maximum buffer size
final int targetFps = 60; // target decode frame rate, only used to fill the native buffer quickly
int targetFps = 25; // target decode frame rate, only used to fill the native buffer quickly
Timer? frameProcessTimer; // frame-processing timer
bool isProcessingFrame = false; // whether a frame is currently being processed
int lastProcessedTimestamp = 0; // timestamp of the last processed frame