Optimize video and fix bugs
This commit is contained in:
parent f9ee17de21
commit 6b6a754cf9
@@ -56,16 +56,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_frameProcessCount = 0;
_lastFrameProcessTime = now;

// Adjust the target frame rate based on actual processing capability
if (_actualFps < state.targetFps * 0.7) {
// Processing capacity is insufficient; lower the target frame rate
state.targetFps = (state.targetFps * 0.9).clamp(15.0, 60.0) as int;
// Smooth the frame-rate adjustment to avoid frequent large swings
final targetFps = state.targetFps.toDouble();
final actualRatio = _actualFps / targetFps;

// A more conservative and stable adjustment strategy
if (actualRatio < 0.4) {
// Processing capacity is severely insufficient; cut the target frame rate sharply
state.targetFps = (targetFps * 0.6).round().clamp(15, 60);
_startFrameProcessTimer();
} else if (_actualFps > state.targetFps * 1.2 && state.targetFps < 30.0) {
} else if (actualRatio < 0.6) {
// Processing capacity is insufficient; lower the frame rate moderately
state.targetFps = (targetFps * 0.8).round().clamp(15, 60);
_startFrameProcessTimer();
} else if (actualRatio > 1.8 && targetFps < 25) {
// Processing capacity is sufficient; the frame rate can be raised
state.targetFps = (state.targetFps * 1.1).clamp(15.0, 30.0) as int;
state.targetFps = (targetFps * 1.15).round().clamp(15, 30);
_startFrameProcessTimer();
}
// Note: avoid raising the frame rate too aggressively, which can cause stutter
}
}
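The counters used in this hunk (_frameProcessCount, _lastFrameProcessTime, _actualFps) are maintained by _monitorFrameProcessingPerformance(), whose body is not part of this diff. A minimal sketch of how such a measurement could work, assuming the method is invoked once per processed frame and a one-second sampling window, is:

// Hypothetical sketch only - the real _monitorFrameProcessingPerformance()
// is not shown in this commit. Assumes _lastFrameProcessTime holds a
// millisecond timestamp and _actualFps is a double.
void _monitorFrameProcessingPerformance() {
  final now = DateTime.now().millisecondsSinceEpoch;
  _frameProcessCount++;
  // Re-evaluate the measured frame rate roughly once per second.
  if (now - _lastFrameProcessTime >= 1000) {
    _actualFps = _frameProcessCount * 1000 / (now - _lastFrameProcessTime);
    // The adaptive targetFps logic shown in the hunk above would run here,
    // after which the sampling window is reset:
    _frameProcessCount = 0;
    _lastFrameProcessTime = now;
  }
}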
@@ -128,25 +137,33 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
height: StartChartManage().videoHeight,
codecType: 'h264',
);

// Use a timeout to avoid waiting indefinitely
final timeoutFuture = Future.delayed(const Duration(seconds: 3), () => null);
final decoderFuture = VideoDecodePlugin.initDecoder(config);

// Initialize the decoder and obtain the textureId
final textureId = await VideoDecodePlugin.initDecoder(config);
final textureId = await Future.any([decoderFuture, timeoutFuture]);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
AppLog.log('Video decoder initialized successfully: textureId=$textureId');
// Set the frame-rendered listener asynchronously
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('Rendering has started =======');
// Only hide the loading indicator once the first frame has actually been rendered
Future.microtask(() => state.isLoading.value = false);
});
} else {
AppLog.log('Video decoder initialization failed');
AppLog.log('Video decoder initialization failed or timed out');
state.isLoading.value = false;
}
// Start the timer that sends frame data
_startFrameProcessTimer();
} catch (e) {
AppLog.log('Error initializing the video decoder: $e');
state.isLoading.value = false;
// If initialization fails, retry after a delay
await Future.delayed(const Duration(seconds: 2));
await Future.delayed(const Duration(seconds: 1));
if (!Get.isRegistered<TalkViewNativeDecodeLogic>()) {
return; // If the controller has already been disposed, do not retry
}
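A note on the timeout pattern above: Future.any returns whichever future completes first but does not cancel the slower one, so a late initDecoder result is simply discarded. An equivalent guard written with .timeout (the style used later in this commit) is sketched below; the helper name, the int? return type, and the null-on-timeout behaviour are assumptions, not part of the commit:

// Hypothetical alternative, not part of this commit. Requires dart:async
// for TimeoutException; the decoder's return type is assumed to be int?.
Future<int?> _initDecoderOrNull(VideoDecoderConfig config) async {
  try {
    return await VideoDecodePlugin.initDecoder(config)
        .timeout(const Duration(seconds: 3));
  } on TimeoutException {
    return null; // Treated the same as a failed initialization above.
  }
}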
@@ -185,13 +202,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {

/// Add an H264 frame to the buffer
void _addFrameToBuffer(
List<int> frameData,
TalkDataH264Frame_FrameTypeE frameType,
int pts,
int frameSeq,
int frameSeqI,
ScpMessage scpMessage,
) {
List<int> frameData,
TalkDataH264Frame_FrameTypeE frameType,
int pts,
int frameSeq,
int frameSeqI,
ScpMessage scpMessage,
) {
// Dynamic rollover-threshold check; when frameSeq is small the threshold is also small
if (!_pendingStreamReset && _lastFrameSeq != null && frameType == TalkDataH264Frame_FrameTypeE.I && frameSeq < _lastFrameSeq!) {
int dynamicThreshold = _getFrameSeqRolloverThreshold(_lastFrameSeq!);
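_getFrameSeqRolloverThreshold() is called here but its body lies outside this diff. Based on the comment above (smaller frameSeq values use a smaller threshold), a plausible sketch, labeled as an assumption, is:

// Hypothetical sketch - the real implementation is not shown in this commit.
// Treat a backwards jump larger than half of the last observed sequence
// number (with a floor of 100) as a rollover rather than simple reordering.
// Uses max() from dart:math, which this file already imports.
int _getFrameSeqRolloverThreshold(int lastFrameSeq) {
  return max(100, lastFrameSeq ~/ 2);
}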
@@ -237,8 +254,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
} else {
// Normal path
if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
AppLog.log('Dropping out-of-order or duplicate frame: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
// Allow a small out-of-order window to improve fault tolerance
if ((_lastFrameSeq! - frameSeq).abs() > 50) { // Tighten the tolerance window
AppLog.log('Dropping out-of-order or duplicate frame: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
}
}
_lastFrameSeq = frameSeq;
}
@@ -252,33 +272,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
'scpMessage': scpMessage,
};

// If the buffer exceeds its maximum size, drop P/B frames first
// Smarter buffer management
if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
_manageBufferOverflow(frameType);
}

// If the buffer still exceeds its maximum size, remove the oldest frames
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
state.h264FrameBuffer.removeAt(0);
}
state.h264FrameBuffer.removeAt(0);
}

// Dynamically adjust the maximum buffer size based on the buffer length
if (state.h264FrameBuffer.length > state.maxFrameBufferSize * 0.8) {
// The buffer is close to full; drop frames more aggressively
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize * 0.9) {
// Drop older P frames first
int pbIndex = state.h264FrameBuffer.indexWhere((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] < frameSeq - 100);

if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
// If no suitable P frame is found, remove the oldest frame
state.h264FrameBuffer.removeAt(0);
}
}
}
// Add the frame to the buffer
state.h264FrameBuffer.add(frameMap);
}
@@ -286,10 +289,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
/// Start the frame-processing timer
void _startFrameProcessTimer() {
// Cancel any existing timer
state.frameProcessTimer?.cancel();
_stopFrameProcessTimer();

// Compute the timer interval so frames are processed at the target frame rate
final int intervalMs = (1000 / state.targetFps).round();
final int intervalMs = max(16, min(40, (1000 / state.targetFps).round()));

// Create a new timer
state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
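The new clamp keeps the timer interval between 16 ms and 40 ms, so the effective processing rate stays roughly between 25 and 60 fps no matter how far targetFps drifts. Worked values for the expression above:

// intervalMs = max(16, min(40, round(1000 / targetFps)))
//   targetFps = 60  -> round(16.7) = 17 ms  (kept, ~59 fps)
//   targetFps = 30  -> round(33.3) = 33 ms  (kept, ~30 fps)
//   targetFps = 15  -> round(66.7) = 67 ms  -> clamped to 40 ms (~25 fps)
//   targetFps = 120 -> round(8.3)  = 8 ms   -> clamped to 16 ms (~62 fps)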
@@ -298,23 +301,56 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('Frame-processing timer started, target frame rate: ${state.targetFps}fps, interval: ${intervalMs}ms');
}

/// Process the next frame from the buffer
/// Added: quickly clear the frame buffer
void _clearFrameBufferQuickly() {
// Keep only the newest few frames to speed up switching
if (state.h264FrameBuffer.length > 3) {
// Keep the newest 3 frames
state.h264FrameBuffer.removeRange(0, state.h264FrameBuffer.length - 3);
}

// Reset the frame-sequence state
_lastFrameSeq = null;
lastDecodedIFrameSeq = null;
_decodedIFrames.clear();
state.isProcessingFrame = false;
}

/// Stop the frame-processing timer
void _stopFrameProcessTimer() {
state.frameProcessTimer?.cancel();
state.frameProcessTimer = null;
// Note: do not clear the buffer here, to avoid data loss
// state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
}

/// Manage buffer overflow
void _manageBufferOverflow(TalkDataH264Frame_FrameTypeE newFrameType) {
// If the new frame is an I frame, prefer to keep it
if (newFrameType == TalkDataH264Frame_FrameTypeE.I) {
// Find the oldest P/B frame to remove
int pbIndex = state.h264FrameBuffer.indexWhere((f) =>
f['frameType'] != TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] < (_lastFrameSeq ?? 0) - 100); // Only remove frames that are sufficiently old

if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
// If no suitable old frame is found, remove the oldest frame
state.h264FrameBuffer.removeAt(0);
}
} else {
// The new frame is a P or B frame; remove the oldest frame
state.h264FrameBuffer.removeAt(0);
}
}

/// Process the next frame from the buffer
void _processNextFrameFromBuffer() async {
_monitorFrameProcessingPerformance();
final startTime = DateTime.now().microsecondsSinceEpoch;

// Dynamically adjust the processing strategy based on the buffer length
final bufferLength = state.h264FrameBuffer.length;
// If the buffer is too long, raise the processing frequency
if (bufferLength > 30 && state.targetFps < 60) {
_adjustFrameProcessFrequency(state.targetFps * 1.5);
}
// If the buffer is short, the processing frequency can be lowered to save resources
else if (bufferLength < 10 && state.targetFps > 25) {
_adjustFrameProcessFrequency(state.targetFps * 0.8);
}

// Avoid re-entrant processing
if (state.isProcessingFrame) {
return;
@@ -326,166 +362,131 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}

try {
// Prefer I frames; consume the I frame with the smallest frameSeq
final iFrames = state.h264FrameBuffer.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) =>
(a['frameSeq'] as int).compareTo(b['frameSeq'] as int));

if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(
targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;

await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}

state.isProcessingFrame = true;

// Use a more efficient lookup instead of sorting the whole list every time
Map<String, dynamic>? frameToProcess;
// Dynamically adjust the processing strategy based on the buffer length
final bufferLength = state.h264FrameBuffer.length;

// A smarter frame-rate adjustment strategy
if (bufferLength > 50) {
// The buffer is too long; temporarily raise the processing frequency
_temporarilyIncreaseProcessFrequency();
} else if (bufferLength < 5) {
// The buffer is too short; lower the processing frequency to save resources
_adjustFrameProcessFrequency((state.targetFps * 0.9).toDouble());
}

// Find the most suitable frame to process
int frameIndex = -1;

// When there is no I frame, only consume P frames whose refIFrameSeq equals lastDecodedIFrameSeq
// Prefer P frames that reference the most recently decoded I frame
if (lastDecodedIFrameSeq != null) {
// First look for a P frame associated with the previous I frame
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
frame['frameSeqI'] == lastDecodedIFrameSeq) {
frameToProcess = frame;
frameIndex = i;
break;
}
}
final validPFrames =
state.h264FrameBuffer.where((f) =>
// Look for P frames associated with the previous I frame (in order)
final validPFrames = state.h264FrameBuffer
.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == lastDecodedIFrameSeq).toList();
f['frameSeqI'] == lastDecodedIFrameSeq)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));

if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) =>
(a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
frameIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] == minPFrame['frameSeq'] &&
f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(
targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}

await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
}
// If no suitable P frame is found, look for the earliest I frame
if (frameToProcess == null) {
int earliestIframeIndex = -1;
int earliestIframeSeq = 999999;

for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
final frameSeq = frame['frameSeq'] as int;
if (frameSeq < earliestIframeSeq) {
earliestIframeSeq = frameSeq;
frameToProcess = frame;
earliestIframeIndex = i;
// If no related P frame is found, look for the earliest I frame
if (frameIndex == -1) {
final iFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));

if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
frameIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrame['frameSeq'],
);
}
}

// Process the selected frame
if (frameIndex >= 0) {
final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(frameIndex);

final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];

if (frameData != null &&
frameType != null &&
frameSeq != null &&
frameSeqI != null &&
pts != null &&
state.textureId.value != null) {

final int pluginFrameType = frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1;

// Send the frame asynchronously with a timeout
try {
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: pluginFrameType,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
).timeout(const Duration(milliseconds: 20)); // Shorten the timeout further

// Update the sequence number of the last decoded I frame
if (frameType == TalkDataH264Frame_FrameTypeE.I) {
lastDecodedIFrameSeq = frameSeq;
}
} catch (e) {
AppLog.log('Sending frame data timed out or failed: $e');
}
}

if (frameToProcess != null) {
frameIndex = earliestIframeIndex;
}
// Otherwise do not consume anything; wait for an I frame to arrive
} // Process the selected frame
if (frameToProcess != null && frameIndex >= 0) {
final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(
frameIndex);
// ... frame-processing logic
}
} finally {
state.isProcessingFrame = false;
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// Optional: only log when the duration is long (e.g. > 5 ms)
if (durationMs > 5) {
debugPrint('[_processNextFrameFromBuffer] took: ${durationMs.toStringAsFixed(2)} ms');
// Or use your own logging system, e.g.:
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');

// Performance monitoring - a more relaxed threshold
if (durationMs > 30) {
AppLog.log('Frame processing took too long: ${durationMs.toStringAsFixed(2)} ms, buffer length: ${state.h264FrameBuffer.length}');
}
}
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
}

// Record the processing time for performance analysis
if (durationMs > 16.67) { // Longer than one frame interval at 60 fps
AppLog.log('Frame processing took too long: ${durationMs.toStringAsFixed(2)} ms, buffer length: ${state.h264FrameBuffer.length}');
}

Timer? _tempAdjustTimer;

/// Temporarily increase the processing frequency
void _temporarilyIncreaseProcessFrequency() {
// Skip if a temporary adjustment is already running
if (_tempAdjustTimer != null) return;

final originalFps = state.targetFps;
final tempFps = (min(originalFps * 1.5, 60.0)).toInt();

_adjustFrameProcessFrequency(tempFps.toDouble());

// Restore the original frequency after 2 seconds
_tempAdjustTimer = Timer(const Duration(seconds: 2), () {
_adjustFrameProcessFrequency(originalFps.toDouble());
_tempAdjustTimer = null;
});
}

void _adjustFrameProcessFrequency(double newFps) {
@@ -493,14 +494,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_startFrameProcessTimer(); // Restart the timer
}

/// Stop the frame-processing timer
void _stopFrameProcessTimer() {
state.frameProcessTimer?.cancel();
state.frameProcessTimer = null;
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
}

// Initiate the answer command
void initiateAnswerCommand() {
StartChartManage().startTalkAcceptTimer();
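Only the tail of _adjustFrameProcessFrequency() (the timer restart) appears in the hunk above; the rest of the method is unchanged and not shown in this commit. A minimal sketch of what such a method could look like, with the clamp range assumed from the rest of this diff, is:

// Hypothetical sketch - only the closing lines of this method appear in the
// diff. Assumes targetFps is stored as an int and clamped to 15-60 fps.
void _adjustFrameProcessFrequency(double newFps) {
  final int clamped = newFps.clamp(15, 60).round();
  if (clamped == state.targetFps) return; // No change needed.
  state.targetFps = clamped;
  _startFrameProcessTimer(); // Restart the timer (shown in the diff).
}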
@@ -690,7 +683,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {

@override
void onClose() {
// _closeH264File();
// Stop the frame-processing timer
_stopFrameProcessTimer();

@@ -708,11 +700,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.oneMinuteTimeTimer = null; // Cancel the old timer
state.oneMinuteTime.value = 0;

// Release video decoder resources
if (state.textureId.value != null) {
VideoDecodePlugin.releaseDecoder();
Future.microtask(() => state.textureId.value = null);
}
// Release video decoder resources asynchronously
_releaseVideoDecoderAsync();

// Cancel the data-stream subscription
_streamSubscription?.cancel();
@@ -721,7 +710,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// Reset the expected data
StartChartManage().reSetDefaultTalkExpect();
StartChartManage().stopTalkExpectMessageTimer();
VideoDecodePlugin.releaseDecoder();

// Cancel the batch-processing timer
_batchProcessTimer?.cancel();
@@ -734,6 +722,20 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_bufferedAudioFrames.clear();
super.onClose();
}
/// Release the video decoder asynchronously
Future<void> _releaseVideoDecoderAsync() async {
try {
if (state.textureId.value != null) {
// Add timeout handling
await VideoDecodePlugin.releaseDecoder().timeout(const Duration(milliseconds: 500));
Future.microtask(() => state.textureId.value = null);
}
} catch (e) {
AppLog.log('Asynchronous release of the video decoder failed: $e');
// Clear the state even if the release fails
Future.microtask(() => state.textureId.value = null);
}
}

/// Handle an invalid talk status
void _handleInvalidTalkStatus() {
@@ -945,10 +947,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.currentQuality.value = quality;
TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
final audioType = talkExpectReq.audioType;

// Show the loading state immediately
state.isLoading.value = true;

int width = 864;
int height = 480;

switch (quality) {
case '高清': // "HD" quality option
talkExpectReq = TalkExpectReq(
@@ -971,16 +976,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// Pre-set the decoder size immediately
_pendingResetWidth = width;
_pendingResetHeight = height;
// Reset the decoder immediately instead of waiting for rollover detection
await _resetDecoderForNewStream(width, height);
/// Modify the expected data being sent
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq);

// Do not show loading immediately; keep decoding frames from the old stream and wait for frameSeq rollover detection
// Only reset the frameSeq rollover-detection flag
// _pendingStreamReset = false;
// _pendingResetWidth = width;
// _pendingResetHeight = height;
// Run the two operations in parallel for efficiency
await Future.wait([
// Reset the decoder immediately
_resetDecoderForNewStream(width, height),
// Modify the expected data being sent
Future.microtask(() =>
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq))
]).timeout(const Duration(seconds: 2)); // Set an overall timeout
}

void _initHdOptions() {
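Future.wait(...).timeout(const Duration(seconds: 2)) throws a TimeoutException if either operation overruns, and the surrounding error handling is not visible in this hunk. A hedged sketch of one way the caller could keep the UI from staying stuck on the loading indicator (the log message and handling are assumptions, not part of the commit):

// Hypothetical sketch - the enclosing try/catch is not shown in this diff.
// Requires dart:async for TimeoutException.
try {
  await Future.wait([
    _resetDecoderForNewStream(width, height),
    Future.microtask(() => StartChartManage()
        .changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
            talkExpect: talkExpectReq)),
  ]).timeout(const Duration(seconds: 2));
} on TimeoutException catch (e) {
  AppLog.log('Quality switch timed out: $e'); // Assumed log message.
  state.isLoading.value = false; // Avoid leaving the loading state on.
}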
@@ -1001,14 +1005,24 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// Stop the frame-processing timer first
_stopFrameProcessTimer();

// Release the old decoder
// Quickly clear the buffer to speed up the response
_clearFrameBufferQuickly();

// Release the old decoder - use the shortest possible timeout
if (state.textureId.value != null) {
await VideoDecodePlugin.releaseDecoder();
state.textureId.value = null;
try {
// Very short timeout to avoid blocking
await VideoDecodePlugin.releaseDecoder().timeout(const Duration(milliseconds: 300));
state.textureId.value = null;
} catch (e) {
AppLog.log('Releasing the decoder timed out or failed: $e');
// Continue even if the release fails
state.textureId.value = null;
}
}

// Wait briefly to make sure the resources have been released
await Future.delayed(Duration(milliseconds: 50));
// Minimize the wait time
await Future.delayed(Duration(milliseconds: 2));

// Create the new decoder configuration
final config = VideoDecoderConfig(
@@ -1018,36 +1032,45 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
);

// Initialize the new decoder
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
state.textureId.value = textureId;
AppLog.log('Decoder re-initialized successfully after frameSeq rollover: textureId=$textureId');
try {
// Use a shorter timeout
final textureId = await VideoDecodePlugin.initDecoder(config)
.timeout(const Duration(milliseconds: 1500));

// Set up the frame-rendered listener
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('Rendering has started =======');
// Only hide the loading indicator once the first frame has actually been rendered
if (textureId != null) {
state.textureId.value = textureId;
AppLog.log('Decoder initialized successfully: textureId=$textureId');

// Set up the frame-rendered listener
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('Rendering started =======');
// Only hide the loading indicator once the first frame has actually been rendered
state.isLoading.value = false;
});

// Restart the frame-processing timer
_startFrameProcessTimer();

// Reset the related state
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
_lastFrameSeq = null;
lastDecodedIFrameSeq = null;
hasSps = false;
hasPps = false;
spsCache = null;
ppsCache = null;
} else {
AppLog.log('Decoder initialization failed');
state.isLoading.value = false;
});

// Restart the frame-processing timer
_startFrameProcessTimer();

// Reset the related state
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
_lastFrameSeq = null;
hasSps = false;
hasPps = false;
spsCache = null;
ppsCache = null;
} else {
AppLog.log('Decoder initialization failed after frameSeq rollover');
}
} catch (e) {
AppLog.log('Decoder initialization timed out or failed: $e');
state.isLoading.value = false;
}
} catch (e) {
AppLog.log('Decoder initialization error during frameSeq rollover: $e');
AppLog.log('Decoder operation error: $e');
state.isLoading.value = false;
}
}

@@ -47,6 +47,11 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage> wit
);

state.animationController.repeat();
// Add a post-frame callback to optimize animation performance
SchedulerBinding.instance.addPostFrameCallback((_) {
// Make sure the animation runs at an appropriate frame rate
state.animationController.duration = const Duration(milliseconds: 500);
});
// The status listener is called when the animation starts, completes, or moves forward or backward
state.animationController.addStatusListener((AnimationStatus status) {
if (status == AnimationStatus.completed) {
@@ -101,12 +106,12 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage> wit
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
filterQuality: FilterQuality.low,
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
filterQuality: FilterQuality.low,
),
),
),