Video intercom --- optimize buffer and decoder performance

This commit is contained in:
sky.min 2026-01-21 08:56:46 +08:00
parent c3750ff5a3
commit 113a6a345e
2 changed files with 259 additions and 14 deletions

View File

@@ -40,9 +40,38 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
int bufferSize = 25; // video buffer size
int bufferSize = 40; // increased from 25 to 40
int audioBufferSize = 20; // audio buffer size
int audioBufferSize = 30; // increased from 20 to 30
// Network quality score, 1.0 = best
double networkQualityScore = 1.0; // starts at 1.0 (best quality)
// Timestamp of the last buffer size adjustment
int _lastBufferSizeAdjustmentTime = 0;
// FPS statistics
int _frameCount = 0;
int _lastFpsCalculationTime = 0;
Timer? _fpsUpdateTimer;
// Consecutive full-buffer tracking
int _consecutiveFullBufferCount = 0;
static const int _maxConsecutiveFullBuffer = 5; // emergency cleanup after this many consecutive full buffers
// Slow-decode detection
int _lastSlowDecodeTime = 0;
int _slowDecodeCount = 0;
static const int _slowDecodeThreshold = 50; // slow-decode threshold (ms)
static const int _slowDecodeAdjustmentThreshold = 3; // consecutive slow decodes before adjusting
// First-frame latency measurement
int? _firstFrameReceivedTime;
int? _firstFrameRenderedTime;
// Buffer adjustment throttling
bool _isAdjustingBuffer = false;
static const int _minAdjustmentInterval = 3000; // at most one adjustment every 3 s
// When frameSeq is small, the rollover threshold is small as well
int _getFrameSeqRolloverThreshold(int lastSeq) {
@@ -96,6 +125,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
Future<void> _initVideoDecoder() async {
try {
state.isLoading.value = true;
// Record the first-frame reception time
_firstFrameReceivedTime = DateTime.now().millisecondsSinceEpoch;
// Build the decoder configuration
final config = VideoDecoderConfig(
width: StartChartManage().videoWidth,
@@ -110,14 +142,27 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('Video decoder initialized successfully, textureId=$textureId');
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('Rendering has started =======');
// Record the first-frame render time
_firstFrameRenderedTime = DateTime.now().millisecondsSinceEpoch;
if (_firstFrameReceivedTime != null) {
final renderTime = _firstFrameRenderedTime! - _firstFrameReceivedTime!;
AppLog.log('First frame render time: ${renderTime}ms (${renderTime/1000.0}s)');
}
// Hide the loading indicator
Future.microtask(() => state.isLoading.value = false);
// Start FPS statistics
_startFpsCalculation();
});
} else {
AppLog.log('Video decoder initialization failed');
}
// Start the frame-processing timer
_startFrameProcessTimer();
// Start the network quality monitor
_startNetworkQualityMonitor();
} catch (e) {
AppLog.log('Error initializing video decoder: $e');
//
@@ -129,6 +174,96 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
/// Start periodic FPS calculation.
void _startFpsCalculation() {
_fpsUpdateTimer?.cancel();
_fpsUpdateTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
_updateFps();
});
}
/// Update the FPS statistics.
void _updateFps() {
final currentTime = DateTime.now().millisecondsSinceEpoch;
final timeDiff = currentTime - _lastFpsCalculationTime;
if (timeDiff > 0) {
final fps = (_frameCount / (timeDiff / 1000)).toDouble();
state.decoderFps.value = fps;
state.totalFrames.value += _frameCount;
_frameCount = 0;
_lastFpsCalculationTime = currentTime;
} else if (_lastFpsCalculationTime == 0) {
// First calculation: just record the baseline timestamp
_lastFpsCalculationTime = currentTime;
}
}
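
For reference, a minimal standalone sketch (not part of this commit) of the FPS computation above: frames counted since the last update divided by the elapsed time in seconds. The function and variable names are illustrative only.

double computeFps(int framesSinceLastUpdate, int elapsedMs) {
  if (elapsedMs <= 0) return 0.0;
  return framesSinceLastUpdate / (elapsedMs / 1000.0);
}

void main() {
  // e.g. 23 frames decoded over roughly one second => ~22.8 FPS
  print(computeFps(23, 1010).toStringAsFixed(1));
}
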
/// Start the periodic network quality monitor.
void _startNetworkQualityMonitor() {
Timer.periodic(Duration(milliseconds: state.networkQualityCheckIntervalMs), (timer) {
_calculateNetworkQuality();
_adjustBufferSizeBasedOnNetworkQuality();
});
}
/// Calculate the network quality score.
void _calculateNetworkQuality() {
// Combine packet loss and decoder FPS into a single score
// Packet loss impact: lower loss means a higher score
double lossRateImpact = 1.0 - state.packetLossRate.value; // 0.0 - 1.0
// Use the actual FPS as the baseline and guard against an FPS of 0
double actualFps = state.decoderFps.value;
if (actualFps <= 0) actualFps = 0.1; // avoid a zero FPS impact
double fpsImpact = (actualFps / state.targetFps).clamp(0.0, 1.0); // ratio of actual FPS to target FPS
// Weighted score: packet loss 60%, FPS 40%
networkQualityScore = (lossRateImpact * 0.6 + fpsImpact * 0.4).clamp(0.0, 1.0);
AppLog.log('Network quality score: ${networkQualityScore.toStringAsFixed(2)}, packet loss rate: ${state.packetLossRate.value.toStringAsFixed(2)}, FPS: ${state.decoderFps.value.toStringAsFixed(2)}');
}
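
For reference, a minimal standalone sketch (not part of this commit) of the weighted scoring above, with illustrative numbers. The 0.6/0.4 weights mirror the diff; the function name is invented for the example.

double qualityScore(double packetLossRate, double decoderFps, double targetFps) {
  final lossImpact = 1.0 - packetLossRate;
  final fps = decoderFps <= 0 ? 0.1 : decoderFps; // avoid a zero FPS impact
  final fpsImpact = (fps / targetFps).clamp(0.0, 1.0);
  return (lossImpact * 0.6 + fpsImpact * 0.4).clamp(0.0, 1.0).toDouble();
}

void main() {
  // 10% packet loss and 20 of 25 FPS => 0.9 * 0.6 + 0.8 * 0.4 = 0.86
  print(qualityScore(0.10, 20, 25).toStringAsFixed(2));
}
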
/// Adjust the buffer size based on the network quality score.
void _adjustBufferSizeBasedOnNetworkQuality() async {
final currentTime = DateTime.now().millisecondsSinceEpoch;
// Throttle adjustments and avoid re-entrancy
if (currentTime - _lastBufferSizeAdjustmentTime < _minAdjustmentInterval || _isAdjustingBuffer) {
return;
}
_isAdjustingBuffer = true;
int newBufferSize;
if (networkQualityScore < 0.5) {
// Poor network: use the largest buffer
newBufferSize = state.adaptiveBufferSizeMax;
} else if (networkQualityScore > 0.8) {
// Good network: use the smallest buffer for lower latency
newBufferSize = state.adaptiveBufferSizeMin;
} else {
// Medium quality: use the midpoint
newBufferSize = ((state.adaptiveBufferSizeMin + state.adaptiveBufferSizeMax) / 2).round();
}
// Apply only if the size actually changed
if (newBufferSize != state.maxFrameBufferSize) {
// Limit how fast the buffer can grow (at most +50% per adjustment)
int maxAllowedIncrease = (state.maxFrameBufferSize * 0.5).round();
if (newBufferSize > state.maxFrameBufferSize && (newBufferSize - state.maxFrameBufferSize) > maxAllowedIncrease) {
newBufferSize = state.maxFrameBufferSize + maxAllowedIncrease;
}
state.maxFrameBufferSize = newBufferSize;
AppLog.log('Buffer size adjusted to: $newBufferSize (network quality score: ${networkQualityScore.toStringAsFixed(2)})');
}
_lastBufferSizeAdjustmentTime = currentTime;
await Future.delayed(const Duration(milliseconds: 100)); // brief delay before releasing the adjustment flag
_isAdjustingBuffer = false;
}
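
For reference, a minimal standalone sketch (not part of this commit) of the score-to-buffer-size mapping above, including the cap that limits growth to 50% per adjustment. The default min/max of 2 and 8 mirror adaptiveBufferSizeMin/Max in the state class; the function name is invented.

int nextBufferSize(double score, int current, {int min = 2, int max = 8}) {
  int target;
  if (score < 0.5) {
    target = max; // poor network: buffer more frames
  } else if (score > 0.8) {
    target = min; // good network: favour low latency
  } else {
    target = ((min + max) / 2).round(); // medium quality: midpoint
  }
  if (target > current) {
    final cap = current + (current * 0.5).round(); // grow by at most 50% per step
    if (target > cap) target = cap;
  }
  return target;
}

void main() {
  print(nextBufferSize(0.3, 2)); // 3: capped instead of jumping straight to 8
  print(nextBufferSize(0.9, 8)); // 2: shrink immediately when the network is good
}
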
/// Initialize the PCM audio player.
void _initFlutterPcmSound() {
const int sampleRate = 8000;
@@ -227,12 +362,24 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
'scpMessage': scpMessage,
};
// When the buffer is full, drop P/B frames first
// Track consecutive full-buffer events
if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
_consecutiveFullBufferCount++;
if (_consecutiveFullBufferCount >= _maxConsecutiveFullBuffer) {
_handleConsecutiveFullBuffer();
_consecutiveFullBufferCount = 0;
}
} else {
_consecutiveFullBufferCount = 0;
}
// When the buffer is full, drop P frames first
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
int pIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pIndex != -1) {
state.h264FrameBuffer.removeAt(pIndex);
} else {
// No P frame found, drop the oldest frame
state.h264FrameBuffer.removeAt(0);
}
}
@@ -241,13 +388,48 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.h264FrameBuffer.add(frameMap);
}
/// Handle consecutive full-buffer events with an emergency cleanup.
void _handleConsecutiveFullBuffer() {
AppLog.log('Buffer has been full $_consecutiveFullBufferCount times in a row, running emergency cleanup');
// Keep only the newest I frame; drop older I frames and stale P frames
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
if (iFrames.length > 1) {
// Sort I frames by frameSeq
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// Remove all but the newest I frame
for (int i = 0; i < iFrames.length - 1; i++) {
state.h264FrameBuffer.remove(iFrames[i]);
}
}
// Then shrink the buffer to half of its maximum size
int targetSize = state.maxFrameBufferSize ~/ 2;
while (state.h264FrameBuffer.length > targetSize) {
// Drop P frames first to preserve I frames
int pIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pIndex != -1) {
state.h264FrameBuffer.removeAt(pIndex);
continue;
}
// No P frames left, drop the oldest frame
if (state.h264FrameBuffer.length > targetSize) {
state.h264FrameBuffer.removeAt(0);
}
}
AppLog.log('Emergency cleanup complete, current buffer size: ${state.h264FrameBuffer.length}');
}
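
For reference, a minimal standalone sketch (not part of this commit) of the two drop policies above: the per-insert policy that sacrifices P frames before anything else, and the emergency cleanup that keeps only the newest I frame and shrinks the buffer to half its maximum. Frame and FrameType are simplified stand-ins for the Map entries used in the real buffer.

enum FrameType { i, p }

class Frame {
  Frame(this.type, this.seq);
  final FrameType type;
  final int seq;
}

/// Drop policy when the buffer is full: discard a P frame if one exists,
/// otherwise the oldest frame, so I frames survive as long as possible.
void addWithDropPolicy(List<Frame> buffer, Frame frame, int maxSize) {
  while (buffer.length >= maxSize) {
    final pIndex = buffer.indexWhere((f) => f.type == FrameType.p);
    buffer.removeAt(pIndex != -1 ? pIndex : 0);
  }
  buffer.add(frame);
}

/// Emergency cleanup: keep only the newest I frame, then drop P frames
/// (oldest frames as a fallback) until the buffer is at half of maxSize.
void emergencyCleanup(List<Frame> buffer, int maxSize) {
  final iFrames = buffer.where((f) => f.type == FrameType.i).toList()
    ..sort((a, b) => a.seq.compareTo(b.seq));
  for (var i = 0; i < iFrames.length - 1; i++) {
    buffer.remove(iFrames[i]);
  }
  final target = maxSize ~/ 2;
  while (buffer.length > target) {
    final pIndex = buffer.indexWhere((f) => f.type == FrameType.p);
    buffer.removeAt(pIndex != -1 ? pIndex : 0);
  }
}

void main() {
  final buffer = [
    Frame(FrameType.i, 1),
    Frame(FrameType.p, 2),
    Frame(FrameType.i, 3),
    Frame(FrameType.p, 4),
  ];
  addWithDropPolicy(buffer, Frame(FrameType.p, 5), 4); // drops P frame 2 to make room
  emergencyCleanup(buffer, 4); // keeps only I frame 3, shrinks to half of maxSize
  print(buffer.map((f) => f.seq).toList()); // [3, 5]
}
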
/// Start the frame-processing timer.
void _startFrameProcessTimer() {
// Cancel any existing timer
state.frameProcessTimer?.cancel();
// Frame processing interval
final int intervalMs = (1000 / state.targetFps).round();
final int intervalMs = state.frameProcessIntervalMs; // use the configurable processing interval
// Poll the buffer periodically
state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
@@ -274,9 +456,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
try {
// If there are I frames, consume the one with the smallest frameSeq first
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
@@ -303,6 +484,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
lastDecodedIFrameSeq = minIFrameSeq;
final decodeStartTime = DateTime.now().millisecondsSinceEpoch;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
@@ -311,7 +493,31 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
final decodeEndTime = DateTime.now().millisecondsSinceEpoch;
// Measure decode duration to detect a slow decoder
final decodeDuration = decodeEndTime - decodeStartTime;
if (decodeDuration > _slowDecodeThreshold) {
_slowDecodeCount++;
if (decodeEndTime - _lastSlowDecodeTime < 5000) { // within a 5 s window
if (_slowDecodeCount >= _slowDecodeAdjustmentThreshold) {
// Repeated slow decodes: apply performance mitigation
_handleSlowDecodePerformance();
_slowDecodeCount = 0; // reset the counter
}
} else {
_slowDecodeCount = 1; // outside the window, restart counting
}
_lastSlowDecodeTime = decodeEndTime;
} else {
// Fast decode: clear the counter after a quiet period
if (decodeEndTime - _lastSlowDecodeTime > 10000) { // no slow decode for 10 s
_slowDecodeCount = 0;
}
}
state.isProcessingFrame = false;
_frameCount++; // count the frame for FPS statistics
return;
}
@@ -346,6 +552,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
final decodeStartTime = DateTime.now().millisecondsSinceEpoch;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
@@ -354,7 +561,31 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
final decodeEndTime = DateTime.now().millisecondsSinceEpoch;
// Measure decode duration to detect a slow decoder
final decodeDuration = decodeEndTime - decodeStartTime;
if (decodeDuration > _slowDecodeThreshold) {
_slowDecodeCount++;
if (decodeEndTime - _lastSlowDecodeTime < 5000) { // within a 5 s window
if (_slowDecodeCount >= _slowDecodeAdjustmentThreshold) {
// Repeated slow decodes: apply performance mitigation
_handleSlowDecodePerformance();
_slowDecodeCount = 0; // reset the counter
}
} else {
_slowDecodeCount = 1; // outside the window, restart counting
}
_lastSlowDecodeTime = decodeEndTime;
} else {
// Fast decode: clear the counter after a quiet period
if (decodeEndTime - _lastSlowDecodeTime > 10000) { // no slow decode for 10 s
_slowDecodeCount = 0;
}
}
state.isProcessingFrame = false;
_frameCount++; // count the frame for FPS statistics
return;
}
}
@@ -362,8 +593,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
} finally {
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// Only log when processing takes > 5 ms
if (durationMs > 5) {
// Only log when processing takes > 10 ms
if (durationMs > 10) {
debugPrint('[_processNextFrameFromBuffer] took ${durationMs.toStringAsFixed(2)} ms');
// AppLog is not used here to keep logging overhead low
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
@@ -371,6 +602,17 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
/// Handle sustained slow decoding.
void _handleSlowDecodePerformance() {
AppLog.log('Consecutive slow decoding detected, triggering performance optimization');
// Enlarge the buffer to absorb decode jitter
if (state.maxFrameBufferSize < state.adaptiveBufferSizeMax) {
state.maxFrameBufferSize += 2; // grow in small steps
AppLog.log('Buffer size adjusted to ${state.maxFrameBufferSize} due to slow decoding');
}
}
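
For reference, a minimal standalone sketch (not part of this commit) condensing the slow-decode bookkeeping that appears twice in the send-frame paths above: a decode slower than the 50 ms threshold counts as slow, three slow decodes inside a 5 s window trigger mitigation, and a 10 s quiet period clears the counter. The class name is invented for the example.

class SlowDecodeTracker {
  SlowDecodeTracker({this.thresholdMs = 50, this.triggerCount = 3});

  final int thresholdMs;
  final int triggerCount;
  int _count = 0;
  int _lastSlowMs = 0;

  /// Returns true when the caller should apply a performance mitigation.
  bool record(int decodeDurationMs, int nowMs) {
    if (decodeDurationMs > thresholdMs) {
      _count = (nowMs - _lastSlowMs < 5000) ? _count + 1 : 1;
      _lastSlowMs = nowMs;
      if (_count >= triggerCount) {
        _count = 0;
        return true;
      }
    } else if (nowMs - _lastSlowMs > 10000) {
      _count = 0; // a long run of fast decodes clears old slow events
    }
    return false;
  }
}

void main() {
  final tracker = SlowDecodeTracker();
  final t0 = DateTime.now().millisecondsSinceEpoch;
  print(tracker.record(80, t0));        // false: first slow decode
  print(tracker.record(90, t0 + 1000)); // false: second slow decode
  print(tracker.record(70, t0 + 2000)); // true: third slow decode within 5 s
}
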
/// Stop the frame-processing timer.
void _stopFrameProcessTimer() {
state.frameProcessTimer?.cancel();
@@ -568,6 +810,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
@override
void onClose() {
// Cancel the FPS update timer
_fpsUpdateTimer?.cancel();
// _closeH264File();
// Stop the frame-processing timer
_stopFrameProcessTimer();

View File

@@ -109,12 +109,12 @@ class TalkViewNativeDecodeState {
// H264 frame buffer settings
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264 frame buffer
int maxFrameBufferSize = 2; // maximum number of buffered frames
int maxFrameBufferSize = 8; // increased from 2 to 8
final int targetFps = 25; // target frame rate, coordinated with the native decoder buffer
final int adaptiveBufferSizeMin = 2; // adaptive buffer lower bound
final int adaptiveBufferSizeMax = 6; // adaptive buffer upper bound
final int adaptiveBufferSizeMax = 8; // increased from 6 to 8
final int networkQualityCheckIntervalMs = 2000; // network quality check interval (ms)
int frameProcessIntervalMs = 10; // frame processing interval (ms)
int frameProcessIntervalMs = 5; // frame processing interval (ms), reduced from 10 to 5
Timer? frameProcessTimer; // frame-processing timer
bool isProcessingFrame = false; // guard against re-entrant frame processing
int lastProcessedTimestamp = 0; // timestamp of the last processed frame
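
For reference on the interval change: at targetFps = 25 a new frame arrives roughly every 1000 / 25 = 40 ms, so dropping frameProcessIntervalMs from 10 to 5 means the buffer is polled about eight times per frame interval instead of four, halving the worst-case extra latency the polling loop can add to a frame (about 10 ms down to about 5 ms) at the cost of more timer wake-ups.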