优化苹果手机视频对讲后锁版麦克风声音变小

This commit is contained in:
sky.min 2026-01-12 16:53:59 +08:00
parent 83b3908826
commit faa00c6bce
3 changed files with 31 additions and 525 deletions

View File

@ -1,6 +1,5 @@
import 'dart:async';
import 'dart:io';
import 'dart:math';
import 'dart:ui' as ui;
import 'package:flutter/foundation.dart';
@ -89,44 +88,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// True while decoding is held back waiting for the next I-frame.
bool _waitingForIFrame = false;
// Sequence number of the last I-frame handed to the decoder, if any.
int? lastDecodedIFrameSeq;
// --- Buffer-full handling ---
int _consecutiveFullBufferCount = 0; // consecutive enqueue attempts that found the buffer full
int _maxConsecutiveFullBufferCount = 3; // full-buffer hits (3) before aggressive cleanup kicks in
bool _isAdjustingForBufferFull = false; // guard flag set while a full-buffer adjustment runs
// Timestamp of the most recent rendered video frame (null before first render).
DateTime? _lastVideoRenderTime;
// --- Adaptive buffer sizing ---
int _currentBufferSize = 3; // current target buffer size, in frames
int _lastBufferSizeAdjustmentTime = 0; // epoch ms of the last size adjustment
int _bufferSizeAdjustmentCooldown = 2000; // minimum interval between adjustments (ms)
// --- Rolling network-quality scoring ---
int _lastNetworkQualityCheckTime = 0; // epoch ms of the last quality check
int _framesProcessedSinceLastCheck = 0; // frames processed in the current check window
int _framesDroppedSinceLastCheck = 0; // frames dropped in the current check window
double _currentNetworkQualityScore = 1.0; // quality score (0.0-1.0, 1.0 = best)
// --- Frame-arrival statistics (sliding window of last 100 frames) ---
List<int> _frameReceiveTimes = []; // arrival timestamps (epoch ms)
List<int> _frameSeqList = []; // sequence numbers matching _frameReceiveTimes
int _totalFramesReceived = 0; // NOTE(review): reset but never incremented in visible code — confirm use
int _lostFrames = 0; // frames inferred lost from sequence-number gaps
int _lastFrameSeqNum = -1; // last seen sequence number (-1 = none yet)
DateTime? _testStartTime; // start time of the current assessment run
Timer? _networkQualityTestTimer; // 1s periodic timer driving _evaluateNetworkQuality
// --- Cumulative counters (cleared by resetNetworkQualityAssessmentVariables) ---
int _totalPacketsReceived = 0; // packets received since the last reset
int _totalFramesReceivedCount = 0; // frames received since the last reset
int _iFramesReceived = 0; // I-frames received
int _pFramesReceived = 0; // P-frames received
int _processedFrames = 0; // frames processed/decoded
int _droppedFrames = 0; // frames dropped
int _framesInBuffer = 0; // cumulative count of frames ever placed in the buffer
int _bufferSize = 0; // snapshot of state.maxFrameBufferSize taken at assessment start
//
Future<void> _initVideoDecoder() async {
@ -167,20 +128,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
height: height,
codecType: 'h264',
);
AppLog.log('解码器配置的宽高为:${config.width}x${config.height}');
// textureId
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
AppLog.log('视频解码器初始化成功textureId=$textureId');
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('已经开始渲染=======');
//
_lastVideoRenderTime = DateTime.now();
// loading
if (state.isLoading.value) {
Future.microtask(() => state.isLoading.value = false);
} else {
AppLog.log('视频已在渲染状态,保持当前状态');
}
Future.microtask(() => state.isLoading.value = false);
});
} else {
AppLog.log('视频解码器初始化失败');
@ -286,110 +243,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
_lastFrameSeq = frameSeq;
}
//
recordFrameReceived(frameSeq, frameType);
//
bool isVideoRendering = state.textureId.value != null &&
(state.isLoading.isFalse || _lastVideoRenderTime != null);
if (isVideoRendering) {
// I帧和其关联的P帧
if (frameType == TalkDataH264Frame_FrameTypeE.I) {
// I帧I帧和其关联的P帧
_removeOldFramesForIFrame(frameSeq);
// I帧时
_adjustBufferSizeForNetworkCondition();
} else {
// P帧I帧
_cleanOldPFrameForCurrentIFrame(frameSeq, frameSeqI);
}
} else {
//
if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
//
bool isVideoRendering = state.textureId.value != null &&
(state.isLoading.isFalse || _lastVideoRenderTime != null);
//
if (isVideoRendering) {
_consecutiveFullBufferCount++;
//
if (_consecutiveFullBufferCount >= _maxConsecutiveFullBufferCount) {
//
_evaluateCurrentNetworkQuality();
// I帧
final framesToRemove = <int>[];
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
if (state.h264FrameBuffer[i]['frameType'] != TalkDataH264Frame_FrameTypeE.I) {
framesToRemove.add(i);
}
}
//
framesToRemove.reversed.forEach((index) {
state.h264FrameBuffer.removeAt(index);
recordDroppedFrame();
});
// I帧及相关的P帧
if (state.h264FrameBuffer.length > _currentBufferSize ~/ 2) {
// I帧的位置
int lastIFrameIndex = -1;
for (int i = state.h264FrameBuffer.length - 1; i >= 0; i--) {
if (state.h264FrameBuffer[i]['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
lastIFrameIndex = i;
break;
}
}
// I帧I帧及其后续的P帧
if (lastIFrameIndex > 0) {
for (int i = 0; i < lastIFrameIndex; i++) {
state.h264FrameBuffer.removeAt(0);
recordDroppedFrame();
}
} else {
// I帧
while (state.h264FrameBuffer.length > _currentBufferSize ~/ 2) {
state.h264FrameBuffer.removeAt(0);
recordDroppedFrame();
}
}
}
_consecutiveFullBufferCount = 0; //
_isAdjustingForBufferFull = false;
} else {
//
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
recordDroppedFrame(); //
} else {
state.h264FrameBuffer.removeAt(0);
recordDroppedFrame(); //
}
}
} else {
// 使
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
recordDroppedFrame(); //
} else {
state.h264FrameBuffer.removeAt(0);
recordDroppedFrame(); //
}
}
}
}
// Map
final Map<String, dynamic> frameMap = {
'frameData': frameData,
@ -400,169 +253,18 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
'scpMessage': scpMessage,
};
// P/B帧
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
state.h264FrameBuffer.removeAt(0);
}
}
//
state.h264FrameBuffer.add(frameMap);
recordFrameInBuffer(); //
//
if (state.h264FrameBuffer.length < state.maxFrameBufferSize) {
_consecutiveFullBufferCount = 0;
}
}
/// Trims the frame buffer when a new I-frame arrives: keeps only the most
/// recent I-frames plus the P-frames that reference a surviving I-frame.
void _removeOldFramesForIFrame(int newIFrameSeq) {
  // NOTE(review): newIFrameSeq is currently unused; retention is based purely
  // on what is already buffered. Parameter kept for interface compatibility.
  final List<Map<String, dynamic>> framesToKeep = [];
  // Split the buffer into I-frames and P-frames.
  final List<Map<String, dynamic>> iFrames = [];
  final List<Map<String, dynamic>> pFrames = [];
  for (var frame in state.h264FrameBuffer) {
    if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
      iFrames.add(frame);
    } else {
      pFrames.add(frame);
    }
  }
  // Sort I-frames by sequence number so the newest sit at the end.
  iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
  // Keep only the most recent I-frames (2, to allow a smooth transition).
  final int maxIFramesToKeep = 2;
  final int startIdx = max(0, iFrames.length - maxIFramesToKeep);
  // FIX: evicted I-frames are now counted as dropped, matching the
  // accounting already done for evicted P-frames below.
  for (int i = 0; i < startIdx; i++) {
    recordDroppedFrame();
  }
  for (int i = startIdx; i < iFrames.length; i++) {
    framesToKeep.add(iFrames[i]);
  }
  // Keep only the P-frames whose referenced I-frame survived.
  for (var pFrame in pFrames) {
    int refIFrameSeq = pFrame['frameSeqI'];
    bool shouldKeep = false;
    for (var keptIFrame in framesToKeep) {
      if (keptIFrame['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
          keptIFrame['frameSeq'] == refIFrameSeq) {
        shouldKeep = true;
        break;
      }
    }
    if (shouldKeep) {
      framesToKeep.add(pFrame);
    } else {
      recordDroppedFrame(); // count the discarded P-frame
    }
  }
  // Swap the retained set back into the buffer.
  state.h264FrameBuffer.clear();
  state.h264FrameBuffer.addAll(framesToKeep);
}
/// Evicts stale P-frames belonging to the current I-frame (GOP), keeping only
/// the newest few so the buffer stays shallow while video is rendering.
void _cleanOldPFrameForCurrentIFrame(int frameSeq, int frameSeqI) {
  // NOTE(review): frameSeq (the incoming frame's own seq) is unused here;
  // retention is decided purely by frameSeqI. Kept for interface stability.
  // FIX: removed an unused `framesToRemove` local (dead code).
  final List<Map<String, dynamic>> framesToKeep = [];
  // Keep every frame that is NOT a non-I frame of the current GOP.
  for (var frame in state.h264FrameBuffer) {
    if (frame['frameSeqI'] != frameSeqI || frame['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
      framesToKeep.add(frame);
    }
  }
  // Collect the P-frames that belong to the current I-frame.
  final List<Map<String, dynamic>> currentIFramePFrames = [];
  for (var frame in state.h264FrameBuffer) {
    if (frame['frameSeqI'] == frameSeqI && frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
      currentIFramePFrames.add(frame);
    }
  }
  // Sort by sequence number and keep only the newest maxPFramesToKeep.
  currentIFramePFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
  final int maxPFramesToKeep = 5;
  final int startIdx = max(0, currentIFramePFrames.length - maxPFramesToKeep);
  for (int i = startIdx; i < currentIFramePFrames.length; i++) {
    framesToKeep.add(currentIFramePFrames[i]);
  }
  // Account for the P-frames that were evicted.
  for (int i = 0; i < startIdx; i++) {
    recordDroppedFrame();
  }
  // Swap the retained set back into the buffer.
  state.h264FrameBuffer.clear();
  state.h264FrameBuffer.addAll(framesToKeep);
}
/// Picks a buffer-size tier from [_currentNetworkQualityScore], throttled by
/// a cooldown so the size does not oscillate on every I-frame.
void _adjustBufferSizeForNetworkCondition() {
  final int nowMs = DateTime.now().millisecondsSinceEpoch;
  // Respect the adjustment cooldown.
  if (nowMs - _lastBufferSizeAdjustmentTime < _bufferSizeAdjustmentCooldown) {
    return;
  }
  _lastBufferSizeAdjustmentTime = nowMs;
  // Good network -> small buffer (low latency); poor network -> large buffer.
  if (_currentNetworkQualityScore > 0.7) {
    _currentBufferSize = state.adaptiveBufferSizeMin;
  } else if (_currentNetworkQualityScore > 0.4) {
    _currentBufferSize = (state.adaptiveBufferSizeMin + state.adaptiveBufferSizeMax) ~/ 2;
  } else {
    _currentBufferSize = state.adaptiveBufferSizeMax;
  }
  AppLog.log('根据网络状况调整缓冲区大小: ${_currentBufferSize} (当前网络质量评分: ${_currentNetworkQualityScore.toStringAsFixed(2)})');
}
/// Recomputes [_currentNetworkQualityScore] from the drop statistics gathered
/// since the previous check, rate-limited by state.networkQualityCheckIntervalMs.
void _evaluateCurrentNetworkQuality() {
  final int nowMs = DateTime.now().millisecondsSinceEpoch;
  // Rate-limit: bail out while the check interval has not yet elapsed.
  if (nowMs - _lastNetworkQualityCheckTime < state.networkQualityCheckIntervalMs) {
    return;
  }
  _lastNetworkQualityCheckTime = nowMs;
  // Drop rate over the window = dropped / (processed + dropped).
  final int totalProcessed = _framesProcessedSinceLastCheck + _framesDroppedSinceLastCheck;
  double dropRate = 0.0;
  if (totalProcessed > 0) {
    dropRate = _framesDroppedSinceLastCheck / totalProcessed;
  }
  // The quality score is the complement of the drop rate, floored at zero.
  _currentNetworkQualityScore = 1.0 - dropRate;
  if (_currentNetworkQualityScore < 0) {
    _currentNetworkQualityScore = 0.0;
  }
  AppLog.log('网络质量评估: 丢帧率=${dropRate.toStringAsFixed(2)}, 网络质量评分=${_currentNetworkQualityScore.toStringAsFixed(2)}');
  // Start a fresh measurement window.
  _framesProcessedSinceLastCheck = 0;
  _framesDroppedSinceLastCheck = 0;
}
///
@ -1282,215 +984,4 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
break;
}
}
/// Records the arrival of one H.264 frame for loss/throughput statistics.
///
/// Tracks per-type counts, infers lost frames from sequence-number gaps,
/// detects counter wrap-around, and keeps a sliding window of arrival
/// timestamps for jitter analysis.
void recordFrameReceived(int frameSeq, TalkDataH264Frame_FrameTypeE frameType) {
  _totalFramesReceivedCount++;
  // Per-type tallies.
  if (frameType == TalkDataH264Frame_FrameTypeE.I) {
    _iFramesReceived++;
  } else if (frameType == TalkDataH264Frame_FrameTypeE.P) {
    _pFramesReceived++;
  }
  if (_lastFrameSeqNum != -1) {
    if (frameSeq > _lastFrameSeqNum + 1) {
      // Forward jump: the skipped numbers are counted as lost frames,
      // unless the jump is so large it looks like a stream restart.
      final int gap = frameSeq - _lastFrameSeqNum - 1;
      const int maxAllowedGap = 50;
      if (gap > maxAllowedGap) {
        AppLog.log('检测到帧序列号异常跳跃: gap=$gap, 当前frameSeq=$frameSeq, 上一个frameSeq=$_lastFrameSeqNum');
      } else {
        _lostFrames += gap;
      }
    } else if (frameSeq <= _lastFrameSeqNum && frameSeq < 100 && _lastFrameSeqNum > 1000) {
      // Sequence counter wrapped around (small value right after a large one).
      // The log must read the old value before it is overwritten.
      AppLog.log('检测到帧序列号回绕: 从 $_lastFrameSeqNum 回到 $frameSeq');
      _lastFrameSeqNum = frameSeq;
    }
  }
  _lastFrameSeqNum = frameSeq;
  // Sliding window of arrival data, capped at the last 100 frames.
  _frameReceiveTimes.add(DateTime.now().millisecondsSinceEpoch);
  _frameSeqList.add(frameSeq);
  if (_frameReceiveTimes.length > 100) {
    _frameReceiveTimes.removeAt(0);
    _frameSeqList.removeAt(0);
  }
}
/// Counts one received network packet.
void recordPacketReceived() {
  _totalPacketsReceived += 1;
}
/// Counts one processed frame, both cumulatively and within the current
/// quality-check window.
void recordProcessedFrame() {
  _processedFrames += 1;
  _framesProcessedSinceLastCheck += 1;
}
/// Counts one dropped frame, both cumulatively and within the current
/// quality-check window.
void recordDroppedFrame() {
  _droppedFrames += 1;
  _framesDroppedSinceLastCheck += 1;
}
/// Counts one frame placed into the H.264 frame buffer (cumulative).
void recordFrameInBuffer() {
  _framesInBuffer += 1;
}
/// Starts a network-quality assessment run: resets all statistics and kicks
/// off a 1-second periodic evaluation timer.
void startNetworkQualityAssessment() {
  // FIX: cancel any previously running timer so repeated starts do not
  // leak Timer.periodic instances that keep evaluating forever.
  _networkQualityTestTimer?.cancel();
  resetNetworkQualityAssessmentVariables();
  _testStartTime = DateTime.now();
  // Snapshot the configured buffer capacity for this run.
  _bufferSize = state.maxFrameBufferSize;
  // Evaluate once per second until stopNetworkQualityAssessment() is called.
  _networkQualityTestTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
    _evaluateNetworkQuality();
  });
}
/// Stops the periodic network-quality evaluation, if one is running.
void stopNetworkQualityAssessment() {
  final timer = _networkQualityTestTimer;
  if (timer != null) {
    timer.cancel();
    _networkQualityTestTimer = null;
  }
}
/// Clears every statistic used by the network-quality assessment so a new
/// measurement run starts from a clean slate.
void resetNetworkQualityAssessmentVariables() {
  _testStartTime = null;
  // Sequence/loss tracking.
  _lastFrameSeqNum = -1;
  _totalFramesReceived = 0;
  _lostFrames = 0;
  _frameReceiveTimes.clear();
  _frameSeqList.clear();
  // Cumulative counters.
  _totalPacketsReceived = 0;
  _totalFramesReceivedCount = 0;
  _iFramesReceived = 0;
  _pFramesReceived = 0;
  _processedFrames = 0;
  _droppedFrames = 0;
  _framesInBuffer = 0;
}
/// Periodic network-quality evaluation hook, driven by the 1-second timer
/// started in startNetworkQualityAssessment().
void _evaluateNetworkQuality() {
  if (_testStartTime == null) return;
  final elapsed = DateTime.now().difference(_testStartTime!).inSeconds;
  // Skip until at least one full second has elapsed.
  if (elapsed == 0) return;
  // FIX: removed dead code that computed a loss rate and a list of
  // inter-frame intervals but never used either result; the buffer-size
  // adjustment below recomputes everything it needs from the same fields.
  _adjustBufferSizeBasedOnNetworkQuality();
}
/// Maps raw metrics (loss rate in percent, frame rate in fps, jitter in ms)
/// onto a coarse quality label.
String _getNetworkQualityLevel(double lossRate, double frameRate, double jitter) {
  if (lossRate < 1.0 && frameRate > 15.0 && jitter < 50.0) return "优秀";
  if (lossRate < 3.0 && frameRate > 10.0 && jitter < 100.0) return "良好";
  if (lossRate < 5.0 && frameRate > 5.0 && jitter < 200.0) return "一般";
  return "较差";
}
/// Predicts the stutter (卡顿) risk level from loss rate (%), frame rate
/// (fps) and jitter (ms).
String _predictStutterProbability(double lossRate, double frameRate, double jitter) {
  final bool lowRisk = lossRate < 2.0 && frameRate > 10.0 && jitter < 100.0;
  if (lowRisk) {
    return "低风险 - 视频流畅";
  }
  final bool mediumRisk = lossRate <= 5.0 && frameRate >= 5.0 && jitter <= 200.0;
  return mediumRisk ? "中风险 - 可能轻微卡顿" : "高风险 - 可能严重卡顿";
}
/// Recomputes [_currentBufferSize] from loss rate, average frame rate and
/// arrival jitter measured since the assessment started.
void _adjustBufferSizeBasedOnNetworkQuality() {
  if (_testStartTime == null) return;
  final int elapsedSeconds = DateTime.now().difference(_testStartTime!).inSeconds;
  if (elapsedSeconds == 0) return;
  // Frame-loss rate as a percentage, clamped to at most 100%.
  double lossRate = 0.0;
  if (_totalFramesReceivedCount > 0) {
    lossRate = min(100.0, (_lostFrames / _totalFramesReceivedCount) * 100);
  }
  // Average received frame rate over the whole run.
  final double avgFrameRate = _totalFramesReceivedCount / elapsedSeconds;
  // Jitter = standard deviation of inter-arrival intervals (ms).
  double jitter = 0.0;
  final times = _frameReceiveTimes;
  if (times.length >= 2) {
    final intervals = <int>[
      for (int i = 1; i < times.length; i++) times[i] - times[i - 1],
    ];
    if (intervals.length > 1) {
      final double mean = intervals.fold<int>(0, (a, b) => a + b) / intervals.length;
      double variance = 0.0;
      for (final interval in intervals) {
        variance += (interval - mean) * (interval - mean);
      }
      variance /= intervals.length;
      jitter = sqrt(variance);
    }
  }
  // Tiered buffer sizing: better network -> smaller buffer (lower latency).
  if (lossRate < 2.0 && avgFrameRate > 15.0 && jitter < 50.0) {
    _currentBufferSize = state.adaptiveBufferSizeMin;
  } else if (lossRate < 5.0 && avgFrameRate > 10.0 && jitter < 100.0) {
    // NOTE(review): `~/ 3` differs from the `~/ 2` midpoint used by
    // _adjustBufferSizeForNetworkCondition — confirm this is intentional.
    _currentBufferSize = (state.adaptiveBufferSizeMin + state.adaptiveBufferSizeMax) ~/ 3;
  } else if (lossRate < 10.0 && avgFrameRate > 5.0 && jitter < 200.0) {
    _currentBufferSize = ((state.adaptiveBufferSizeMin + state.adaptiveBufferSizeMax) ~/ 2 + state.adaptiveBufferSizeMax) ~/ 2;
  } else {
    _currentBufferSize = state.adaptiveBufferSizeMax;
  }
}
}

View File

@ -646,7 +646,15 @@ class TalkViewLogic extends BaseGetXController {
//
Future<void> _onFrame(List<int> frame) async {
final applyGain = _applyGain(frame, 1.6);
// iOS端发送音频过强导致锁端接收音量相对变小
double gainFactor = 1.0; //
if (Platform.isAndroid) {
gainFactor = 1.2; // Android端适当增强
} else if (Platform.isIOS) {
gainFactor = 0.8; // iOS端降低增益使
}
final applyGain = _applyGain(frame, gainFactor);
// G711数据
List<int> encodedData = G711Tool.encode(applyGain, 0); // 0A-law

View File

@ -445,8 +445,15 @@ class H264WebViewLogic extends BaseGetXController {
return;
}
//
List<int> amplifiedFrame = _applyGain(frame, 1.8);
// iOS端发送音频过强导致锁端接收音量相对变小
double gainFactor = 1.0; //
if (Platform.isAndroid) {
gainFactor = 1.2; // Android端适当增强
} else if (Platform.isIOS) {
gainFactor = 0.8; // iOS端降低增益使
}
List<int> amplifiedFrame = _applyGain(frame, gainFactor);
// G711数据
List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0A-law
_bufferedAudioFrames.addAll(encodedData);