feat: adjust the data buffering logic during intercom (talk)

liyi 2025-03-13 13:38:59 +08:00
parent 0cdaa26fe5
commit fc23d8f851
2 changed files with 146 additions and 269 deletions


@@ -36,25 +36,22 @@ class TalkViewLogic extends BaseGetXController {
final TalkViewState state = TalkViewState();
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
Timer? _syncTimer; // video playback sync timer
Timer? _audioTimer; // audio playback timer
Timer? _networkQualityTimer; // periodic network-quality check timer
int _startTime = 0; // playback start timestamp (ms)
int bufferSize = 40; // video buffer capacity (frames)
int audioBufferSize = 500; // audio buffer capacity (frames)
// Recent FPS samples used to monitor frame-rate stability
final List<double> _lastFewFps = <double>[];
final int minBufferSize = 2; // 2 frames ≈ 166ms
final int maxBufferSize = 8; // 8 frames ≈ 666ms
int bufferSize = 3; // current video buffer size (frames)
// Audio buffer limits (frames)
final int minAudioBufferSize = 1; // minimum: 1 frame
final int maxAudioBufferSize = 3; // maximum: 3 frames
int audioBufferSize = 2; // default: 2 frames
// Playback timing
int _startTime = 0; // timestamp of the first received frame (ms)
bool _isFirstFrame = true; // whether the next frame is the first one
int frameIntervalMs = 83; // 83ms ≈ 12 FPS
int audioFrameIntervalMs = 20; // audio frame interval (ms)
int minFrameIntervalMs = 83; // ≈ 12 FPS
int maxFrameIntervalMs = 166; // ≈ 6 FPS
// Timestamps of buffered audio frames
final List<int> _bufferedAudioFrames = <int>[];
// Decoded-image cache
final int maxImageCacheCount = 40; // max decoded frames kept in cache
final Map<String, ui.Image> _imageCache = {};
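// With the defaults above, playback begins after roughly
// bufferSize * frameIntervalMs ≈ 3 * 83ms ≈ 250ms of video has been buffered.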
///
@@ -91,24 +88,153 @@ class TalkViewLogic extends BaseGetXController {
void _startListenTalkData() {
state.talkDataRepository.talkDataStream.listen((TalkData talkData) async {
final contentType = talkData.contentType;
final currentTime = DateTime.now().millisecondsSinceEpoch;
// Record the arrival time of the first frame as the playback start time
if (_isFirstFrame) {
_startTime = currentTime;
_isFirstFrame = false;
}
// Dispatch by content type
switch (contentType) {
case TalkData_ContentTypeE.G711:
if (state.audioBuffer.length >= audioBufferSize) {
if (state.audioBuffer.length >= bufferSize) {
state.audioBuffer.removeAt(0); // drop the oldest audio frame
}
state.audioBuffer.add(talkData); // enqueue the new audio frame
// Try to play buffered audio
_playAudioFrames();
break;
case TalkData_ContentTypeE.Image:
// Expected play time = start time + frame timestamp; delay = now - expected
final expectedTime = _startTime + talkData.durationMs;
final videoDelay = currentTime - expectedTime;
// Adapt the video buffer size to the measured delay
_adjustBufferSize(videoDelay);
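// Hypothetical example (values for illustration only): _startTime = 10000,
// talkData.durationMs = 500, currentTime = 10800 -> expectedTime = 10500 and
// videoDelay = 300ms, which exceeds delayThresholdHigh (250ms) and grows the
// buffer by one frame.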
// Keep the video buffer bounded
if (state.videoBuffer.length >= bufferSize) {
state.videoBuffer.removeAt(0); // drop the oldest video frame
state.videoBuffer.removeAt(0);
}
state.videoBuffer.add(talkData); // enqueue the new video frame
state.videoBuffer.add(talkData);
// Decode the frame and add it to the image cache
await _decodeAndCacheFrame(talkData);
// Try to play buffered video
_playVideoFrames();
break;
}
});
}
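// Playback is driven by incoming data: every received frame triggers
// _playAudioFrames / _playVideoFrames instead of the old periodic timers.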
// Play the oldest buffered video frame once the buffer is full enough
void _playVideoFrames() {
// Wait until the buffer has reached the target size
if (state.videoBuffer.isEmpty || state.videoBuffer.length < bufferSize) {
// AppLog.log('📊 Buffering - current size: ${state.videoBuffer.length}/${bufferSize}');
return;
}
// Find the frame with the smallest timestamp (the oldest)
TalkData? oldestFrame;
int oldestIndex = -1;
for (int i = 0; i < state.videoBuffer.length; i++) {
if (oldestFrame == null ||
state.videoBuffer[i].durationMs < oldestFrame.durationMs) {
oldestFrame = state.videoBuffer[i];
oldestIndex = i;
}
}
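// Note: with maxBufferSize capped at 8 frames, this linear scan stays cheap.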
// Play the frame if its decoded image is already cached
if (oldestFrame != null && oldestIndex != -1) {
final cacheKey = oldestFrame.content.hashCode.toString();
// Use the cached decoded image
if (_imageCache.containsKey(cacheKey)) {
state.currentImage.value = _imageCache[cacheKey];
state.listData.value = Uint8List.fromList(oldestFrame.content);
state.videoBuffer.removeAt(oldestIndex); // remove the played frame
// AppLog.log('🎬 Playing frame - buffer remaining: ${state.videoBuffer.length}/${bufferSize}, '
// 'playback delay: ${currentTime - oldestFrame.durationMs}ms, '
// 'frame timestamp: ${oldestFrame.durationMs}');
} else {
// AppLog.log('⚠️ No cached image found for frame - Key: $cacheKey');
state.videoBuffer.removeAt(oldestIndex); // discard the frame anyway
}
}
}
// Play the oldest buffered audio frame once the buffer is full enough
void _playAudioFrames() {
// Wait until the audio buffer has reached the target size
// before starting playback, to avoid stuttering
if (state.audioBuffer.isEmpty ||
state.audioBuffer.length < audioBufferSize) {
return;
}
// Find the audio frame with the smallest timestamp (the oldest)
TalkData? oldestFrame;
int oldestIndex = -1;
for (int i = 0; i < state.audioBuffer.length; i++) {
if (oldestFrame == null ||
state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
oldestFrame = state.audioBuffer[i];
oldestIndex = i;
}
}
// Play the frame (if voice is enabled) and remove it from the buffer
if (oldestFrame != null && oldestIndex != -1) {
if (state.isOpenVoice.value) {
// Only play audio when the voice switch is on
_playAudioData(oldestFrame);
}
state.audioBuffer.removeAt(oldestIndex);
}
}
// Decode a video frame and store the resulting image in the cache
Future<void> _decodeAndCacheFrame(TalkData talkData) async {
try {
String cacheKey = talkData.content.hashCode.toString();
// Only decode frames that are not cached yet
if (!_imageCache.containsKey(cacheKey)) {
final Uint8List uint8Data = Uint8List.fromList(talkData.content);
final ui.Image image = await decodeImageFromList(uint8Data);
// Evict the oldest cache entry when the cache is full
if (_imageCache.length >= bufferSize) {
_imageCache.remove(_imageCache.keys.first);
}
// Cache the decoded image
_imageCache[cacheKey] = image;
// AppLog.log('📥 Cached new frame - cache size: ${_imageCache.length}, Key: $cacheKey');
}
} catch (e) {
AppLog.log('❌ Frame decode error: $e');
}
}
// Adapt the video buffer size to the measured playback delay
void _adjustBufferSize(int delay) {
const int delayThresholdHigh = 250; // ≈ 3 frames at 83ms per frame
const int delayThresholdLow = 166; // ≈ 2 frames at 83ms per frame
const int adjustInterval = 1; // adjust by 1 frame at a time
if (delay > delayThresholdHigh && bufferSize < maxBufferSize) {
// Playback is falling behind: grow the buffer
bufferSize = min(bufferSize + adjustInterval, maxBufferSize);
AppLog.log('📈 Increasing buffer - size: $bufferSize, delay: ${delay}ms');
} else if (delay < delayThresholdLow && bufferSize > minBufferSize) {
// Playback is running ahead: shrink the buffer
bufferSize = max(bufferSize - adjustInterval, minBufferSize);
AppLog.log('📉 Decreasing buffer - size: $bufferSize, delay: ${delay}ms');
}
}
/// Listen for talk-status changes
void _startListenTalkStatus() {
state.startChartTalkStatus.statusStream.listen((talkStatus) {
@@ -156,231 +282,6 @@ class TalkViewLogic extends BaseGetXController {
}
}
/// Decode and display a single video frame
void _playVideoData(TalkData talkData) async {
try {
// Use the content hash as the cache key
String cacheKey = talkData.content.hashCode.toString();
// Check the cache first
if (_imageCache.containsKey(cacheKey)) {
// Use the cached image
state.currentImage.value = _imageCache[cacheKey];
} else {
// Convert List<int> to Uint8List
final Uint8List uint8Data = Uint8List.fromList(talkData.content);
// Decode asynchronously so the UI thread is not blocked
ui.Image? image = await decodeImageFromList(uint8Data);
// Evict the oldest cache entry when the cache is full
if (_imageCache.length >= maxImageCacheCount) {
_imageCache.remove(_imageCache.keys.first);
}
// Cache and display the decoded image
_imageCache[cacheKey] = image;
state.currentImage.value = image;
}
// Update the raw frame data
state.listData.value = Uint8List.fromList(talkData.content);
} catch (e) {
print('Video frame decode error: $e');
}
// state.listData.value = Uint8List.fromList(talkData.content);
}
/// Start the playback-related timers
void _startPlayback() {
Future.delayed(Duration(milliseconds: 800), () {
// Check network quality every 5 seconds
_networkQualityTimer ??=
Timer.periodic(const Duration(seconds: 5), _checkNetworkQuality);
_startTime = DateTime.now().millisecondsSinceEpoch;
_syncTimer ??=
Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
// Adjust the frame interval
_adjustFrameInterval();
//
_monitorFrameStability();
});
});
}
/// Gradually adjust the frame interval towards the target
void _adjustFrameInterval() {
// Compute the target interval from the current buffer level
int targetInterval = _calculateTargetInterval();
// Move gradually towards the target interval
if (frameIntervalMs != targetInterval) {
// Step by 2ms each time so the transition stays smooth
frameIntervalMs += (targetInterval > frameIntervalMs) ? 2 : -2;
// Clamp to the allowed range
frameIntervalMs =
frameIntervalMs.clamp(minFrameIntervalMs, maxFrameIntervalMs);
// Rebuild the timers only while still 5ms or more away from the target
if ((frameIntervalMs - targetInterval).abs() >= 5) {
_rebuildTimers();
}
}
// int newFrameIntervalMs = frameIntervalMs;
// if (state.videoBuffer.length < 10 && frameIntervalMs < maxFrameIntervalMs) {
// //
// frameIntervalMs += 5;
// } else if (state.videoBuffer.length > 20 &&
// frameIntervalMs > minFrameIntervalMs) {
// //
// frameIntervalMs -= 5;
// }
// //
// if (newFrameIntervalMs != frameIntervalMs) {
// frameIntervalMs = newFrameIntervalMs;
// //
// _syncTimer?.cancel();
// _syncTimer =
// Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
// //
// _playVideoFrames();
// });
//
// _audioTimer?.cancel();
// _audioTimer =
// Timer.periodic(Duration(milliseconds: audioFrameIntervalMs), (timer) {
// final currentTime = DateTime.now().millisecondsSinceEpoch;
// final elapsedTime = currentTime - _startTime;
//
// //
// if (state.audioBuffer.isNotEmpty &&
// state.audioBuffer.first.durationMs <= elapsedTime) {
// //
// if (state.isOpenVoice.value) {
// _playAudioData(state.audioBuffer.removeAt(0));
// } else {
// //
// //
// //
// state.audioBuffer.removeAt(0);
// }
// }
// });
// }
}
/// Monitor frame-rate stability via the standard deviation of recent FPS samples
void _monitorFrameStability() {
const stabilityThreshold = 5; // FPS standard-deviation threshold
final currentFps = 1000 / frameIntervalMs;
if (_lastFewFps.length >= 10) {
_lastFewFps.removeAt(0);
}
_lastFewFps.add(currentFps);
// Once enough samples exist, compute the mean and standard deviation
if (_lastFewFps.length >= 5) {
double mean = _lastFewFps.reduce((a, b) => a + b) / _lastFewFps.length;
double variance =
_lastFewFps.map((fps) => pow(fps - mean, 2)).reduce((a, b) => a + b) /
_lastFewFps.length;
double stdDev = sqrt(variance);
// Smooth the frame rate when the deviation exceeds the threshold
if (stdDev > stabilityThreshold) {
_smoothFrameRate(mean);
}
}
}
/// Estimate network quality from buffer health and adapt the frame interval
void _checkNetworkQuality(Timer timer) {
final bufferHealth = state.videoBuffer.length / bufferSize;
if (bufferHealth < 0.3) {
// Buffer below 30%: the network is struggling
// Slow down playback (increase the frame interval)
frameIntervalMs = min(frameIntervalMs + 10, maxFrameIntervalMs);
_rebuildTimers();
} else if (bufferHealth > 0.7) {
// Buffer above 70%: the network is healthy
// Speed up playback (decrease the frame interval)
frameIntervalMs = max(frameIntervalMs - 5, minFrameIntervalMs);
_rebuildTimers();
}
}
/// Calculate the target frame interval from the current buffer level
int _calculateTargetInterval() {
const int optimalBufferSize = 15; // target buffer level (frames)
const int bufferTolerance = 5; // allowed deviation (frames)
if (state.videoBuffer.length < optimalBufferSize - bufferTolerance) {
// Buffer running low: slow down (larger interval)
return (frameIntervalMs * 1.2).round();
} else if (state.videoBuffer.length > optimalBufferSize + bufferTolerance) {
// Buffer running high: speed up (smaller interval)
return (frameIntervalMs * 0.8).round();
}
return frameIntervalMs;
}
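// Illustration: at the default 83ms interval (≈12 FPS), a starved buffer raises
// the target to 83 * 1.2 ≈ 100ms (≈10 FPS); an overfull buffer lowers it to ≈66ms,
// which _adjustFrameInterval later clamps back to minFrameIntervalMs.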
/// Cancel and recreate the playback timers with the current intervals
void _rebuildTimers() {
// Cancel any existing timers
_syncTimer?.cancel();
_audioTimer?.cancel();
// Restart the video timer with the current frame interval
_syncTimer =
Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
_playVideoFrames();
});
// Restart the audio timer with the audio frame interval
_audioTimer =
Timer.periodic(Duration(milliseconds: audioFrameIntervalMs), (timer) {
_processAudioFrame();
});
}
/// Play every audio frame that is due according to the elapsed time
void _processAudioFrame() {
final currentTime = DateTime.now().millisecondsSinceEpoch;
final elapsedTime = currentTime - _startTime;
while (state.audioBuffer.isNotEmpty &&
state.audioBuffer.first.durationMs <= elapsedTime) {
if (state.isOpenVoice.value) {
_playAudioData(state.audioBuffer.removeAt(0));
} else {
state.audioBuffer.removeAt(0);
}
}
}
void _playVideoFrames() {
final currentTime = DateTime.now().millisecondsSinceEpoch;
final elapsedTime = currentTime - _startTime;
// Catch up with the elapsed time, but cap the work done per tick
// so a burst of frames cannot block the UI
int maxFramesToProcess = 5; // process at most 5 frames per tick
int processedFrames = 0;
while (state.videoBuffer.isNotEmpty &&
state.videoBuffer.first.durationMs <= elapsedTime &&
processedFrames < maxFramesToProcess) {
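// Drop stale frames and render only the last remaining due frame,
// so playback catches up instead of falling further behind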
if (state.videoBuffer.length > 1) {
state.videoBuffer.removeAt(0);
} else {
_playVideoData(state.videoBuffer.removeAt(0));
}
processedFrames++;
}
}
/// Stop G711 audio playback
void _stopPlayG711Data() async {
await FlutterPcmSound.pause();
@@ -546,7 +447,7 @@ class TalkViewLogic extends BaseGetXController {
_initFlutterPcmSound();
// Start playback
_startPlayback();
// _startPlayback();
// Initialize the audio recorder
_initAudioRecorder();
@@ -554,39 +455,16 @@ class TalkViewLogic extends BaseGetXController {
requestPermissions();
}
/// Smooth the frame rate towards the target FPS
void _smoothFrameRate(double targetFps) {
// Convert the target FPS to an interval in milliseconds
int targetInterval = (1000 / targetFps).round();
// Use a weighted average so the interval changes gradually
double weight = 0.3; // weight given to the new target
frameIntervalMs =
(frameIntervalMs * (1 - weight) + targetInterval * weight).round();
// Clamp to the allowed range
frameIntervalMs =
frameIntervalMs.clamp(minFrameIntervalMs, maxFrameIntervalMs);
// Apply the new interval
_rebuildTimers();
}
@override
void onClose() {
_stopPlayG711Data(); // stop audio playback
state.listData.value = Uint8List(0); // clear the displayed frame data
state.audioBuffer.clear(); // clear the audio buffer
state.videoBuffer.clear(); // clear the video buffer
_syncTimer?.cancel(); // cancel the video sync timer
_syncTimer = null; // release the reference
_audioTimer?.cancel();
_audioTimer = null; // release the reference
state.oneMinuteTimeTimer?.cancel();
state.oneMinuteTimeTimer = null;
// Cancel the network-quality timer and clear FPS statistics
_networkQualityTimer?.cancel();
_lastFewFps.clear();
stopProcessingAudio();
// Clear the decoded-image cache
_imageCache.clear();


@@ -55,7 +55,6 @@ class TalkViewState {
// Audio and video data buffers
List<TalkData> audioBuffer = <TalkData>[].obs;
List<TalkData> audioBuffer2 = <TalkData>[].obs;
List<TalkData> activeAudioBuffer = <TalkData>[].obs;
List<TalkData> activeVideoBuffer = <TalkData>[].obs;