feat: adjust the data buffering logic during intercom
commit fc23d8f851
parent 0cdaa26fe5
@@ -36,25 +36,22 @@ class TalkViewLogic extends BaseGetXController {
  final TalkViewState state = TalkViewState();

  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

  Timer? _syncTimer; // Video frame playback timer
  Timer? _audioTimer; // Audio frame playback timer
  Timer? _networkQualityTimer; // Network quality monitoring timer

  int _startTime = 0; // Playback start timestamp, used to interpret the timestamps in frame data
  int bufferSize = 40; // Buffer size (in frames)
  int audioBufferSize = 500; // Audio buffer size (in frames)

  // Frame-rate monitoring
  final List<double> _lastFewFps = <double>[]; // Most recent frame-rate samples

  final int minBufferSize = 2; // Minimum buffer of 2 frames, about 166 ms
  final int maxBufferSize = 8; // Maximum buffer of 8 frames, about 666 ms
  int bufferSize = 3; // Initialized to the default size

  // Audio-related member variables (revised)
  final int minAudioBufferSize = 1; // Minimum audio buffer of 1 frame
  final int maxAudioBufferSize = 3; // Maximum audio buffer of 3 frames
  int audioBufferSize = 2; // Default audio buffer of 2 frames

  // Track the playback start time (added)
  int _startTime = 0; // Playback start timestamp
  bool _isFirstFrame = true; // Whether this is the first frame

  int frameIntervalMs = 83; // Initial frame interval of 83 ms (12 FPS)
  int audioFrameIntervalMs = 20; // Initial audio frame interval of 20 ms
  int minFrameIntervalMs = 83; // Minimum frame interval (12 FPS)
  int maxFrameIntervalMs = 166; // Maximum frame interval (about 6 FPS)

  // Audio frame buffering and sending
  final List<int> _bufferedAudioFrames = <int>[];

  // Cache-related variables (added at the top of the class)
  final int maxImageCacheCount = 40; // Maximum number of cached images
  final Map<String, ui.Image> _imageCache = {};

  /// Initialize the audio player
@@ -91,24 +88,153 @@ class TalkViewLogic extends BaseGetXController {
  void _startListenTalkData() {
    state.talkDataRepository.talkDataStream.listen((TalkData talkData) async {
      final contentType = talkData.contentType;
      final currentTime = DateTime.now().millisecondsSinceEpoch;

      // Record the start time when the first frame arrives
      if (_isFirstFrame) {
        _startTime = currentTime;
        _isFirstFrame = false;
      }

      // Dispatch by data type
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          if (state.audioBuffer.length >= audioBufferSize) {
          if (state.audioBuffer.length >= bufferSize) {
            state.audioBuffer.removeAt(0); // Drop the oldest data
          }
          state.audioBuffer.add(talkData); // Add the new data
          // Audio playback logic, similar to video
          _playAudioFrames();
          break;
        case TalkData_ContentTypeE.Image:
          // Actual delay = (current system time - start time) - the frame's expected playback time
          final expectedTime = _startTime + talkData.durationMs;
          final videoDelay = currentTime - expectedTime;
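          // Note (illustrative): videoDelay == (currentTime - _startTime) - talkData.durationMs,
          // i.e. how far wall-clock playback lags the frame's own timeline, assuming
          // durationMs is the frame's timestamp offset from the start of the stream.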
          // Dynamically adjust the buffer size
          _adjustBufferSize(videoDelay);
          // Then add the frame to the playback buffer
          if (state.videoBuffer.length >= bufferSize) {
            state.videoBuffer.removeAt(0); // Drop the oldest data
            state.videoBuffer.removeAt(0);
          }
          state.videoBuffer.add(talkData); // Add the new data
          state.videoBuffer.add(talkData);
          // Decode and cache first
          await _decodeAndCacheFrame(talkData);
          // Finally, try to play
          _playVideoFrames();
          break;
      }
    });
  }

  // Revised: video frame playback logic
  void _playVideoFrames() {
    // If the buffer is empty or has not reached the target size, do not play
    if (state.videoBuffer.isEmpty || state.videoBuffer.length < bufferSize) {
      // AppLog.log('📊 Buffering - current buffer size: ${state.videoBuffer.length}/${bufferSize}');
      return;
    }
    // Find the frame with the smallest timestamp (the oldest frame)
    TalkData? oldestFrame;
    int oldestIndex = -1;
    for (int i = 0; i < state.videoBuffer.length; i++) {
      if (oldestFrame == null ||
          state.videoBuffer[i].durationMs < oldestFrame.durationMs) {
        oldestFrame = state.videoBuffer[i];
        oldestIndex = i;
      }
    }
    // Make sure a valid frame was found
    if (oldestFrame != null && oldestIndex != -1) {
      final cacheKey = oldestFrame.content.hashCode.toString();

      // Update the display with the cached decoded image
      if (_imageCache.containsKey(cacheKey)) {
        state.currentImage.value = _imageCache[cacheKey];
        state.listData.value = Uint8List.fromList(oldestFrame.content);
        state.videoBuffer.removeAt(oldestIndex); // Remove the played frame

        // AppLog.log('🎬 Playing frame - buffer remaining: ${state.videoBuffer.length}/${bufferSize}, '
        //     'playback delay: ${currentTime - oldestFrame.durationMs}ms, '
        //     'frame timestamp: ${oldestFrame.durationMs}');
      } else {
        // AppLog.log('⚠️ No cached image for frame - key: $cacheKey');
        state.videoBuffer.removeAt(oldestIndex); // Remove the frame that cannot be played
      }
    }
  }

  // New: audio frame playback logic
  void _playAudioFrames() {
    // If the buffer is empty or has not reached the target size, do not play.
    // The audio buffer target is smaller to reduce latency.
    if (state.audioBuffer.isEmpty ||
        state.audioBuffer.length < audioBufferSize) {
      return;
    }

    // Find the audio frame with the smallest timestamp
    TalkData? oldestFrame;
    int oldestIndex = -1;
    for (int i = 0; i < state.audioBuffer.length; i++) {
      if (oldestFrame == null ||
          state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
        oldestFrame = state.audioBuffer[i];
        oldestIndex = i;
      }
    }

    // Make sure a valid frame was found
    if (oldestFrame != null && oldestIndex != -1) {
      if (state.isOpenVoice.value) {
        // Play the audio
        _playAudioData(oldestFrame);
      }
      state.audioBuffer.removeAt(oldestIndex);
    }
  }

  // New: decode and cache a frame
  Future<void> _decodeAndCacheFrame(TalkData talkData) async {
    try {
      String cacheKey = talkData.content.hashCode.toString();

      // If the frame has not been cached yet, decode and cache it
      if (!_imageCache.containsKey(cacheKey)) {
        final Uint8List uint8Data = Uint8List.fromList(talkData.content);
        final ui.Image image = await decodeImageFromList(uint8Data);

        // Manage the cache size
        if (_imageCache.length >= bufferSize) {
          _imageCache.remove(_imageCache.keys.first);
        }

        // Add to the cache
        _imageCache[cacheKey] = image;

        // AppLog.log('📥 Cached new frame - cache count: ${_imageCache.length}, key: $cacheKey');
      }
    } catch (e) {
      AppLog.log('❌ Frame decode error: $e');
    }
  }

  // New: dynamically adjust the buffer size
  void _adjustBufferSize(int delay) {
    const int delayThresholdHigh = 250; // High-delay threshold (about 3 frames)
    const int delayThresholdLow = 166; // Low-delay threshold (about 2 frames)
    const int adjustInterval = 1; // Adjust by 1 frame at a time

    if (delay > delayThresholdHigh && bufferSize < maxBufferSize) {
      // Delay is high: grow the buffer
      bufferSize = min(bufferSize + adjustInterval, maxBufferSize);
      AppLog.log('📈 Increased buffer - current size: $bufferSize, delay: ${delay}ms');
    } else if (delay < delayThresholdLow && bufferSize > minBufferSize) {
      // Delay is low: shrink the buffer
      bufferSize = max(bufferSize - adjustInterval, minBufferSize);
      AppLog.log('📉 Decreased buffer - current size: $bufferSize, delay: ${delay}ms');
    }
  }
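  // Worked example for _adjustBufferSize (illustrative): with the default
  // bufferSize of 3, a measured delay of 300 ms (> 250 ms) grows the buffer to
  // 4 frames, while a delay of 100 ms (< 166 ms) shrinks it toward minBufferSize (2).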

  /// Listen for talk status
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
@@ -156,231 +282,6 @@ class TalkViewLogic extends BaseGetXController {
    }
  }

  /// Play video data
  void _playVideoData(TalkData talkData) async {
    try {
      // Use the hash of the current frame's content as the cache key
      String cacheKey = talkData.content.hashCode.toString();

      // Check the cache
      if (_imageCache.containsKey(cacheKey)) {
        // Use the cached decoded image
        state.currentImage.value = _imageCache[cacheKey];
      } else {
        // Convert List<int> to Uint8List
        final Uint8List uint8Data = Uint8List.fromList(talkData.content);
        // Decode the image in the background
        ui.Image? image = await decodeImageFromList(uint8Data);

        // Cache management: evict the oldest entry if the cache is too large
        if (_imageCache.length >= maxImageCacheCount) {
          _imageCache.remove(_imageCache.keys.first);
        }

        // Add to the cache
        _imageCache[cacheKey] = image;
        state.currentImage.value = image;
      }

      // Update the displayed data
      state.listData.value = Uint8List.fromList(talkData.content);
    } catch (e) {
      print('Video frame decode error: $e');
    }
    // state.listData.value = Uint8List.fromList(talkData.content);
  }

  /// Start playback
  void _startPlayback() {
    Future.delayed(Duration(milliseconds: 800), () {
      // Add network quality monitoring
      _networkQualityTimer ??=
          Timer.periodic(const Duration(seconds: 5), _checkNetworkQuality);
      _startTime = DateTime.now().millisecondsSinceEpoch;
      _syncTimer ??=
          Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
        // Dynamically adjust the frame interval
        _adjustFrameInterval();
        // Monitor frame-rate stability
        _monitorFrameStability();
      });
    });
  }

  /// Dynamically adjust the frame interval
  void _adjustFrameInterval() {
    // Compute the target frame interval
    int targetInterval = _calculateTargetInterval();

    // Transition smoothly to the target frame rate to avoid abrupt changes
    if (frameIntervalMs != targetInterval) {
      // Adjust by at most 2 ms per call to keep the change smooth
      frameIntervalMs += (targetInterval > frameIntervalMs) ? 2 : -2;

      // Keep it within a reasonable range
      frameIntervalMs =
          frameIntervalMs.clamp(minFrameIntervalMs, maxFrameIntervalMs);

      // Only rebuild the timers when the frame interval differs from the target by more than a threshold
      if ((frameIntervalMs - targetInterval).abs() >= 5) {
        _rebuildTimers();
      }
    }
    // int newFrameIntervalMs = frameIntervalMs;
    // if (state.videoBuffer.length < 10 && frameIntervalMs < maxFrameIntervalMs) {
    //   // If the buffer is small and the interval is below the maximum, increase the interval
    //   frameIntervalMs += 5;
    // } else if (state.videoBuffer.length > 20 &&
    //     frameIntervalMs > minFrameIntervalMs) {
    //   // If the buffer is large and the interval is above the minimum, decrease the interval
    //   frameIntervalMs -= 5;
    // }
    // // Only rebuild the timers when the frame interval actually changed
    // if (newFrameIntervalMs != frameIntervalMs) {
    //   frameIntervalMs = newFrameIntervalMs;
    //   // Cancel the old timer
    //   _syncTimer?.cancel();
    //   _syncTimer =
    //       Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
    //     // Play video frames
    //     _playVideoFrames();
    //   });
    //
    //   _audioTimer?.cancel();
    //   _audioTimer =
    //       Timer.periodic(Duration(milliseconds: audioFrameIntervalMs), (timer) {
    //     final currentTime = DateTime.now().millisecondsSinceEpoch;
    //     final elapsedTime = currentTime - _startTime;
    //
    //     // Play the appropriate audio frame
    //     if (state.audioBuffer.isNotEmpty &&
    //         state.audioBuffer.first.durationMs <= elapsedTime) {
    //       // Check whether audio is enabled
    //       if (state.isOpenVoice.value) {
    //         _playAudioData(state.audioBuffer.removeAt(0));
    //       } else {
    //         // If audio is not being played, only read the data from the buffer.
    //         // Adjust this logic as needed, e.g. cap the buffer length to prevent unbounded growth.
    //         // Removing the data without playing it keeps the audio buffer up to date in real time.
    //         state.audioBuffer.removeAt(0);
    //       }
    //     }
    //   });
    // }
  }

  /// Monitor frame-rate stability
  void _monitorFrameStability() {
    const stabilityThreshold = 5; // Frame-rate fluctuation threshold
    final currentFps = 1000 / frameIntervalMs;

    if (_lastFewFps.length >= 10) {
      _lastFewFps.removeAt(0);
    }
    _lastFewFps.add(currentFps);

    // Compute the standard deviation of the frame rate
    if (_lastFewFps.length >= 5) {
      double mean = _lastFewFps.reduce((a, b) => a + b) / _lastFewFps.length;
      double variance =
          _lastFewFps.map((fps) => pow(fps - mean, 2)).reduce((a, b) => a + b) /
              _lastFewFps.length;
      double stdDev = sqrt(variance);

      // If the frame rate fluctuates too much, apply smoothing
      if (stdDev > stabilityThreshold) {
        _smoothFrameRate(mean);
      }
    }
  }
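  // Worked example for _monitorFrameStability (illustrative): with
  // _lastFewFps = [12, 12, 11, 10, 12] the mean is 11.4 and the standard deviation
  // is 0.8, below the threshold of 5, so no smoothing is applied.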

  /// Check network quality
  void _checkNetworkQuality(Timer timer) {
    final bufferHealth = state.videoBuffer.length / bufferSize;

    if (bufferHealth < 0.3) {
      // Buffer is below 30%
      // Lower the frame rate to adapt to network conditions
      frameIntervalMs = min(frameIntervalMs + 10, maxFrameIntervalMs);
      _rebuildTimers();
    } else if (bufferHealth > 0.7) {
      // Buffer is above 70%
      // Raise the frame rate for a better experience
      frameIntervalMs = max(frameIntervalMs - 5, minFrameIntervalMs);
      _rebuildTimers();
    }
  }
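  // Worked example for _checkNetworkQuality (illustrative): with bufferSize = 8 and
  // only 2 buffered frames, bufferHealth = 0.25 < 0.3, so the frame interval is
  // raised by 10 ms (capped at maxFrameIntervalMs) and the timers are rebuilt.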

  /// Compute the target frame interval
  int _calculateTargetInterval() {
    const int optimalBufferSize = 15; // Ideal buffer size
    const int bufferTolerance = 5; // Buffer tolerance

    if (state.videoBuffer.length < optimalBufferSize - bufferTolerance) {
      // Buffer too small: lower the frame rate
      return (frameIntervalMs * 1.2).round();
    } else if (state.videoBuffer.length > optimalBufferSize + bufferTolerance) {
      // Buffer too large: raise the frame rate
      return (frameIntervalMs * 0.8).round();
    }
    return frameIntervalMs;
  }
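  // Worked example for _calculateTargetInterval (illustrative): at frameIntervalMs = 83,
  // fewer than 10 buffered frames gives a target of about 100 ms (slower playback);
  // more than 20 gives about 66 ms, which _adjustFrameInterval later clamps back to
  // minFrameIntervalMs.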

  /// Rebuild the timers
  void _rebuildTimers() {
    // Cancel the existing timers
    _syncTimer?.cancel();
    _audioTimer?.cancel();

    // Create a new video timer
    _syncTimer =
        Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
      _playVideoFrames();
    });

    // Create a new audio timer with a fixed interval
    _audioTimer =
        Timer.periodic(Duration(milliseconds: audioFrameIntervalMs), (timer) {
      _processAudioFrame();
    });
  }

  /// Process audio frames
  void _processAudioFrame() {
    final currentTime = DateTime.now().millisecondsSinceEpoch;
    final elapsedTime = currentTime - _startTime;

    while (state.audioBuffer.isNotEmpty &&
        state.audioBuffer.first.durationMs <= elapsedTime) {
      if (state.isOpenVoice.value) {
        _playAudioData(state.audioBuffer.removeAt(0));
      } else {
        state.audioBuffer.removeAt(0);
      }
    }
  }

  void _playVideoFrames() {
    final currentTime = DateTime.now().millisecondsSinceEpoch;
    final elapsedTime = currentTime - _startTime;

    // Play the appropriate video frames.
    // Frame-skipping strategy: if the buffer holds multiple frames whose timestamps
    // are all earlier than the current time, play the newest one.
    int maxFramesToProcess = 5; // Process at most 5 frames per tick
    int processedFrames = 0;

    while (state.videoBuffer.isNotEmpty &&
        state.videoBuffer.first.durationMs <= elapsedTime &&
        processedFrames < maxFramesToProcess) {
      if (state.videoBuffer.length > 1) {
        state.videoBuffer.removeAt(0);
      } else {
        _playVideoData(state.videoBuffer.removeAt(0));
      }
      processedFrames++;
    }
  }

  /// Stop audio playback
  void _stopPlayG711Data() async {
    await FlutterPcmSound.pause();
@@ -546,7 +447,7 @@ class TalkViewLogic extends BaseGetXController {
    _initFlutterPcmSound();

    // Start the playback timers
    _startPlayback();
    // _startPlayback();

    // Initialize the recording controller
    _initAudioRecorder();
@@ -554,39 +455,16 @@ class TalkViewLogic extends BaseGetXController {
    requestPermissions();
  }

  /// Smooth the frame rate
  void _smoothFrameRate(double targetFps) {
    // Compute the target frame interval
    int targetInterval = (1000 / targetFps).round();

    // Use a weighted average for a smooth transition
    double weight = 0.3; // Weighting factor, adjust as needed
    frameIntervalMs =
        (frameIntervalMs * (1 - weight) + targetInterval * weight).round();

    // Keep the frame interval within a reasonable range
    frameIntervalMs =
        frameIntervalMs.clamp(minFrameIntervalMs, maxFrameIntervalMs);

    // Rebuild the timers
    _rebuildTimers();
  }
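  // Worked example for _smoothFrameRate (illustrative): with frameIntervalMs = 100
  // and targetFps = 12 (targetInterval ≈ 83), the weighted average gives
  // 100 * 0.7 + 83 * 0.3 ≈ 95 ms.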

  @override
  void onClose() {
    _stopPlayG711Data(); // Stop audio playback
    state.listData.value = Uint8List(0); // Clear the video data
    state.audioBuffer.clear(); // Clear the audio buffer
    state.videoBuffer.clear(); // Clear the video buffer
    _syncTimer?.cancel(); // Cancel the timer
    _syncTimer = null; // Release the timer reference
    _audioTimer?.cancel();
    _audioTimer = null; // Release the timer reference

    state.oneMinuteTimeTimer?.cancel();
    state.oneMinuteTimeTimer = null;
    // New cleanup code
    _networkQualityTimer?.cancel();
    _lastFewFps.clear();

    stopProcessingAudio();
    // Clear the image cache
    _imageCache.clear();
@@ -55,7 +55,6 @@ class TalkViewState {

  // StarChart (星图) intercom-related state
  List<TalkData> audioBuffer = <TalkData>[].obs;
  List<TalkData> audioBuffer2 = <TalkData>[].obs;
  List<TalkData> activeAudioBuffer = <TalkData>[].obs;
  List<TalkData> activeVideoBuffer = <TalkData>[].obs;