fix: adjust mjpeg rendering logic

This commit is contained in:
liyi 2025-05-08 11:33:38 +08:00
parent a0895927c4
commit 1a67783d7a
3 changed files with 82 additions and 173 deletions

View File

@@ -2,6 +2,7 @@ import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
import 'dart:ui' show decodeImageFromList;
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
@@ -38,36 +39,25 @@ class TalkViewLogic extends BaseGetXController {
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
final int minBufferSize = 2; // minimum video buffer: 2 frames (~166 ms at 12 fps)
final int maxBufferSize = 20; // maximum video buffer size, in frames
int bufferSize = 8; // current video buffer size, in frames
// Audio buffer limits
final int minAudioBufferSize = 1; // at least 1 audio frame
final int maxAudioBufferSize = 3; // at most 3 audio frames
int bufferSize = 8; // current video buffer size, in frames
int audioBufferSize = 2; // current audio buffer size, in frames
bool _isFirstAudioFrame = true; // whether the next audio frame is the first one
// Playback start times
int _startTime = 0; // arrival time of the first video frame
int _startAudioTime = 0; // arrival time of the first audio frame
bool _isFirstFrame = true; // whether the next video frame is the first one
// Frame buffer and decoded-image cache
final List<int> _bufferedAudioFrames = <int>[];
final Map<String, ui.Image> _imageCache = {};
// Timestamp of the previously received video frame (0 = none yet)
int _lastFrameTimestamp = 0;
// FPS statistics
int _frameCount = 0;
int _lastFpsUpdateTime = 0;
Timer? _fpsTimer;
// Talk-status stream subscription
bool _isListening = false;
StreamSubscription? _streamSubscription;
Timer? videoRenderTimer; // fixed-rate video render timer
int _renderedFrameCount = 0;
int _lastFpsPrintTime = DateTime.now().millisecondsSinceEpoch;
/// Initializes the PCM audio player.
void _initFlutterPcmSound() {
const int sampleRate = 8000;
@@ -143,79 +133,16 @@ class TalkViewLogic extends BaseGetXController {
_playAudioFrames();
break;
case TalkData_ContentTypeE.Image:
// Record when the first video frame arrives
if (_isFirstFrame) {
_startTime = currentTime;
_isFirstFrame = false;
// AppLog.log('First frame timestamp: ${talkData.durationMs}');
}
// AppLog.log('Subsequent frame timestamp: ${talkData.durationMs}');
// Adapt the buffer size to the interval between consecutive frames
if (_lastFrameTimestamp != 0) {
final int frameInterval = talkData.durationMs - _lastFrameTimestamp;
_adjustBufferSize(frameInterval); // grow/shrink with network jitter
}
_lastFrameTimestamp = talkData.durationMs; // remember this frame's timestamp
// Drop the oldest frame when the buffer is full
if (state.videoBuffer.length >= bufferSize) {
state.videoBuffer.removeAt(0);
}
// Keep at most bufferSize frames buffered
state.videoBuffer.add(talkData);
// Decode the JPEG payload and cache the resulting image
await _decodeAndCacheFrame(talkData);
// Try to render the oldest buffered frame
_playVideoFrames();
if (state.videoBuffer.length > bufferSize) {
state.videoBuffer.removeAt(0); // safety trim in case of overflow
}
break;
}
});
}
// Render the oldest buffered video frame from the decoded-image cache
void _playVideoFrames() {
// Wait until the buffer has filled before rendering
if (state.videoBuffer.isEmpty || state.videoBuffer.length < bufferSize) {
// AppLog.log('📊 Buffering - current buffer size: ${state.videoBuffer.length}/${bufferSize}');
return;
}
// Find the buffered frame with the smallest timestamp
TalkData? oldestFrame;
int oldestIndex = -1;
for (int i = 0; i < state.videoBuffer.length; i++) {
if (oldestFrame == null ||
state.videoBuffer[i].durationMs < oldestFrame.durationMs) {
oldestFrame = state.videoBuffer[i];
oldestIndex = i;
}
}
// Render the oldest frame if it has already been decoded
if (oldestFrame != null && oldestIndex != -1) {
final cacheKey = oldestFrame.content.hashCode.toString();
// Use the cached decoded image when available
if (_imageCache.containsKey(cacheKey)) {
state.currentImage.value = _imageCache[cacheKey];
state.listData.value = Uint8List.fromList(oldestFrame.content);
state.videoBuffer.removeAt(oldestIndex); // consume the rendered frame
// // FPS statistics (disabled)
// _frameCount++;
// final currentTime = DateTime.now().millisecondsSinceEpoch;
// final elapsed = currentTime - _lastFpsUpdateTime;
//
// if (elapsed >= 1000) {
// // update the displayed fps once per second
// state.fps.value = (_frameCount * 1000 / elapsed).round();
// _frameCount = 0;
// _lastFpsUpdateTime = currentTime;
// }
} else {
// AppLog.log('⚠️ Frame not found in cache - Key: $cacheKey');
state.videoBuffer.removeAt(oldestIndex); // drop the undecoded frame
}
}
}
// Play buffered audio frames
void _playAudioFrames() {
//
@@ -246,50 +173,6 @@ class TalkViewLogic extends BaseGetXController {
}
}
// Decode a JPEG frame and cache the resulting ui.Image
Future<void> _decodeAndCacheFrame(TalkData talkData) async {
try {
String cacheKey = talkData.content.hashCode.toString();
// Only decode frames that are not cached yet
if (!_imageCache.containsKey(cacheKey)) {
final Uint8List uint8Data = Uint8List.fromList(talkData.content);
final ui.Image image = await decodeImageFromList(uint8Data);
// Evict the oldest entry when the cache is full
if (_imageCache.length >= bufferSize) {
_imageCache.remove(_imageCache.keys.first);
}
// Cache the decoded image
_imageCache[cacheKey] = image;
// AppLog.log('📥 Cached new frame - cache size: ${_imageCache.length}, Key: $cacheKey');
}
} catch (e) {
AppLog.log('❌ Frame decode error: $e');
}
}
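As a reference for the decode-and-cache step above, here is a minimal standalone sketch, assuming plain `Uint8List` JPEG payloads and Flutter's `decodeImageFromList` helper; the `FrameCache` name and its API are illustrative, not part of this repository:

```dart
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:flutter/painting.dart' show decodeImageFromList;

/// Illustrative bounded cache of decoded MJPEG frames, keyed by caller-supplied ids.
class FrameCache {
  FrameCache(this.capacity);

  final int capacity;
  final Map<String, ui.Image> _images = <String, ui.Image>{};

  /// Decodes [jpegBytes] once per [key]; evicts the oldest entry when full.
  Future<ui.Image> decode(String key, Uint8List jpegBytes) async {
    final ui.Image? cached = _images[key];
    if (cached != null) return cached;
    final ui.Image image = await decodeImageFromList(jpegBytes);
    if (_images.length >= capacity) {
      // The default Map literal keeps insertion order, so keys.first is the oldest entry.
      _images.remove(_images.keys.first);
    }
    _images[key] = image;
    return image;
  }
}
```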
// Adapt the video buffer size to the observed frame interval
void _adjustBufferSize(int frameInterval) {
const int frameDuration = 83; // ~83 ms per frame at 12 fps
const int delayThresholdHigh = frameDuration * 2; // slow: more than 2 frame periods
const int delayThresholdLow = frameDuration; // fast: less than 1 frame period
const int adjustInterval = 1; // adjust by 1 frame at a time
if (frameInterval > delayThresholdHigh && bufferSize < maxBufferSize) {
// Frames arriving slowly: grow the buffer
bufferSize = min(bufferSize + adjustInterval, maxBufferSize);
AppLog.log('📈 Buffer increased - size: $bufferSize, frame interval: ${frameInterval}ms');
} else if (frameInterval < delayThresholdLow &&
bufferSize > minBufferSize) {
// Frames arriving quickly: shrink the buffer
bufferSize = max(bufferSize - adjustInterval, minBufferSize);
AppLog.log('📉 Buffer decreased - size: $bufferSize, frame interval: ${frameInterval}ms');
}
}
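The same adaptive rule can be written as a pure function, which makes the grow/shrink behaviour easy to check in isolation. A minimal sketch, assuming the nominal 12 fps stream and the same thresholds as above; the function name is illustrative:

```dart
/// Returns the next buffer size for an observed inter-frame interval (ms).
/// Grows when frames arrive slower than 2 frame periods, shrinks below 1.
int nextBufferSize(int current, int frameIntervalMs,
    {int minSize = 2, int maxSize = 20, int frameDurationMs = 83}) {
  if (frameIntervalMs > frameDurationMs * 2 && current < maxSize) {
    return current + 1;
  }
  if (frameIntervalMs < frameDurationMs && current > minSize) {
    return current - 1;
  }
  return current;
}

void main() {
  int size = 8;
  size = nextBufferSize(size, 200); // jittery arrival -> 9
  size = nextBufferSize(size, 40); // fast arrival -> back to 8
  print(size); // 8
}
```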
/// Listen for talk-status changes.
void _startListenTalkStatus() {
state.startChartTalkStatus.statusStream.listen((talkStatus) {
@@ -496,6 +379,32 @@ class TalkViewLogic extends BaseGetXController {
_initAudioRecorder();
requestPermissions();
// Fixed-rate render timer: pop and decode one buffered frame every 100 ms (~10 fps)
videoRenderTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
final int now = DateTime.now().millisecondsSinceEpoch;
if (state.videoBuffer.isNotEmpty) {
final TalkData oldestFrame = state.videoBuffer.removeAt(0);
if (oldestFrame.content.isNotEmpty) {
state.listData.value = Uint8List.fromList(oldestFrame.content); // keep the latest raw JPEG bytes
final int decodeStart = DateTime.now().millisecondsSinceEpoch; // decode timing marker
decodeImageFromList(Uint8List.fromList(oldestFrame.content)).then((ui.Image img) {
final int decodeEnd = DateTime.now().millisecondsSinceEpoch; // decode timing marker
state.currentImage.value = img;
_renderedFrameCount++;
// Log the effective render fps once per second
if (now - _lastFpsPrintTime >= 1000) {
// print('Actual rendered fps: $_renderedFrameCount');
_renderedFrameCount = 0;
_lastFpsPrintTime = now;
}
}).catchError((e) {
print('Image decode failed: $e');
});
}
}
// Nothing to render this tick when the buffer is empty
});
}
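The timer above decouples rendering from network arrival: frames are buffered as they come in, and a 100 ms `Timer.periodic` pops the oldest one and decodes it, giving roughly 10 fps. A minimal standalone sketch of that pattern, assuming a plain `List<Uint8List>` buffer and a callback for decoded frames; the sequence guard is one illustrative way to keep a slow decode from overwriting a newer frame, not something this commit itself does:

```dart
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:flutter/painting.dart' show decodeImageFromList;

/// Illustrative fixed-rate MJPEG render loop: every [interval] it pops the
/// oldest buffered JPEG, decodes it, and hands the image to [onFrame].
Timer startRenderLoop(
  List<Uint8List> buffer,
  void Function(ui.Image frame) onFrame, {
  Duration interval = const Duration(milliseconds: 100), // ~10 fps
}) {
  int sequence = 0;
  return Timer.periodic(interval, (_) {
    if (buffer.isEmpty) return;
    final Uint8List jpeg = buffer.removeAt(0);
    final int mySeq = ++sequence;
    decodeImageFromList(jpeg).then((ui.Image img) {
      if (mySeq == sequence) onFrame(img); // ignore stale, out-of-order decodes
    }).catchError((Object e) {
      // A truncated JPEG payload fails to decode; simply drop that frame.
    });
  });
}
```

Cancelling the returned `Timer` stops the loop, which is what `onClose` and `dispose` below do with `videoRenderTimer`.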
@override
@@ -510,7 +419,7 @@ class TalkViewLogic extends BaseGetXController {
stopProcessingAudio();
// Decoded-image cache cleanup
_imageCache.clear();
// _imageCache.clear();
state.oneMinuteTimeTimer?.cancel(); // stop the call-duration timer
state.oneMinuteTimeTimer = null; // release the timer
state.oneMinuteTime.value = 0;
@@ -518,6 +427,10 @@ class TalkViewLogic extends BaseGetXController {
_streamSubscription?.cancel();
_isListening = false;
// Stop the fixed-rate video render timer
videoRenderTimer?.cancel();
videoRenderTimer = null;
super.onClose();
}
@@ -526,6 +439,9 @@ class TalkViewLogic extends BaseGetXController {
stopProcessingAudio();
// Reset the talk expectation back to its default
StartChartManage().reSetDefaultTalkExpect();
// Stop the fixed-rate video render timer
videoRenderTimer?.cancel();
videoRenderTimer = null;
super.dispose();
}

View File

@@ -135,16 +135,12 @@ class _TalkViewPageState extends State<TalkViewPage>
child: SizedBox.expand(
child: RotatedBox(
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Obx(
() => state.currentImage.value != null
? RawImage(
image: state.currentImage.value,
width: ScreenUtil().scaleWidth,
height: ScreenUtil().scaleHeight,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
)
: Container(color: Colors.transparent),
child: RawImage(
image: state.currentImage.value,
width: ScreenUtil().scaleWidth,
height: ScreenUtil().scaleHeight,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
),
),
),
@@ -160,35 +156,33 @@ class _TalkViewPageState extends State<TalkViewPage>
style: TextStyle(color: Colors.black, fontSize: 26.sp),
))
: Container()),
Obx(
() => state.listData.value.isNotEmpty &&
state.oneMinuteTime.value > 0
? Positioned(
top: ScreenUtil().statusBarHeight + 75.h,
width: 1.sw,
child: Obx(
() {
final String sec = (state.oneMinuteTime.value % 60)
.toString()
.padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60)
.toString()
.padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text(
'$min:$sec',
style: TextStyle(
fontSize: 26.sp, color: Colors.white),
),
],
);
},
),
)
: Container(),
),
Obx(() =>
state.listData.value.isNotEmpty && state.oneMinuteTime.value > 0
? Positioned(
top: ScreenUtil().statusBarHeight + 75.h,
width: 1.sw,
child: Obx(
() {
final String sec = (state.oneMinuteTime.value % 60)
.toString()
.padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60)
.toString()
.padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text(
'$min:$sec',
style: TextStyle(
fontSize: 26.sp, color: Colors.white),
),
],
);
},
),
)
: Container()),
Positioned(
bottom: 10.w,
child: Container(
@@ -458,7 +452,6 @@ class _TalkViewPageState extends State<TalkViewPage>
// if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
// state.listData.value.length > 0) {
// logic.udpOpenDoorAction();
logic.remoteOpenLock();
// }
// if (UDPManage().remoteUnlock == 1) {
// logic.udpOpenDoorAction();
@ -466,6 +459,7 @@ class _TalkViewPageState extends State<TalkViewPage>
// } else {
// logic.showToast('请在锁设置中开启远程开锁'.tr);
// }
logic.remoteOpenLock();
},
)
]);
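For context on the widget side, a minimal sketch of binding the decoded frame to the UI, assuming a GetX `Rx<ui.Image?>` such as `state.currentImage` and an `Obx` wrapper so the preview rebuilds whenever a new frame is decoded (the `FramePreview` name is illustrative):

```dart
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'package:get/get.dart';

/// Illustrative reactive preview of the most recently decoded MJPEG frame.
class FramePreview extends StatelessWidget {
  const FramePreview({super.key, required this.currentImage});

  final Rx<ui.Image?> currentImage;

  @override
  Widget build(BuildContext context) {
    return Obx(() => RawImage(
          image: currentImage.value, // null simply paints nothing
          fit: BoxFit.cover,
          filterQuality: FilterQuality.high,
        ));
  }
}
```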

View File

@@ -90,6 +90,5 @@ class TalkViewState {
RxBool isLongPressing = false.obs; // whether a long-press is in progress
RxBool hasAudioData = false.obs; // whether audio data has been received
RxInt lastAudioTimestamp = 0.obs; // timestamp of the last audio frame
// Latest decoded video frame for rendering
final Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
}