fix: adjust mjpeg rendering logic

This commit is contained in:
liyi 2025-05-08 11:33:38 +08:00
parent a0895927c4
commit 1a67783d7a
3 changed files with 82 additions and 173 deletions
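
The diff below removes the adaptive buffer sizing, the per-frame image cache and the timestamp-ordered playback from the talk view, and replaces them with a small FIFO of raw JPEG frames that a fixed 100 ms timer drains: each tick pops the oldest frame, decodes it, and publishes the resulting ui.Image to the UI. A minimal sketch of that pattern, detached from GetX (VideoFrame, MjpegRenderer and the onFrame callback are illustrative names, not from this repository; decodeImageFromList is the Future-returning helper from package:flutter/painting.dart):

    import 'dart:async';
    import 'dart:typed_data';
    import 'dart:ui' as ui;

    import 'package:flutter/painting.dart' show decodeImageFromList;

    /// One MJPEG frame as received from the device (stand-in for TalkData).
    class VideoFrame {
      VideoFrame(this.jpegBytes);
      final Uint8List jpegBytes;
    }

    class MjpegRenderer {
      MjpegRenderer({this.maxBufferedFrames = 8, required this.onFrame});

      final int maxBufferedFrames;           // fixed cap, no adaptive resizing
      final void Function(ui.Image) onFrame; // hands the decoded frame to the UI
      final List<VideoFrame> _buffer = <VideoFrame>[];
      Timer? _timer;

      /// Receive path: append, then drop the oldest frame once the cap is hit.
      void addFrame(VideoFrame frame) {
        _buffer.add(frame);
        if (_buffer.length > maxBufferedFrames) {
          _buffer.removeAt(0);
        }
      }

      /// Render path: a fixed 10 fps tick decodes the oldest buffered frame.
      void start() {
        _timer ??= Timer.periodic(const Duration(milliseconds: 100), (_) async {
          if (_buffer.isEmpty) return;
          final VideoFrame frame = _buffer.removeAt(0);
          try {
            final ui.Image image = await decodeImageFromList(frame.jpegBytes);
            onFrame(image);
          } catch (_) {
            // a corrupt JPEG should not kill the render timer
          }
        });
      }

      void stop() {
        _timer?.cancel();
        _timer = null;
      }
    }

Dropping the oldest frame whenever the cap is exceeded keeps latency bounded if frames arrive faster than they can be decoded, at the cost of occasionally skipping frames.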

View File

@@ -2,6 +2,7 @@ import 'dart:async';
 import 'dart:io';
 import 'dart:ui' as ui;
 import 'dart:math'; // Import the math package to use sqrt
+import 'dart:ui' show decodeImageFromList;
 import 'package:flutter/foundation.dart';
 import 'package:flutter/rendering.dart';
@@ -38,36 +39,25 @@ class TalkViewLogic extends BaseGetXController {
   final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
-  final int minBufferSize = 2; // 2166ms
-  final int maxBufferSize = 20; // 8666ms
-  int bufferSize = 8; //
-  //
-  final int minAudioBufferSize = 1; // 1
-  final int maxAudioBufferSize = 3; // 3
+  int bufferSize = 8; //
   int audioBufferSize = 2; // 2
   bool _isFirstAudioFrame = true; //
-  //
-  int _startTime = 0; //
   int _startAudioTime = 0; //
-  bool _isFirstFrame = true; //
   //
   final List<int> _bufferedAudioFrames = <int>[];
-  final Map<String, ui.Image> _imageCache = {};
-  //
-  int _lastFrameTimestamp = 0; // 0
-  //
-  int _frameCount = 0;
-  int _lastFpsUpdateTime = 0;
-  Timer? _fpsTimer;
   //
   bool _isListening = false;
   StreamSubscription? _streamSubscription;
+  Timer? videoRenderTimer; //
+  int _renderedFrameCount = 0;
+  int _lastFpsPrintTime = DateTime.now().millisecondsSinceEpoch;
   ///
   void _initFlutterPcmSound() {
     const int sampleRate = 8000;
@@ -143,79 +133,16 @@ class TalkViewLogic extends BaseGetXController {
           _playAudioFrames();
           break;
         case TalkData_ContentTypeE.Image:
-          //
-          if (_isFirstFrame) {
-            _startTime = currentTime;
-            _isFirstFrame = false;
-            // AppLog.log('Timestamp of the first frame: ${talkData.durationMs}');
-          }
-          // AppLog.log('Timestamp of subsequent frames: ${talkData.durationMs}');
-          //
-          if (_lastFrameTimestamp != 0) {
-            final int frameInterval = talkData.durationMs - _lastFrameTimestamp;
-            _adjustBufferSize(frameInterval); //
-          }
-          _lastFrameTimestamp = talkData.durationMs; //
-          //
-          if (state.videoBuffer.length >= bufferSize) {
-            state.videoBuffer.removeAt(0);
-          }
+          // keep at most bufferSize frames
           state.videoBuffer.add(talkData);
-          //
-          await _decodeAndCacheFrame(talkData);
-          //
-          _playVideoFrames();
+          if (state.videoBuffer.length > bufferSize) {
+            state.videoBuffer.removeAt(0); //
+          }
           break;
       }
     });
   }
-  //
-  void _playVideoFrames() {
-    //
-    if (state.videoBuffer.isEmpty || state.videoBuffer.length < bufferSize) {
-      // AppLog.log('📊 Buffering - current buffer size: ${state.videoBuffer.length}/${bufferSize}');
-      return;
-    }
-    //
-    TalkData? oldestFrame;
-    int oldestIndex = -1;
-    for (int i = 0; i < state.videoBuffer.length; i++) {
-      if (oldestFrame == null ||
-          state.videoBuffer[i].durationMs < oldestFrame.durationMs) {
-        oldestFrame = state.videoBuffer[i];
-        oldestIndex = i;
-      }
-    }
-    //
-    if (oldestFrame != null && oldestIndex != -1) {
-      final cacheKey = oldestFrame.content.hashCode.toString();
-      // use the cached image
-      if (_imageCache.containsKey(cacheKey)) {
-        state.currentImage.value = _imageCache[cacheKey];
-        state.listData.value = Uint8List.fromList(oldestFrame.content);
-        state.videoBuffer.removeAt(oldestIndex); //
-        // //
-        // _frameCount++;
-        // final currentTime = DateTime.now().millisecondsSinceEpoch;
-        // final elapsed = currentTime - _lastFpsUpdateTime;
-        //
-        // if (elapsed >= 1000) {
-        //   //
-        //   state.fps.value = (_frameCount * 1000 / elapsed).round();
-        //   _frameCount = 0;
-        //   _lastFpsUpdateTime = currentTime;
-        // }
-      } else {
-        // AppLog.log('⚠️ Frame not found in cache - Key: $cacheKey');
-        state.videoBuffer.removeAt(oldestIndex); //
-      }
-    }
-  }
   //
   void _playAudioFrames() {
     //
@@ -246,50 +173,6 @@ class TalkViewLogic extends BaseGetXController {
     }
   }
-  //
-  Future<void> _decodeAndCacheFrame(TalkData talkData) async {
-    try {
-      String cacheKey = talkData.content.hashCode.toString();
-      //
-      if (!_imageCache.containsKey(cacheKey)) {
-        final Uint8List uint8Data = Uint8List.fromList(talkData.content);
-        final ui.Image image = await decodeImageFromList(uint8Data);
-        //
-        if (_imageCache.length >= bufferSize) {
-          _imageCache.remove(_imageCache.keys.first);
-        }
-        //
-        _imageCache[cacheKey] = image;
-        // AppLog.log('📥 Cached new frame - cache count: ${_imageCache.length}, Key: $cacheKey');
-      }
-    } catch (e) {
-      AppLog.log('❌ Frame decode error: $e');
-    }
-  }
-  //
-  void _adjustBufferSize(int frameInterval) {
-    const int frameDuration = 83; // 83ms per frame (12fps)
-    const int delayThresholdHigh = frameDuration * 2; // 2
-    const int delayThresholdLow = frameDuration; // 1
-    const int adjustInterval = 1; // 1
-    if (frameInterval > delayThresholdHigh && bufferSize < maxBufferSize) {
-      //
-      bufferSize = min(bufferSize + adjustInterval, maxBufferSize);
-      AppLog.log('📈 Increased buffer - current size: $bufferSize, frame interval: ${frameInterval}ms');
-    } else if (frameInterval < delayThresholdLow &&
-        bufferSize > minBufferSize) {
-      //
-      bufferSize = max(bufferSize - adjustInterval, minBufferSize);
-      AppLog.log('📉 Decreased buffer - current size: $bufferSize, frame interval: ${frameInterval}ms');
-    }
-  }
   ///
   void _startListenTalkStatus() {
     state.startChartTalkStatus.statusStream.listen((talkStatus) {
@@ -496,6 +379,32 @@ class TalkViewLogic extends BaseGetXController {
     _initAudioRecorder();
     requestPermissions();
+    // render at 10fps
+    videoRenderTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
+      final int now = DateTime.now().millisecondsSinceEpoch;
+      if (state.videoBuffer.isNotEmpty) {
+        final TalkData oldestFrame = state.videoBuffer.removeAt(0);
+        if (oldestFrame.content.isNotEmpty) {
+          state.listData.value = Uint8List.fromList(oldestFrame.content); //
+          final int decodeStart = DateTime.now().millisecondsSinceEpoch;
+          decodeImageFromList(Uint8List.fromList(oldestFrame.content)).then((ui.Image img) {
+            final int decodeEnd = DateTime.now().millisecondsSinceEpoch;
+            state.currentImage.value = img;
+            _renderedFrameCount++;
+            // fps
+            if (now - _lastFpsPrintTime >= 1000) {
+              // print('Actual rendered fps: $_renderedFrameCount');
+              _renderedFrameCount = 0;
+              _lastFpsPrintTime = now;
+            }
+          }).catchError((e) {
+            print('Image decode failed: $e');
+          });
+        }
+      }
+      //
+    });
   }
   @override
@@ -510,7 +419,7 @@ class TalkViewLogic extends BaseGetXController {
     stopProcessingAudio();
     //
-    _imageCache.clear();
+    // _imageCache.clear();
     state.oneMinuteTimeTimer?.cancel(); //
     state.oneMinuteTimeTimer = null; //
     state.oneMinuteTime.value = 0;
@@ -518,6 +427,10 @@ class TalkViewLogic extends BaseGetXController {
     _streamSubscription?.cancel();
     _isListening = false;
+    //
+    videoRenderTimer?.cancel();
+    videoRenderTimer = null;
     super.onClose();
   }
@@ -526,6 +439,9 @@ class TalkViewLogic extends BaseGetXController {
     stopProcessingAudio();
     //
     StartChartManage().reSetDefaultTalkExpect();
+    //
+    videoRenderTimer?.cancel();
+    videoRenderTimer = null;
     super.dispose();
   }
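
The render timer above is started in onInit and torn down in both onClose and dispose. A minimal sketch of that lifecycle in a bare GetxController (the controller name is hypothetical, and BaseGetXController in the diff presumably layers the dispose hook on top of this):

    import 'dart:async';

    import 'package:get/get.dart';

    class VideoRenderController extends GetxController {
      Timer? videoRenderTimer;

      @override
      void onInit() {
        super.onInit();
        // start the fixed-rate render tick (frame handling omitted here)
        videoRenderTimer =
            Timer.periodic(const Duration(milliseconds: 100), (_) {});
      }

      @override
      void onClose() {
        // stop ticking once the page goes away, and drop the reference
        videoRenderTimer?.cancel();
        videoRenderTimer = null;
        super.onClose();
      }
    }

Cancelling and nulling the timer on every teardown path matters because a leaked periodic timer would keep pulling and decoding frames after the talk page is closed.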

View File

@@ -135,16 +135,12 @@ class _TalkViewPageState extends State<TalkViewPage>
               child: SizedBox.expand(
                 child: RotatedBox(
                   quarterTurns: startChartManage.rotateAngle ~/ 90,
-                  child: Obx(
-                    () => state.currentImage.value != null
-                        ? RawImage(
-                            image: state.currentImage.value,
-                            width: ScreenUtil().scaleWidth,
-                            height: ScreenUtil().scaleHeight,
-                            fit: BoxFit.cover,
-                            filterQuality: FilterQuality.high,
-                          )
-                        : Container(color: Colors.transparent),
+                  child: RawImage(
+                    image: state.currentImage.value,
+                    width: ScreenUtil().scaleWidth,
+                    height: ScreenUtil().scaleHeight,
+                    fit: BoxFit.cover,
+                    filterQuality: FilterQuality.high,
                   ),
                 ),
               ),
@@ -160,35 +156,33 @@ class _TalkViewPageState extends State<TalkViewPage>
                     style: TextStyle(color: Colors.black, fontSize: 26.sp),
                   ))
               : Container()),
-          Obx(
-            () => state.listData.value.isNotEmpty &&
-                    state.oneMinuteTime.value > 0
-                ? Positioned(
-                    top: ScreenUtil().statusBarHeight + 75.h,
-                    width: 1.sw,
-                    child: Obx(
-                      () {
-                        final String sec = (state.oneMinuteTime.value % 60)
-                            .toString()
-                            .padLeft(2, '0');
-                        final String min = (state.oneMinuteTime.value ~/ 60)
-                            .toString()
-                            .padLeft(2, '0');
-                        return Row(
-                          mainAxisAlignment: MainAxisAlignment.center,
-                          children: <Widget>[
-                            Text(
-                              '$min:$sec',
-                              style: TextStyle(
-                                  fontSize: 26.sp, color: Colors.white),
-                            ),
-                          ],
-                        );
-                      },
-                    ),
-                  )
-                : Container(),
-          ),
+          Obx(() =>
+              state.listData.value.isNotEmpty && state.oneMinuteTime.value > 0
+                  ? Positioned(
+                      top: ScreenUtil().statusBarHeight + 75.h,
+                      width: 1.sw,
+                      child: Obx(
+                        () {
+                          final String sec = (state.oneMinuteTime.value % 60)
+                              .toString()
+                              .padLeft(2, '0');
+                          final String min = (state.oneMinuteTime.value ~/ 60)
+                              .toString()
+                              .padLeft(2, '0');
+                          return Row(
+                            mainAxisAlignment: MainAxisAlignment.center,
+                            children: <Widget>[
+                              Text(
+                                '$min:$sec',
+                                style: TextStyle(
+                                    fontSize: 26.sp, color: Colors.white),
+                              ),
+                            ],
+                          );
+                        },
+                      ),
+                    )
+                  : Container()),
          Positioned(
            bottom: 10.w,
            child: Container(
@@ -458,7 +452,6 @@ class _TalkViewPageState extends State<TalkViewPage>
            // if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
            //     state.listData.value.length > 0) {
            //   logic.udpOpenDoorAction();
-            logic.remoteOpenLock();
            // }
            // if (UDPManage().remoteUnlock == 1) {
            //   logic.udpOpenDoorAction();
@@ -466,6 +459,7 @@ class _TalkViewPageState extends State<TalkViewPage>
            // } else {
            //   logic.showToast('请在锁设置中开启远程开锁'.tr);
            // }
+            logic.remoteOpenLock();
          },
        )
      ]);

View File

@@ -90,6 +90,5 @@ class TalkViewState {
   RxBool isLongPressing = false.obs; //
   RxBool hasAudioData = false.obs; //
   RxInt lastAudioTimestamp = 0.obs; //
-  //
-  final Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
+  Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
 }
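
On the widget side the page now hands state.currentImage.value straight to RawImage; since RawImage paints nothing while its image is null, the old null-check branch with the transparent Container is no longer needed. A minimal sketch of that binding, with an explicit Obx wrapper so the frame repaints whenever the reactive value changes (FrameState and FrameView are illustrative names, playing the roles of TalkViewState and the talk page):

    import 'dart:ui' as ui;

    import 'package:flutter/widgets.dart';
    import 'package:get/get.dart';

    /// Holds the most recently decoded frame.
    class FrameState {
      final Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
    }

    class FrameView extends StatelessWidget {
      const FrameView({super.key, required this.state});

      final FrameState state;

      @override
      Widget build(BuildContext context) {
        // Obx rebuilds this subtree each time currentImage.value is reassigned.
        return Obx(() => RawImage(
              image: state.currentImage.value,
              fit: BoxFit.cover,
              filterQuality: FilterQuality.high,
            ));
      }
    }

Each decoded ui.Image replaces the previous one in currentImage, so RawImage always shows the latest frame directly, without routing the bytes through an ImageProvider.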