fix: complete the video quality switching logic

liyi 2025-05-15 16:46:50 +08:00
parent 069ef1b592
commit 90f94e1a9a
3 changed files with 291 additions and 91 deletions

View File

@@ -35,6 +35,8 @@ import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_st
 import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
 import 'package:star_lock/tools/G711Tool.dart';
 import 'package:star_lock/tools/bugly/bugly_tool.dart';
+import 'package:star_lock/tools/commonDataManage.dart';
+import 'package:star_lock/tools/storage.dart';
 import 'package:video_decode_plugin/video_decode_plugin.dart';
 import '../../../../tools/baseGetXController.dart';
@@ -75,6 +77,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   // Last received frameSeq
   int? _lastFrameSeq;
+  // Flag set when a frameSeq wrap-around (new stream) is detected
+  bool _pendingStreamReset = false;
+  // Resolution to use when re-creating the decoder for the new stream
+  int _pendingResetWidth = 864;
+  int _pendingResetHeight = 480;
+  // Whether we are still waiting for the new stream's I-frame
+  bool _waitingForIFrame = false;
+
   // Initialize the video decoder
   Future<void> _initVideoDecoder() async {
     try {
@@ -89,12 +101,12 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       // Initialize the decoder and get a textureId
       final textureId = await VideoDecodePlugin.initDecoder(config);
       if (textureId != null) {
-        state.textureId.value = textureId;
+        Future.microtask(() => state.textureId.value = textureId);
         AppLog.log('视频解码器初始化成功textureId=$textureId');
         VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
-          state.isLoading.value = false;
           AppLog.log('已经开始渲染=======');
+          // Hide the loading indicator once frames are actually rendering
+          Future.microtask(() => state.isLoading.value = false);
         });
       } else {
         AppLog.log('视频解码器初始化失败');
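
Note: the Rx writes to state.textureId and state.isLoading are now wrapped in Future.microtask, presumably to defer the observable update until the current synchronous work (a build pass or a platform callback) has finished, which is a common way to avoid mutating GetX/Obx state mid-build. A minimal standalone sketch of the deferral behavior, using only dart:async and hypothetical local names (not code from the commit):

import 'dart:async';

void main() {
  var isLoading = true;
  // The update is queued and runs only after the current
  // synchronous code (e.g. a build pass) has completed.
  scheduleMicrotask(() => isLoading = false);
  print('before microtask: $isLoading'); // still true
  scheduleMicrotask(() => print('after microtask: $isLoading')); // false
}
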
@@ -146,12 +158,53 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     int frameSeq,
     int frameSeqI,
   ) {
-    // frameSeq must be strictly increasing
-    if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
-      AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
-      return;
+    // Detect a frameSeq wrap-around on an I-frame: the device has restarted
+    // the stream (e.g. after a quality switch), so show loading and reset
+    if (!_pendingStreamReset &&
+        _lastFrameSeq != null &&
+        frameType == TalkDataH264Frame_FrameTypeE.I &&
+        frameSeq < _lastFrameSeq!) {
+      AppLog.log(
+          '检测到新流I帧frameSeq回绕进入loading并重置: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
+      Future.microtask(() => state.isLoading.value = true);
+      _pendingStreamReset = true;
+      // Stop the frame-processing timer while the decoder is rebuilt
+      _stopFrameProcessTimer();
+      // Re-create the decoder at the resolution of the new stream
+      _resetDecoderForNewStream(_pendingResetWidth, _pendingResetHeight);
+      // Clear all local frame state
+      _lastFrameSeq = null;
+      _decodedIFrames.clear();
+      state.h264FrameBuffer.clear();
+      // Restart the frame-processing timer
+      _startFrameProcessTimer();
+      // No return: fall through so this I-frame re-initializes decoding
+    }
+    // While a stream reset is pending, only accept the new stream's I-frame
+    if (_pendingStreamReset) {
+      if (frameType == TalkDataH264Frame_FrameTypeE.I) {
+        AppLog.log('收到新流I帧关闭loading: frameSeq=$frameSeq');
+        //Future.microtask(() => state.isLoading.value = false);
+        _pendingStreamReset = false;
+        _lastFrameSeq = frameSeq;
+        _decodedIFrames.clear();
+        _decodedIFrames.add(frameSeq);
+        // Fall through so the I-frame is buffered and decoded
+      } else {
+        // Drop everything else while waiting for the new stream's I-frame
+        AppLog.log('等待新流I帧丢弃非I帧: frameSeq=$frameSeq, frameType=$frameType');
+        return;
+      }
+    } else {
+      // Normal path: frameSeq must be strictly increasing
+      if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
+        AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
+        return;
+      }
+      _lastFrameSeq = frameSeq;
     }
-    _lastFrameSeq = frameSeq;
     // Pack the frame into a Map
     final Map<String, dynamic> frameMap = {
       'frameData': frameData,
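
The branch structure above implements a small acceptance rule: an I-frame whose frameSeq is lower than the last one seen starts a new stream (reset the decoder, then decode it), non-I-frames are dropped while a reset is pending, and otherwise frameSeq must be strictly increasing. A distilled standalone sketch of that rule — classifyFrame, FrameKind and FrameAction are hypothetical names for illustration, not code from the commit:

enum FrameKind { iFrame, pFrame }

enum FrameAction { resetAndDecode, decode, drop }

FrameAction classifyFrame({
  required int? lastSeq,
  required bool pendingReset,
  required FrameKind kind,
  required int seq,
}) {
  // A lower-numbered I-frame after higher frameSeq values means the device
  // restarted the stream (for example after a quality switch).
  if (!pendingReset &&
      lastSeq != null &&
      kind == FrameKind.iFrame &&
      seq < lastSeq) {
    return FrameAction.resetAndDecode;
  }
  if (pendingReset) {
    // While waiting for the new stream, only its I-frame is accepted.
    return kind == FrameKind.iFrame ? FrameAction.decode : FrameAction.drop;
  }
  // Normal path: frameSeq must be strictly increasing.
  if (lastSeq != null && seq <= lastSeq) {
    return FrameAction.drop;
  }
  return FrameAction.decode;
}

void main() {
  // Old stream was at frameSeq 500; a fresh I-frame arrives with frameSeq 3.
  print(classifyFrame(
      lastSeq: 500, pendingReset: false, kind: FrameKind.iFrame, seq: 3));
  // -> FrameAction.resetAndDecode
  print(classifyFrame(
      lastSeq: 500, pendingReset: true, kind: FrameKind.pFrame, seq: 4));
  // -> FrameAction.drop
}
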
@@ -163,8 +216,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // When the buffer is full, drop P/B frames first
     while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
-      int pbIndex = state.h264FrameBuffer.indexWhere((f) =>
-          f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
+      int pbIndex = state.h264FrameBuffer
+          .indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
       if (pbIndex != -1) {
         state.h264FrameBuffer.removeAt(pbIndex);
       } else {
@@ -209,29 +262,31 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     try {
       // Take the next frame from the buffer
-      final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(0);
-      final List<int> frameData = frameMap['frameData'];
-      final TalkDataH264Frame_FrameTypeE frameType = frameMap['frameType'];
-      final int frameSeq = frameMap['frameSeq'];
-      final int frameSeqI = frameMap['frameSeqI'];
-      int pts = frameMap['pts'];
-      // int pts = DateTime.now().millisecondsSinceEpoch;
-
-      if (frameType == TalkDataH264Frame_FrameTypeE.P) {
-        // frameSeqI identifies the I-frame this P frame depends on
-        if (!(_decodedIFrames.contains(frameSeqI))) {
-          AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
-          return;
-        }
-      } else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
-        // Record the I-frame sequence number
-        _decodedIFrames.add(frameSeq);
-      }
-      // Save the h264 stream to a file (debug only)
-      // _appendH264FrameToFile(frameData, frameType);
-      // final timestamp = DateTime.now().millisecondsSinceEpoch;
-      // final timestamp64 = timestamp is int ? timestamp : timestamp.toInt();
+      final Map<String, dynamic>? frameMap = state.h264FrameBuffer.isNotEmpty
+          ? state.h264FrameBuffer.removeAt(0)
+          : null;
+      if (frameMap == null) {
+        state.isProcessingFrame = false;
+        return;
+      }
+      final List<int>? frameData = frameMap['frameData'];
+      final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
+      final int? frameSeq = frameMap['frameSeq'];
+      final int? frameSeqI = frameMap['frameSeqI'];
+      final int? pts = frameMap['pts'];
+      if (frameData == null ||
+          frameType == null ||
+          frameSeq == null ||
+          frameSeqI == null ||
+          pts == null) {
+        state.isProcessingFrame = false;
+        return;
+      }
+      // Skip sending when the textureId is not ready yet
+      if (state.textureId.value == null) {
+        state.isProcessingFrame = false;
+        return;
+      }
       await VideoDecodePlugin.sendFrame(
         frameData: frameData,
         frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
@@ -462,6 +517,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // Initialize the video decoder
     _initVideoDecoder();
+    _initHdOptions();
     // Reset the H264 frame buffer
     state.h264FrameBuffer.clear();
     state.isProcessingFrame = false;
@@ -490,7 +546,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // Release the decoder
     if (state.textureId.value != null) {
       VideoDecodePlugin.releaseDecoder();
-      state.textureId.value = null;
+      Future.microtask(() => state.textureId.value = null);
     }

     //
@@ -577,36 +633,42 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     }
   }

   // Remote unlock
   Future<void> remoteOpenLock() async {
+    final LockListInfoItemEntity currentKeyInfo =
+        CommonDataManage().currentKeyInfo;
+    var lockId = currentKeyInfo.lockId ?? 0;
+    var remoteUnlock = currentKeyInfo.lockSetting?.remoteUnlock ?? 0;
     final lockPeerId = StartChartManage().lockPeerId;
-    final lockListPeerId = StartChartManage().lockListPeerId;
-    int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
-    // If the peer list is available, use the lock whose peerId
-    // matches the current lockPeerId
-    lockListPeerId.forEach((element) {
-      if (element.network?.peerId == lockPeerId) {
-        lockId = element.lockId ?? 0;
-      }
-    });
-
-    final LockSetInfoEntity lockSetInfoEntity =
-        await ApiRepository.to.getLockSettingInfoData(
-      lockId: lockId.toString(),
-    );
-    if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
-      if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
-          lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
-        final LoginEntity entity = await ApiRepository.to
-            .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
-        if (entity.errorCode!.codeIsSuccessful) {
-          showToast('已开锁'.tr);
-          StartChartManage().lockListPeerId = [];
-        }
-      } else {
-        showToast('该锁的远程开锁功能未启用'.tr);
-      }
-    }
+    final LockListInfoGroupEntity? lockListInfoGroupEntity =
+        await Storage.getLockMainListData();
+    if (lockListInfoGroupEntity != null) {
+      lockListInfoGroupEntity!.groupList?.forEach((element) {
+        final lockList = element.lockList;
+        if (lockList != null && lockList.length != 0) {
+          for (var lockInfo in lockList) {
+            final peerId = lockInfo.network?.peerId;
+            if (peerId != null && peerId != '') {
+              if (peerId == lockPeerId) {
+                lockId = lockInfo.lockId ?? 0;
+                remoteUnlock = lockInfo.lockSetting?.remoteUnlock ?? 0;
+              }
+            }
+          }
+        }
+      });
+    }
+    if (remoteUnlock == 1) {
+      final LoginEntity entity = await ApiRepository.to
+          .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
+      if (entity.errorCode!.codeIsSuccessful) {
+        showToast('已开锁'.tr);
+        StartChartManage().lockListPeerId = [];
+      }
+    } else {
+      showToast('该锁的远程开锁功能未启用'.tr);
+    }
   }
@@ -1172,4 +1234,81 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       }
     }
   }
+
+  // Handle a video quality (resolution) switch selected by the user
+  void onQualityChanged(String quality) async {
+    state.currentQuality.value = quality;
+    TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
+    final audioType = talkExpectReq.audioType;
+    int width = 864;
+    int height = 480;
+    switch (quality) {
+      case '高清':
+        talkExpectReq = TalkExpectReq(
+          videoType: [VideoTypeE.H264_720P],
+          audioType: audioType,
+        );
+        width = 1280;
+        height = 720;
+        break;
+      case '标清':
+        talkExpectReq = TalkExpectReq(
+          videoType: [VideoTypeE.H264],
+          audioType: audioType,
+        );
+        width = 864;
+        height = 480;
+        break;
+    }
+
+    /// Send the new expected stream type and restart the talk-expect message timer
+    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
+        talkExpect: talkExpectReq);
+    // Do not show loading here; the frameSeq wrap-around check detects the
+    // new stream and resets the decoder to the resolution remembered below
+    _pendingStreamReset = false;
+    _pendingResetWidth = width;
+    _pendingResetHeight = height;
+  }
+
+  // Initialize the current quality label from the default talk-expect config
+  void _initHdOptions() {
+    TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
+    final videoType = talkExpectReq.videoType;
+    if (videoType.contains(VideoTypeE.H264)) {
+      state.currentQuality.value = '标清';
+    } else if (videoType.contains(VideoTypeE.H264_720P)) {
+      state.currentQuality.value = '高清';
+    }
+  }
+
+  // Release and re-create the decoder when a new stream (frameSeq wrap-around) starts
+  Future<void> _resetDecoderForNewStream(int width, int height) async {
+    try {
+      if (state.textureId.value != null) {
+        await VideoDecodePlugin.releaseDecoder();
+        Future.microtask(() => state.textureId.value = null);
+      }
+      final config = VideoDecoderConfig(
+        width: width,
+        height: height,
+        codecType: 'h264',
+      );
+      final textureId = await VideoDecodePlugin.initDecoder(config);
+      if (textureId != null) {
+        Future.microtask(() => state.textureId.value = textureId);
+        AppLog.log('frameSeq回绕后解码器初始化成功textureId=$textureId');
+        VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
+          AppLog.log('已经开始渲染=======');
+          // Hide the loading indicator once rendering starts
+          Future.microtask(() => state.isLoading.value = false);
+        });
+      } else {
+        AppLog.log('frameSeq回绕后解码器初始化失败');
+      }
+      _startFrameProcessTimer();
+    } catch (e) {
+      AppLog.log('frameSeq回绕时解码器初始化错误: $e');
+    }
+  }
 }
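
For reference, the two quality labels map to a stream type and a decoder resolution, which onQualityChanged stores in _pendingResetWidth/_pendingResetHeight until the new stream's first I-frame triggers _resetDecoderForNewStream. The helper below is a hypothetical restatement of that mapping (QualityPreset and presetFor are illustrative names, not part of the commit):

class QualityPreset {
  final int width;
  final int height;
  const QualityPreset(this.width, this.height);
}

QualityPreset presetFor(String quality) {
  switch (quality) {
    case '高清': // HD: requests VideoTypeE.H264_720P, decoder at 1280x720
      return const QualityPreset(1280, 720);
    case '标清': // SD: requests VideoTypeE.H264, decoder at 864x480
    default:
      return const QualityPreset(864, 480);
  }
}
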

View File

@@ -97,40 +97,42 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
           final double scaleWidth = physicalWidth / rotatedImageWidth;
           final double scaleHeight = physicalHeight / rotatedImageHeight;
           max(scaleWidth, scaleHeight); // larger of the two scale factors
-          return state.isLoading.isTrue
-              ? Image.asset(
-                  'images/main/monitorBg.png',
-                  width: screenWidth,
-                  height: screenHeight,
-                  fit: BoxFit.cover,
-                )
-              : Positioned.fill(
-                  child: PopScope(
-                    canPop: false,
-                    child: RepaintBoundary(
-                      key: state.globalKey,
-                      child: SizedBox.expand(
-                        child: RotatedBox(
-                          // Rotate with RotatedBox
-                          quarterTurns:
-                              startChartManage.rotateAngle ~/ 90,
-                          child: Platform.isIOS
-                              ? Transform.scale(
-                                  scale: 1.008, // avoid white edges on iOS
-                                  child: Texture(
-                                    textureId: state.textureId.value!,
-                                    filterQuality: FilterQuality.medium,
-                                  ),
-                                )
-                              : Texture(
-                                  textureId: state.textureId.value!,
-                                  filterQuality: FilterQuality.medium,
-                                ),
-                        ),
-                      ),
-                    ),
-                  ),
-                );
+          // While loading, or while textureId is null, show the placeholder image
+          if (state.isLoading.isTrue || state.textureId.value == null) {
+            return Image.asset(
+              'images/main/monitorBg.png',
+              width: screenWidth,
+              height: screenHeight,
+              fit: BoxFit.cover,
+            );
+          } else {
+            return Positioned.fill(
+              child: PopScope(
+                canPop: false,
+                child: RepaintBoundary(
+                  key: state.globalKey,
+                  child: SizedBox.expand(
+                    child: RotatedBox(
+                      // Rotate with RotatedBox
+                      quarterTurns: startChartManage.rotateAngle ~/ 90,
+                      child: Platform.isIOS
+                          ? Transform.scale(
+                              scale: 1.008, // avoid white edges on iOS
+                              child: Texture(
+                                textureId: state.textureId.value!,
+                                filterQuality: FilterQuality.medium,
+                              ),
+                            )
+                          : Texture(
+                              textureId: state.textureId.value!,
+                              filterQuality: FilterQuality.medium,
+                            ),
+                    ),
+                  ),
+                ),
+              ),
+            );
+          }
         },
       ),
@@ -295,6 +297,62 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
           ),
         ),
       ),
+      SizedBox(width: 50.w),
+      // Quality (HD/SD) switch button
+      GestureDetector(
+        onTap: () async {
+          // Show the quality picker in a bottom sheet
+          showModalBottomSheet(
+            context: context,
+            backgroundColor: Colors.white,
+            shape: RoundedRectangleBorder(
+              borderRadius: BorderRadius.vertical(top: Radius.circular(20.w)),
+            ),
+            builder: (BuildContext context) {
+              final List<String> qualities = ['高清', '标清'];
+              return SafeArea(
+                child: SingleChildScrollView(
+                  child: Column(
+                    mainAxisSize: MainAxisSize.min,
+                    children: qualities.map((q) {
+                      return Obx(() => InkWell(
+                            onTap: () {
+                              Navigator.of(context).pop();
+                              logic.onQualityChanged(q);
+                            },
+                            child: Container(
+                              padding: EdgeInsets.symmetric(vertical: 18.w),
+                              child: Row(
+                                mainAxisAlignment: MainAxisAlignment.center,
+                                mainAxisSize: MainAxisSize.max,
+                                children: [
+                                  Text(
+                                    q,
+                                    style: TextStyle(
+                                      color: state.currentQuality.value == q
+                                          ? AppColors.mainColor
+                                          : Colors.black,
+                                      fontWeight: state.currentQuality.value == q
+                                          ? FontWeight.bold
+                                          : FontWeight.normal,
+                                      fontSize: 28.sp,
+                                    ),
+                                  ),
+                                ],
+                              ),
+                            ),
+                          ));
+                    }).toList(),
+                  ),
+                ),
+              );
+            },
+          );
+        },
+        child: Container(
+          child: Icon(Icons.high_quality_outlined,
+              color: Colors.white, size: 38.w),
+        ),
+      ),
     ]);
   }

View File

@@ -117,4 +117,7 @@ class TalkViewNativeDecodeState {
   // H264 file saving (debug)
   String? h264FilePath;
   File? h264File;
+
+  // Currently selected video quality; defaults to '高清' (HD)
+  RxString currentQuality = '高清'.obs;
 }