合并代码后:优化视频解码性能,修复设备时区显示与鸿蒙手机权限相关bug

This commit is contained in:
sky_min 2025-11-10 13:57:28 +08:00
parent c167b254fd
commit f9ee17de21
4 changed files with 281 additions and 24 deletions

View File

@ -153,7 +153,7 @@ class _BasicInformationPageState extends State<BasicInformationPage> {
visible: state.lockSetInfoData.value.lockFeature?.wifi == 1,
child: CommonItem(
leftTitel: '设备时区'.tr,
rightTitle: state.lockSetInfoData.value.lockBasicInfo?.timezoneName,
rightTitle: _convertTimezoneToUTCFormat(state.lockSetInfoData.value.lockBasicInfo?.timezoneName),
allHeight: 70.h,
isHaveLine: true,
),
@ -244,4 +244,112 @@ class _BasicInformationPageState extends State<BasicInformationPage> {
// 线: percentage = (rssi + 100) * 100 / 70
return ((clampedRssi + 100) * 100 ~/ 70).clamp(0, 100);
}
//
/// Converts an IANA timezone identifier (e.g. 'Asia/Shanghai') to a
/// display-friendly UTC offset string (e.g. 'UTC+8').
///
/// Returns '-' when [timezoneName] is null or empty, returns the value
/// unchanged when it already starts with 'UTC', and falls back to the raw
/// identifier when it is not found in the lookup table.
///
/// NOTE(review): the offsets below are fixed standard-time values and do
/// not account for daylight saving time (e.g. Europe/London is UTC+1 in
/// summer, America/New_York is UTC-4) — confirm this is acceptable for a
/// display-only field.
String _convertTimezoneToUTCFormat(String? timezoneName) {
  if (timezoneName == null || timezoneName.isEmpty) {
    return '-';
  }
  // Already formatted as a UTC offset — return unchanged.
  if (timezoneName.startsWith('UTC')) {
    return timezoneName;
  }
  // `const` map: canonicalized at compile time, so it is NOT rebuilt on
  // every call (the original non-const literal was re-allocated each time
  // this method ran).
  const Map<String, String> timezoneMap = {
    // Asia (UTC+)
    'Asia/Shanghai': 'UTC+8',
    'Asia/Taipei': 'UTC+8',
    'Asia/Singapore': 'UTC+8',
    'Asia/Hong_Kong': 'UTC+8',
    'Asia/Macau': 'UTC+8',
    'Asia/Seoul': 'UTC+9',
    'Asia/Tokyo': 'UTC+9',
    'Asia/Dubai': 'UTC+4',
    'Asia/Kolkata': 'UTC+5:30',
    'Asia/Bangkok': 'UTC+7',
    'Asia/Jakarta': 'UTC+7',
    'Asia/Kuala_Lumpur': 'UTC+8',
    'Asia/Manila': 'UTC+8',
    'Asia/Karachi': 'UTC+5',
    'Asia/Tehran': 'UTC+3:30',
    'Asia/Baghdad': 'UTC+3',
    'Asia/Beirut': 'UTC+2',
    'Asia/Jerusalem': 'UTC+2',
    'Asia/Damascus': 'UTC+3',
    'Asia/Amman': 'UTC+3',
    'Asia/Baku': 'UTC+4',
    'Asia/Yerevan': 'UTC+4',
    'Asia/Tbilisi': 'UTC+4',
    // Europe
    'Europe/London': 'UTC+0',
    'Europe/Paris': 'UTC+1',
    'Europe/Berlin': 'UTC+1',
    'Europe/Rome': 'UTC+1',
    'Europe/Madrid': 'UTC+1',
    'Europe/Amsterdam': 'UTC+1',
    'Europe/Brussels': 'UTC+1',
    'Europe/Vienna': 'UTC+1',
    'Europe/Stockholm': 'UTC+1',
    'Europe/Oslo': 'UTC+1',
    'Europe/Copenhagen': 'UTC+1',
    'Europe/Warsaw': 'UTC+1',
    'Europe/Prague': 'UTC+1',
    'Europe/Budapest': 'UTC+1',
    'Europe/Athens': 'UTC+2',
    'Europe/Helsinki': 'UTC+2',
    'Europe/Riga': 'UTC+2',
    'Europe/Tallinn': 'UTC+2',
    'Europe/Vilnius': 'UTC+2',
    'Europe/Sofia': 'UTC+2',
    'Europe/Bucharest': 'UTC+2',
    'Europe/Istanbul': 'UTC+3',
    'Europe/Minsk': 'UTC+3',
    'Europe/Moscow': 'UTC+3',
    // North America (UTC-)
    'America/New_York': 'UTC-5',
    'America/Los_Angeles': 'UTC-8',
    'America/Chicago': 'UTC-6',
    'America/Denver': 'UTC-7',
    'America/Phoenix': 'UTC-7',
    'America/Toronto': 'UTC-5',
    'America/Montreal': 'UTC-5',
    'America/Vancouver': 'UTC-8',
    'America/Edmonton': 'UTC-7',
    'America/Halifax': 'UTC-4',
    'America/St_Johns': 'UTC-3:30',
    // Central / South America
    'America/Sao_Paulo': 'UTC-3',
    'America/Buenos_Aires': 'UTC-3',
    'America/Santiago': 'UTC-4',
    'America/Lima': 'UTC-5',
    'America/Bogota': 'UTC-5',
    'America/Caracas': 'UTC-4',
    'America/Mexico_City': 'UTC-6',
    // Oceania
    'Australia/Sydney': 'UTC+10',
    'Australia/Melbourne': 'UTC+10',
    'Australia/Brisbane': 'UTC+10',
    'Australia/Perth': 'UTC+8',
    'Australia/Adelaide': 'UTC+9:30',
    'Pacific/Auckland': 'UTC+12',
    'Pacific/Fiji': 'UTC+12',
    // Africa
    'Africa/Cairo': 'UTC+2',
    'Africa/Johannesburg': 'UTC+2',
    'Africa/Lagos': 'UTC+1',
    'Africa/Nairobi': 'UTC+3',
    'Africa/Casablanca': 'UTC+1',
    'Africa/Tunis': 'UTC+1',
    'Africa/Algiers': 'UTC+1',
  };
  // Unknown identifiers fall back to the raw name so the UI still shows
  // something meaningful instead of '-'.
  return timezoneMap[timezoneName] ?? timezoneName;
}
}

View File

@ -26,8 +26,11 @@ class SelectLockTypeLogic extends BaseGetXController {
final DeviceInfoPlugin deviceInfo = DeviceInfoPlugin();
if (Platform.isAndroid) {
final AndroidDeviceInfo androidInfo = await deviceInfo.androidInfo;
// HarmonyOS标识'HUAWEI'
return androidInfo.brand == 'HONOR' || androidInfo.version.sdkInt >= 30; // API可能需要更新以适配最新鸿蒙系统版本
// 鸿
return androidInfo.brand == 'HUAWEI' ||
androidInfo.brand == 'HONOR' ||
androidInfo.manufacturer == 'HUAWEI' ||
androidInfo.version.release.contains('HarmonyOS');
} else {
return false;
}
@ -38,19 +41,33 @@ class SelectLockTypeLogic extends BaseGetXController {
if (!Platform.isIOS) {
final bool locationRequest = await PermissionDialog.request(Permission.location);
final bool bluetoothRequest = await PermissionDialog.requestBluetooth();
//
final bool storageRequest = await PermissionDialog.request(Permission.storage);
bool isHarmonyOS = await checkIfHarmonyOS();
// 鸿
if(isHarmonyOS){
Get.snackbar('提示', '如您是鸿蒙系统,请下拉手动开启系统的“位置信息”,否则无法搜索到锁哦');
}
if (!bluetoothRequest || !locationRequest) {
return;
print('鸿蒙手机提示----');
if (!bluetoothRequest || !locationRequest || !storageRequest) {
return;
}
} else {
if (!bluetoothRequest || !locationRequest) {
return;
}
}
}
Get.toNamed(Routers.nearbyLockPage);
}
/// Requests the extra runtime permissions HarmonyOS devices need:
/// storage access and the photo library, one system dialog at a time.
///
/// Results are intentionally ignored; callers re-check permission state
/// before the operations that need them.
Future<void> requestHarmonyOSPermissions() async {
  final permissionsToRequest = [Permission.storage, Permission.photos];
  for (final permission in permissionsToRequest) {
    await permission.request();
  }
}
@override
void onInit() {
super.onInit();

View File

@ -44,6 +44,31 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int audioBufferSize = 20; // 2
int _frameProcessCount = 0;
int _lastFrameProcessTime = 0;
double _actualFps = 0.0;
/// Samples the frame-processing throughput once per second and adapts
/// `state.targetFps` toward the measured rate, restarting the frame
/// timer whenever the target changes.
///
/// Bug fix: the original wrote `clamp(...) as int`, but `clamp` on a
/// double expression returns a double, and casting a double to int with
/// `as` throws a TypeError at runtime in Dart — use `round()` instead.
void _monitorFrameProcessingPerformance() {
  _frameProcessCount++;
  final now = DateTime.now().millisecondsSinceEpoch;
  if (now - _lastFrameProcessTime >= 1000) { // 1-second sampling window
    _actualFps = _frameProcessCount.toDouble();
    _frameProcessCount = 0;
    _lastFrameProcessTime = now;
    if (_actualFps < state.targetFps * 0.7) {
      // Throughput is well below target: back the target off by 10%
      // so the processing timer stops outrunning the decoder.
      state.targetFps = (state.targetFps * 0.9).clamp(15.0, 60.0).round();
      _startFrameProcessTimer();
    } else if (_actualFps > state.targetFps * 1.2 && state.targetFps < 30.0) {
      // Throughput comfortably exceeds target: raise it by 10%,
      // capped at 30 fps.
      state.targetFps = (state.targetFps * 1.1).clamp(15.0, 30.0).round();
      _startFrameProcessTimer();
    }
  }
}
// frameSeq较小时阈值也小
int _getFrameSeqRolloverThreshold(int lastSeq) {
if (lastSeq > 2000) {
@ -237,6 +262,23 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
//
if (state.h264FrameBuffer.length > state.maxFrameBufferSize * 0.8) {
//
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize * 0.9) {
// P帧
int pbIndex = state.h264FrameBuffer.indexWhere((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] < frameSeq - 100);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
// P帧
state.h264FrameBuffer.removeAt(0);
}
}
}
//
state.h264FrameBuffer.add(frameMap);
}
@ -259,8 +301,20 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
///
///
void _processNextFrameFromBuffer() async {
_monitorFrameProcessingPerformance();
final startTime = DateTime.now().microsecondsSinceEpoch;
//
final bufferLength = state.h264FrameBuffer.length;
//
if (bufferLength > 30 && state.targetFps < 60) {
_adjustFrameProcessFrequency(state.targetFps * 1.5);
}
//
else if (bufferLength < 10 && state.targetFps > 25) {
_adjustFrameProcessFrequency(state.targetFps * 0.8);
}
//
if (state.isProcessingFrame) {
return;
@ -273,17 +327,22 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
try {
// I帧frameSeq最小的I帧消费
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final iFrames = state.h264FrameBuffer.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) =>
(a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(
targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
@ -293,7 +352,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
@ -315,19 +375,41 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
state.isProcessingFrame = true;
// 使
Map<String, dynamic>? frameToProcess;
int frameIndex = -1;
// I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
if (lastDecodedIFrameSeq != null) {
// I帧关联的P帧
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
frame['frameSeqI'] == lastDecodedIFrameSeq) {
frameToProcess = frame;
frameIndex = i;
break;
}
}
final validPFrames =
state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
state.h264FrameBuffer.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == lastDecodedIFrameSeq).toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
validPFrames.sort((a, b) =>
(a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] == minPFrame['frameSeq'] &&
f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(
targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
@ -337,7 +419,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
@ -358,8 +441,35 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
}
// I帧到来
// P帧I帧
if (frameToProcess == null) {
int earliestIframeIndex = -1;
int earliestIframeSeq = 999999;
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.I) {
final frameSeq = frame['frameSeq'] as int;
if (frameSeq < earliestIframeSeq) {
earliestIframeSeq = frameSeq;
frameToProcess = frame;
earliestIframeIndex = i;
}
}
}
if (frameToProcess != null) {
frameIndex = earliestIframeIndex;
}
// I帧到来
} //
if (frameToProcess != null && frameIndex >= 0) {
final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(
frameIndex);
// ...
}
} finally {
state.isProcessingFrame = false;
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// > 5ms
@ -369,6 +479,18 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
}
}
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
//
if (durationMs > 16.67) { // (60fps)
AppLog.log('帧处理耗时过长: ${durationMs.toStringAsFixed(2)} ms, 缓冲区长度: ${state.h264FrameBuffer.length}');
}
}
/// Clamps [newFps] into the supported 20–60 fps range, stores it as the
/// new target, and restarts the frame-processing timer at that rate.
///
/// Bug fix: the original `clamp(...) as int` throws at runtime — `clamp`
/// on a double returns a double, which cannot be cast to int with `as`;
/// convert with `round()` instead.
/// NOTE(review): this 20–60 range differs from the 15–60 range used by
/// the performance monitor — confirm which lower bound is intended.
void _adjustFrameProcessFrequency(double newFps) {
  state.targetFps = newFps.clamp(20.0, 60.0).round();
  _startFrameProcessTimer(); // apply the new rate immediately
}
///
@ -823,6 +945,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.currentQuality.value = quality;
TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
final audioType = talkExpectReq.audioType;
// loading状态
state.isLoading.value = true;
int width = 864;
int height = 480;
switch (quality) {
@ -844,14 +968,19 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
break;
}
//
_pendingResetWidth = width;
_pendingResetHeight = height;
//
await _resetDecoderForNewStream(width, height);
///
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq);
// loadingframeSeq回绕检测
// frameSeq回绕检测标志
_pendingStreamReset = false;
_pendingResetWidth = width;
_pendingResetHeight = height;
// _pendingStreamReset = false;
// _pendingResetWidth = width;
// _pendingResetHeight = height;
}
void _initHdOptions() {
@ -867,6 +996,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
Future<void> _resetDecoderForNewStream(int width, int height) async {
try {
//
state.isLoading.value = true;
//
_stopFrameProcessTimer();
@ -877,7 +1008,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
//
await Future.delayed(Duration(milliseconds: 100));
await Future.delayed(Duration(milliseconds: 50));
//
final config = VideoDecoderConfig(
@ -906,6 +1037,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
_lastFrameSeq = null;
hasSps = false;
hasPps = false;
spsCache = null;

View File

@ -110,7 +110,7 @@ class TalkViewNativeDecodeState {
// H264帧缓冲区相关
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区
final int maxFrameBufferSize = 25; //
final int targetFps = 25; // ,native的缓冲区
int targetFps = 25; // ,native的缓冲区
Timer? frameProcessTimer; //
bool isProcessingFrame = false; //
int lastProcessedTimestamp = 0; //