Merge remote-tracking branch 'origin/develop_sky' into develop_sky_minbb

# Conflicts:
#	lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
#	lib/talk/starChart/views/native/talk_view_native_decode_state.dart
This commit is contained in:
sky_min 2025-11-07 18:11:35 +08:00
commit c167b254fd
8 changed files with 267 additions and 596 deletions

View File

@ -31,8 +31,7 @@ class SaveLockLogic extends BaseGetXController {
late StreamSubscription<Reply> _replySubscription;
void _initReplySubscription() {
_replySubscription =
EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
_replySubscription = EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
if (reply is AddUserReply && state.ifCurrentScreen.value == true) {
_replyAddUserKey(reply);
}
@ -66,15 +65,11 @@ class SaveLockLogic extends BaseGetXController {
break;
case 0x06:
//
final List<String>? privateKey =
await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList =
changeStringListToIntList(privateKey!);
final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
final List<String>? publicKey =
await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList =
changeStringListToIntList(publicKey!);
final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);
IoSenderManage.senderAddUser(
lockID: BlueManage().connectDeviceName,
@ -215,19 +210,14 @@ class SaveLockLogic extends BaseGetXController {
showBlueConnetctToast();
}
});
BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState deviceConnectionState) async {
BlueManage().blueSendData(BlueManage().connectDeviceName, (BluetoothConnectionState deviceConnectionState) async {
if (deviceConnectionState == BluetoothConnectionState.connected) {
//
final List<String>? privateKey =
await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList =
changeStringListToIntList(privateKey!);
final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
final List<String>? publicKey =
await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList =
changeStringListToIntList(publicKey!);
final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);
final List<String>? token = await Storage.getStringList(saveBlueToken);
List<int> getTokenList = <int>[0, 0, 0, 0];
@ -257,8 +247,7 @@ class SaveLockLogic extends BaseGetXController {
privateKey: getPrivateKeyList,
token: getTokenList,
isBeforeAddUser: true);
} else if (deviceConnectionState ==
BluetoothConnectionState.disconnected) {
} else if (deviceConnectionState == BluetoothConnectionState.disconnected) {
dismissEasyLoading();
cancelBlueConnetctToastTimer();
state.sureBtnState.value = 0;
@ -376,16 +365,14 @@ class SaveLockLogic extends BaseGetXController {
// positionMap['address'] = state.addressInfo['address'];
final Map<String, dynamic> bluetooth = <String, dynamic>{};
bluetooth['bluetoothDeviceId'] = BlueManage().connectDeviceMacAddress;
bluetooth['bluetoothDeviceId'] = state.lockInfo['mac'];
bluetooth['bluetoothDeviceName'] = BlueManage().connectDeviceName;
final List<String>? publicKey =
await Storage.getStringList(saveBluePublicKey);
final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);
bluetooth['publicKey'] = publicKeyDataList;
final List<String>? privateKey =
await Storage.getStringList(saveBluePrivateKey);
final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
bluetooth['privateKey'] = getPrivateKeyList;
@ -410,8 +397,7 @@ class SaveLockLogic extends BaseGetXController {
final String getMobile = (await Storage.getMobile())!;
ApmHelper.instance.trackEvent('save_lock_result', {
'lock_name': BlueManage().connectDeviceName,
'account':
getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'account': getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'date': DateTool().getNowDateWithType(1),
'save_lock_result': '成功',
});
@ -427,8 +413,7 @@ class SaveLockLogic extends BaseGetXController {
final String getMobile = (await Storage.getMobile())!;
ApmHelper.instance.trackEvent('save_lock_result', {
'lock_name': BlueManage().connectDeviceName,
'account':
getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'account': getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'date': DateTool().getNowDateWithType(1),
'save_lock_result': '${entity.errorCode}--${entity.errorMsg}',
});
@ -489,26 +474,18 @@ class SaveLockLogic extends BaseGetXController {
// BlueManage().disconnect();
//
final LockSetInfoEntity entity =
await ApiRepository.to.getLockSettingInfoData(
final LockSetInfoEntity entity = await ApiRepository.to.getLockSettingInfoData(
lockId: state.lockId.toString(),
);
if (entity.errorCode!.codeIsSuccessful) {
state.lockSetInfoData.value = entity.data!;
if (state.lockSetInfoData.value.lockFeature?.wifi == 1) {
// wifi锁WIFI
Get.toNamed(Routers.wifiListPage, arguments: {
'lockSetInfoData': state.lockSetInfoData.value,
'pageName': 'saveLock'
});
Get.toNamed(Routers.wifiListPage, arguments: {'lockSetInfoData': state.lockSetInfoData.value, 'pageName': 'saveLock'});
} else if (state.lockSetInfoData.value.lockFeature?.languageSpeech == 1) {
Get.toNamed(Routers.lockVoiceSettingPage, arguments: {
'lockSetInfoData': state.lockSetInfoData.value,
'pageName': 'saveLock'
});
Get.toNamed(Routers.lockVoiceSettingPage, arguments: {'lockSetInfoData': state.lockSetInfoData.value, 'pageName': 'saveLock'});
} else {
eventBus.fire(RefreshLockListInfoDataEvent(
clearScanDevices: true, isUnShowLoading: true));
eventBus.fire(RefreshLockListInfoDataEvent(clearScanDevices: true, isUnShowLoading: true));
Future<void>.delayed(const Duration(seconds: 1), () {
// Get.close(state.isFromMap == 1
// ? (CommonDataManage().seletLockType == 0 ? 4 : 5)
@ -518,15 +495,12 @@ class SaveLockLogic extends BaseGetXController {
// 2
Future<void>.delayed(const Duration(milliseconds: 200), () {
if (Get.isRegistered<LockDetailLogic>()) {
Get.find<LockDetailLogic>()
.functionBlocker
.countdownProhibited(duration: const Duration(seconds: 2));
Get.find<LockDetailLogic>().functionBlocker.countdownProhibited(duration: const Duration(seconds: 2));
}
});
}
} else {
eventBus.fire(RefreshLockListInfoDataEvent(
clearScanDevices: true, isUnShowLoading: true));
eventBus.fire(RefreshLockListInfoDataEvent(clearScanDevices: true, isUnShowLoading: true));
Future<void>.delayed(const Duration(seconds: 1), () {
// Get.close(state.isFromMap == 1
// ? (CommonDataManage().seletLockType == 0 ? 4 : 5)
@ -536,9 +510,7 @@ class SaveLockLogic extends BaseGetXController {
// 2
Future<void>.delayed(const Duration(milliseconds: 200), () {
if (Get.isRegistered<LockDetailLogic>()) {
Get.find<LockDetailLogic>()
.functionBlocker
.countdownProhibited(duration: const Duration(seconds: 2));
Get.find<LockDetailLogic>().functionBlocker.countdownProhibited(duration: const Duration(seconds: 2));
}
});
}

View File

@ -62,8 +62,6 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle implements ScpMessageHa
_handleResponseSendExpect(
lockPeerID: scpMessage.FromPeerId!,
);
//
startChartManage.startTalkExpectTimer();
//
startChartManage.stopCallRequestMessageTimer();
//

View File

@ -282,7 +282,9 @@ class StartChartManage {
// RbcuInfo
void _sendRbcuInfoMessage({required String ToPeerId, bool isResp = false}) async {
final uuid = _uuid.v1();
final int timestamp = DateTime.now().millisecondsSinceEpoch;
final int timestamp = DateTime
.now()
.millisecondsSinceEpoch;
final Int64 int64Timestamp = Int64(timestamp); // 使
// ip地址和中继返回的外网地址
@ -301,7 +303,7 @@ class StartChartManage {
.where((addr) => addr != null) // null
.map(
(addr) => addr!.replaceAll(IpConstant.udpUrl, ''),
) // "udp://"
) // "udp://"
.cast<String>(); // Iterable<String>// Iterable<String?> Iterable<String>
_rbcuSessionId = uuid;
final RbcuInfo rbcuInfo = RbcuInfo(
@ -457,7 +459,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
AppLog.log('发送对讲请求:${ToPeerId}');
await sendCallRequestMessage(ToPeerId: ToPeerId);
},
@ -533,7 +535,7 @@ class StartChartManage {
Duration(
seconds: heartbeatIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
final List<int> message = MessageCommand.heartbeatMessage(
FromPeerId: FromPeerId,
ToPeerId: relayPeerId,
@ -583,7 +585,9 @@ class StartChartManage {
ToPeerId: ToPeerId,
FromPeerId: FromPeerId,
gatewayId: gatewayId,
time: DateTime.now().millisecondsSinceEpoch ~/ 1000,
time: DateTime
.now()
.millisecondsSinceEpoch ~/ 1000,
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
@ -617,7 +621,7 @@ class StartChartManage {
talkRejectTimer ??= Timer.periodic(
Duration(seconds: _defaultIntervalTime),
(Timer timer) async {
(Timer timer) async {
_sendTalkRejectMessage();
count++;
if (count >= maxCount) {
@ -723,7 +727,7 @@ class StartChartManage {
void startTalkHangupMessageTimer() {
talkHangupTimer ??= Timer.periodic(
Duration(seconds: _defaultIntervalTime),
(Timer timer) async {
(Timer timer) async {
_sendTalkHangupMessage();
},
);
@ -767,7 +771,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
// 线
await _sendOnlineMessage();
},
@ -937,7 +941,9 @@ class StartChartManage {
String ipAddress = address.address;
// IPv6
if (ipAddress.contains('%')) {
ipAddress = ipAddress.split('%').first;
ipAddress = ipAddress
.split('%')
.first;
}
// IP
if (ipAddress.isNotEmpty && !IpConstant.reportExcludeIp.contains(ipAddress)) {
@ -1060,7 +1066,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
await sendTalkPingMessage(
ToPeerId: ToPeerId,
FromPeerId: FromPeerId,
@ -1081,7 +1087,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) {
(Timer timer) {
//
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
@ -1096,7 +1102,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) {
(Timer timer) {
sendTalkAcceptMessage();
},
);
@ -1128,7 +1134,16 @@ class StartChartManage {
///
void changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer({required TalkExpectReq talkExpect}) {
_defaultTalkExpect = talkExpect;
reStartTalkExpectMessageTimer();
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
);
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
);
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
);
// reStartTalkExpectMessageTimer();
}
void reSetDefaultTalkExpect() {

View File

@ -18,7 +18,7 @@ class _PermissionGuidancePageState extends State<PermissionGuidancePage> {
final List<Map<String, String>> _stepsData = [
{
'image': 'images/guide/matter.png',
'image': 'images/guide/1.png',
'text': '步骤1打开应用信息点击通知管理选项',
},
{
@ -26,7 +26,7 @@ class _PermissionGuidancePageState extends State<PermissionGuidancePage> {
'text': '步骤2下滑点击呼叫提醒的通知选项',
},
{
'image': 'images/guide/tuya.png',
'image': 'images/guide/3.png',
'text': '步骤3选择在锁定屏幕上的选项设置',
},
{

View File

@ -40,7 +40,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
int bufferSize = 50; //
int bufferSize = 25; //
int audioBufferSize = 20; // 2
@ -104,8 +104,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
codecType: 'h264',
);
// textureId
AppLog.log('StartChartManage().videoWidth:${StartChartManage().videoWidth}');
AppLog.log('StartChartManage().videoHeight:${StartChartManage().videoHeight}');
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
@ -231,30 +229,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// P/B帧
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
// int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
// if (pbIndex != -1) {
// state.h264FrameBuffer.removeAt(pbIndex);
// } else {
// state.h264FrameBuffer.removeAt(0);
// }
// P帧
int oldestPFrameIndex = -1;
int minPts = pts; //
for (int i = 0; i < state.h264FrameBuffer.length; i++) {
final frame = state.h264FrameBuffer[i];
if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
if (oldestPFrameIndex == -1 || frame['pts'] < minPts) {
oldestPFrameIndex = i;
minPts = frame['pts'];
}
}
}
if (oldestPFrameIndex != -1) {
state.h264FrameBuffer.removeAt(oldestPFrameIndex);
int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
// P帧
state.h264FrameBuffer.removeAt(0);
}
}
@ -264,147 +242,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
///
// void _startFrameProcessTimer() {
// //
// state.frameProcessTimer?.cancel();
//
// //
// final int intervalMs = (1000 / state.targetFps).round();
//
// //
// state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
// _processNextFrameFromBuffer();
// });
// AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
// }
void _startFrameProcessTimer() {
_stopFrameProcessTimer();
//
state.frameProcessTimer?.cancel();
// 使
//
final int intervalMs = (1000 / state.targetFps).round();
// 使
state.frameProcessTimer = Timer.periodic(
Duration(milliseconds: intervalMs),
(timer) {
// 使Future.microtask确保不会阻塞定时器
Future.microtask(_processNextFrameFromBuffer);
}
);
//
state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
_processNextFrameFromBuffer();
});
AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
}
///
// void _processNextFrameFromBuffer() async {
// //
// if (state.isProcessingFrame) {
// return;
// }
//
// //
// if (state.h264FrameBuffer.isEmpty) {
// return;
// }
//
// state.isProcessingFrame = true;
//
// // I帧frameSeq最小的I帧消费
// final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
// iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
//
// if (iFrames.isNotEmpty) {
// // I帧I帧frameSeq
// final minIFrame = iFrames.first;
// final minIFrameSeq = minIFrame['frameSeq'];
// final targetIndex = state.h264FrameBuffer.indexWhere(
// (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
// );
// state.isProcessingFrame = true;
// final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
// if (frameMap == null) {
// state.isProcessingFrame = false;
// return;
// }
// final List<int>? frameData = frameMap['frameData'];
// final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
// final int? frameSeq = frameMap['frameSeq'];
// final int? frameSeqI = frameMap['frameSeqI'];
// final int? pts = frameMap['pts'];
// final ScpMessage? scpMessage = frameMap['scpMessage'];
// if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
// state.isProcessingFrame = false;
// return;
// }
// if (state.textureId.value == null) {
// state.isProcessingFrame = false;
// return;
// }
// lastDecodedIFrameSeq = minIFrameSeq;
// AppLog.log('送入解码器的P帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
// await VideoDecodePlugin.sendFrame(
// frameData: frameData,
// frameType: 0,
// frameSeq: frameSeq,
// timestamp: pts,
// splitNalFromIFrame: true,
// refIFrameSeq: frameSeqI,
// );
// state.isProcessingFrame = false;
// return;
// }
//
// // I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
// if (lastDecodedIFrameSeq != null) {
// final validPFrames =
// state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
// if (validPFrames.isNotEmpty) {
// validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// final minPFrame = validPFrames.first;
// final targetIndex = state.h264FrameBuffer.indexWhere(
// (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
// );
// state.isProcessingFrame = true;
// final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
// if (frameMap == null) {
// state.isProcessingFrame = false;
// return;
// }
// final List<int>? frameData = frameMap['frameData'];
// final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
// final int? frameSeq = frameMap['frameSeq'];
// final int? frameSeqI = frameMap['frameSeqI'];
// final int? pts = frameMap['pts'];
// final ScpMessage? scpMessage = frameMap['scpMessage'];
// if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
// state.isProcessingFrame = false;
// return;
// }
// if (state.textureId.value == null) {
// state.isProcessingFrame = false;
// return;
// }
// // AppLog.log('送入解码器的I帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// // 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
//
// await VideoDecodePlugin.sendFrame(
// frameData: frameData,
// frameType: 1,
// frameSeq: frameSeq,
// timestamp: pts,
// splitNalFromIFrame: true,
// refIFrameSeq: frameSeqI,
// );
// state.isProcessingFrame = false;
// return;
// }
// }
// // I帧到来
// }
/// -
///
void _processNextFrameFromBuffer() async {
final startTime = DateTime.now().microsecondsSinceEpoch;
//
if (state.isProcessingFrame) {
return;
@ -415,110 +271,106 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
state.isProcessingFrame = true;
try {
// I帧
final iFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// I帧frameSeq最小的I帧消费
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrame['frameSeq'],
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;
if (targetIndex != -1) {
final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
await _decodeFrame(frameMap);
lastDecodedIFrameSeq = minIFrame['frameSeq'] as int;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
// I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
if (lastDecodedIFrameSeq != null) {
final validPFrames =
state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
}
// P帧 - P帧以提高流畅度
if (lastDecodedIFrameSeq != null) {
final validPFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == lastDecodedIFrameSeq)
.toList()
..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
// 3P帧
int framesToProcess = min(3, validPFrames.length);
for (int i = 0; i < framesToProcess; i++) {
if (state.h264FrameBuffer.isNotEmpty) {
final pFrame = validPFrames[i];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] == pFrame['frameSeq'] &&
f['frameSeqI'] == lastDecodedIFrameSeq,
);
if (targetIndex != -1) {
final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
await _decodeFrame(frameMap);
}
}
}
}
} catch (e) {
AppLog.log('帧处理错误: $e');
//
state.isProcessingFrame = false;
//
_handleDecodeError();
// I帧到来
} finally {
state.isProcessingFrame = false;
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// > 5ms
if (durationMs > 5) {
debugPrint('[_processNextFrameFromBuffer] 耗时: ${durationMs.toStringAsFixed(2)} ms');
// 使
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
}
}
}
int _decodeErrorCount = 0;
static const int MAX_DECODE_ERRORS = 5;
void _handleDecodeError() {
_decodeErrorCount++;
if (_decodeErrorCount >= MAX_DECODE_ERRORS) {
AppLog.log('解码错误过多,重置解码器');
_resetDecoderForNewStream(
StartChartManage().videoWidth,
StartChartManage().videoHeight
);
_decodeErrorCount = 0;
}
}
//
Future<void> _decodeFrame(Map<String, dynamic> frameMap) async {
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null || frameType == null || frameSeq == null ||
frameSeqI == null || pts == null || state.textureId.value == null) {
return;
}
AppLog.log('送入解码器的帧数据frameSeq:$frameSeq,frameSeqI:$frameSeqI,'
'frameType:$frameType,messageId:${scpMessage?.MessageId}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
}
///
void _stopFrameProcessTimer() {
state.frameProcessTimer?.cancel();
@ -712,20 +564,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
_startListenTalkData();
//
state.targetFps = _getOptimalFps();
_startFrameProcessTimer();
}
//
int _getOptimalFps() {
//
if (defaultTargetPlatform == TargetPlatform.android) {
return 25; // Android设备通常性能较好
} else {
return 20; // iOS设备保守一些
}
}
@override
@ -983,7 +821,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
void onQualityChanged(String quality) async {
state.currentQuality.value = quality;
state.isLoading.value = true; //
TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
final audioType = talkExpectReq.audioType;
int width = 864;
@ -1007,8 +844,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
break;
}
//
await _resetDecoderForNewStream(width, height);
///
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq);
@ -1042,7 +877,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
//
await Future.delayed(Duration(milliseconds: 50));
await Future.delayed(Duration(milliseconds: 100));
//
final config = VideoDecoderConfig(
@ -1122,98 +957,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('无法处理H264帧textureId为空');
}
break;
case TalkData_ContentTypeE.H264:
if (state.textureId.value != null && talkDataH264Frame != null) {
// SPS/PPS
if (talkDataH264Frame.frameType == TalkDataH264Frame_FrameTypeE.I) {
_cacheSPSPPSIfNeeded(talkData.content);
}
_addFrameToBuffer(
talkData.content,
talkDataH264Frame.frameType,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
scpMessage!,
);
} else {
AppLog.log('无法处理H264帧textureId为空或帧数据无效');
}
break;
}
}
void _cacheSPSPPSIfNeeded(List<int> frameData) {
try {
// H.264NAL单元开始NAL头部
int offset = 0;
// NAL单元分隔符 (0x00000001 0x000001)
while (offset < frameData.length - 4) {
// 0x00000001
if (frameData[offset] == 0 && frameData[offset + 1] == 0 &&
frameData[offset + 2] == 0 && frameData[offset + 3] == 1) {
// (4)
int nalStart = offset + 4;
// NAL头部
if (nalStart >= frameData.length) break;
// NAL头部第一个字节包含NAL类型信息
// bit 0-7: forbidden_zero_bit(1) + nal_ref_idc(2) + nal_unit_type(5)
int nalHeader = frameData[nalStart];
int nalType = nalHeader & 0x1F; // 5
// H.264 NAL单元类型:
// 7 = SPS (Sequence Parameter Set)
// 8 = PPS (Picture Parameter Set)
if (nalType == 7) {
// SPS - NAL单元(NAL头部)
int nalEnd = _findNextStartCode(frameData, nalStart);
if (nalEnd == -1) nalEnd = frameData.length;
spsCache = frameData.sublist(offset, nalEnd);
hasSps = true;
AppLog.log('检测到并缓存SPS数据, 长度: ${spsCache!.length}');
} else if (nalType == 8) {
// PPS - NAL单元
int nalEnd = _findNextStartCode(frameData, nalStart);
if (nalEnd == -1) nalEnd = frameData.length;
ppsCache = frameData.sublist(offset, nalEnd);
hasPps = true;
AppLog.log('检测到并缓存PPS数据, 长度: ${ppsCache!.length}');
}
// NAL单元
offset = nalStart + 1;
} else {
offset++;
}
}
} catch (e) {
AppLog.log('SPS/PPS检测错误: $e');
}
}
//
int _findNextStartCode(List<int> data, int fromIndex) {
for (int i = fromIndex; i < data.length - 4; i++) {
if (data[i] == 0 && data[i + 1] == 0 &&
data[i + 2] == 0 && data[i + 3] == 1) {
return i; //
}
}
return -1; //
}
// 使SPS/PPS数据
void _ensureSPSPPSAvailable() {
if (hasSps && hasPps && spsCache != null && ppsCache != null) {
// SPS/PPS数据发送给解码器
//
AppLog.log('SPS和PPS数据已就绪可用于解码器初始化');
}
}
}

View File

@ -29,15 +29,12 @@ class TalkViewNativeDecodePage extends StatefulWidget {
const TalkViewNativeDecodePage({Key? key}) : super(key: key);
@override
State<TalkViewNativeDecodePage> createState() =>
_TalkViewNativeDecodePageState();
State<TalkViewNativeDecodePage> createState() => _TalkViewNativeDecodePageState();
}
class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
with TickerProviderStateMixin {
class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage> with TickerProviderStateMixin {
final TalkViewNativeDecodeLogic logic = Get.put(TalkViewNativeDecodeLogic());
final TalkViewNativeDecodeState state =
Get.find<TalkViewNativeDecodeLogic>().state;
final TalkViewNativeDecodeState state = Get.find<TalkViewNativeDecodeLogic>().state;
final startChartManage = StartChartManage();
@override
@ -69,66 +66,48 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
// false 退
return false;
},
child: SizedBox(
child: Container(
width: 1.sw,
height: 1.sh,
color: Colors.black.withOpacity(0.7),
child: Stack(
alignment: Alignment.center,
children: <Widget>[
//
Obx(
() {
final double screenWidth = MediaQuery.of(context).size.width;
final double screenHeight = MediaQuery.of(context).size.height;
final double logicalWidth = MediaQuery.of(context).size.width;
final double logicalHeight = MediaQuery.of(context).size.height;
final double devicePixelRatio =
MediaQuery.of(context).devicePixelRatio;
//
final double physicalWidth = logicalWidth * devicePixelRatio;
final double physicalHeight = logicalHeight * devicePixelRatio;
//
const int rotatedImageWidth = 480; //
const int rotatedImageHeight = 864; //
//
final double scaleWidth = physicalWidth / rotatedImageWidth;
final double scaleHeight = physicalHeight / rotatedImageHeight;
max(scaleWidth, scaleHeight); //
// loading中或textureId为nullloading/
if (state.isLoading.isTrue || state.textureId.value == null) {
return Image.asset(
'images/main/monitorBg.png',
width: screenWidth,
height: screenHeight,
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
);
} else {
return Positioned.fill(
return Positioned(
top: 0,
left: 0,
right: 0,
child: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
// 使RotatedBox
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // iOS白边
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: Texture(
child: RotatedBox(
// 使RotatedBox
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: state.isFullScreen.isFalse
? AspectRatio(
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
),
),
@ -151,19 +130,14 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 1.sw,
child: Obx(
() {
final String sec = (state.oneMinuteTime.value % 60)
.toString()
.padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60)
.toString()
.padLeft(2, '0');
final String sec = (state.oneMinuteTime.value % 60).toString().padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60).toString().padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text(
'$min:$sec',
style: TextStyle(
fontSize: 26.sp, color: Colors.white),
style: TextStyle(fontSize: 26.sp, color: Colors.white),
),
],
);
@ -177,9 +151,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 1.sw - 30.w * 2,
// height: 300.h,
margin: EdgeInsets.all(30.w),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.2),
borderRadius: BorderRadius.circular(20.h)),
decoration: BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
@ -191,9 +163,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
),
),
),
Obx(() => state.isLoading.isTrue
? buildRotationTransition()
: Container()),
Obx(() => state.isLoading.isTrue ? buildRotationTransition() : Container()),
Obx(() => state.isLongPressing.value
? Positioned(
top: 80.h,
@ -213,8 +183,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
SizedBox(width: 10.w),
Text(
'正在说话...'.tr,
style: TextStyle(
fontSize: 20.sp, color: Colors.white),
style: TextStyle(fontSize: 20.sp, color: Colors.white),
),
],
),
@ -246,10 +215,8 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 40.w,
height: 40.w,
image: state.isOpenVoice.value
? const AssetImage(
'images/main/icon_lockDetail_monitoringOpenVoice.png')
: const AssetImage(
'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
? const AssetImage('images/main/icon_lockDetail_monitoringOpenVoice.png')
: const AssetImage('images/main/icon_lockDetail_monitoringCloseVoice.png'))),
),
),
SizedBox(width: 50.w),
@ -264,11 +231,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenshot.png')),
child: Image(width: 40.w, height: 40.w, image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
),
),
SizedBox(width: 50.w),
@ -293,8 +256,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 40.w,
height: 40.w,
fit: BoxFit.fill,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenRecording.png'),
image: const AssetImage('images/main/icon_lockDetail_monitoringScreenRecording.png'),
),
),
),
@ -330,13 +292,8 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
Text(
q,
style: TextStyle(
color: state.currentQuality.value == q
? AppColors.mainColor
: Colors.black,
fontWeight:
state.currentQuality.value == q
? FontWeight.bold
: FontWeight.normal,
color: state.currentQuality.value == q ? AppColors.mainColor : Colors.black,
fontWeight: state.currentQuality.value == q ? FontWeight.bold : FontWeight.normal,
fontSize: 28.sp,
),
),
@ -352,8 +309,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
);
},
child: Container(
child: Icon(Icons.high_quality_outlined,
color: Colors.white, size: 38.w),
child: Icon(Icons.high_quality_outlined, color: Colors.white, size: 38.w),
),
),
Visibility(
@ -377,61 +333,56 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
}
Widget bottomBottomBtnWidget() {
  // Bottom action bar of the talk screen: answer / push-to-talk,
  // hang-up, and remote-unlock buttons, evenly spaced in a row.
  return Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: <Widget>[
    // Answer / push-to-talk button. Icon and label are reactive
    // (Obx) because they depend on the current talk status.
    Obx(
      () => bottomBtnItemWidget(
        getAnswerBtnImg(),
        getAnswerBtnName(),
        Colors.white,
        longPress: () async {
          if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
            // Long-press while connected: start capturing and sending
            // microphone audio, and flag the "speaking" overlay.
            logic.startProcessingAudio();
            state.isLongPressing.value = true;
          }
        },
        longPressUp: () async {
          // Release: stop audio capture and hide the overlay.
          logic.stopProcessingAudio();
          state.isLongPressing.value = false;
        },
        onClick: () async {
          if (state.talkStatus.value == TalkStatus.passiveCallWaitingAnswer) {
            // Single tap answers an incoming (passive) call.
            logic.initiateAnswerCommand();
          }
        },
      ),
    ),
    // Hang-up button: terminates the current UDP talk session.
    bottomBtnItemWidget('images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red, onClick: () {
      logic.udpHangUpAction();
    }),
    // Remote unlock button. Precondition checks (remote-unlock enabled,
    // talk session active) are handled inside logic.remoteOpenLock().
    bottomBtnItemWidget(
      'images/main/icon_lockDetail_monitoringUnlock.png',
      '开锁'.tr,
      AppColors.mainColor,
      onClick: () {
        logic.remoteOpenLock();
      },
    )
  ]);
}
String getAnswerBtnImg() {

View File

@ -69,7 +69,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
child: Container(
width: 1.sw,
height: 1.sh,
color: Colors.blue,
color: Colors.black.withOpacity(0.7),
child: Stack(
alignment: Alignment.center,
children: <Widget>[
@ -92,33 +92,29 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox(
width: StartChartManage().videoHeight.w,
height: StartChartManage().videoWidth.h,
child: RotatedBox(
// 使RotatedBox
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // iOS白边
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: state.isFullScreen.isFalse
? AspectRatio(
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: Texture(
child: RotatedBox(
// 使RotatedBox
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // iOS白边
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: state.isFullScreen.isFalse
? AspectRatio(
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
),
);
@ -130,7 +126,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
state.isFullScreen.value = !state.isFullScreen.value;
},
child: Obx(
() => Text(state.isFullScreen.isTrue ? '退出全屏' : '全屏'),
() => Text(state.isFullScreen.isTrue ? '退出全屏' : '全屏'),
),
),
Obx(() => state.isLoading.isTrue
@ -168,8 +164,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
width: 1.sw - 30.w * 2,
// height: 300.h,
margin: EdgeInsets.all(30.w),
decoration:
BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
decoration: BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
@ -249,10 +244,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
child: Image(width: 40.w, height: 40.w, image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
),
),
SizedBox(width: 50.w),

View File

@ -67,7 +67,7 @@ class TalkViewNativeDecodeState {
// --- Talk/monitor session state fields ---
// NOTE(review): original Chinese comments were lost in extraction; the
// annotations below are reconstructed from field names — confirm against
// the logic class before relying on them.
final TalkDataRepository talkDataRepository = TalkDataRepository.instance; // shared singleton for talk data
RxBool isOpenVoice = false.obs; // whether remote audio playback is enabled (off by default)
RxBool isRecordingScreen = false.obs; // whether a screen recording is in progress
RxBool isRecordingAudio = false.obs; // whether microphone audio capture is in progress
Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // moment audio capture started

// --- H264 frame buffer bookkeeping ---
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // queued encoded frames awaiting decode
final int maxFrameBufferSize = 25; // cap on queued frames before older ones are discarded
final int targetFps = 25; // consumption rate; presumably must match the native decoder buffer — TODO confirm
Timer? frameProcessTimer; // periodic timer that drains h264FrameBuffer
bool isProcessingFrame = false; // re-entrancy guard while a frame is being handed to native
int lastProcessedTimestamp = 0; // timestamp of the most recently processed frame