Merge remote-tracking branch 'origin/develop_sky' into develop_sky_minbb

# Conflicts:
#	lib/talk/starChart/views/native/talk_view_native_decode_logic.dart
#	lib/talk/starChart/views/native/talk_view_native_decode_state.dart
sky_min committed 2025-11-07 18:11:35 +08:00
commit c167b254fd
8 changed files with 267 additions and 596 deletions

View File

@@ -31,8 +31,7 @@ class SaveLockLogic extends BaseGetXController {
   late StreamSubscription<Reply> _replySubscription;

   void _initReplySubscription() {
-    _replySubscription =
-        EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
+    _replySubscription = EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
       if (reply is AddUserReply && state.ifCurrentScreen.value == true) {
         _replyAddUserKey(reply);
       }
@@ -66,15 +65,11 @@ class SaveLockLogic extends BaseGetXController {
         break;
       case 0x06:
         //
-        final List<String>? privateKey =
-            await Storage.getStringList(saveBluePrivateKey);
-        final List<int> getPrivateKeyList =
-            changeStringListToIntList(privateKey!);
-        final List<String>? publicKey =
-            await Storage.getStringList(saveBluePublicKey);
-        final List<int> publicKeyDataList =
-            changeStringListToIntList(publicKey!);
+        final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
+        final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
+        final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
+        final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);

         IoSenderManage.senderAddUser(
             lockID: BlueManage().connectDeviceName,
@@ -215,19 +210,14 @@ class SaveLockLogic extends BaseGetXController {
         showBlueConnetctToast();
       }
     });
-    BlueManage().blueSendData(BlueManage().connectDeviceName,
-        (BluetoothConnectionState deviceConnectionState) async {
+    BlueManage().blueSendData(BlueManage().connectDeviceName, (BluetoothConnectionState deviceConnectionState) async {
       if (deviceConnectionState == BluetoothConnectionState.connected) {
         //
-        final List<String>? privateKey =
-            await Storage.getStringList(saveBluePrivateKey);
-        final List<int> getPrivateKeyList =
-            changeStringListToIntList(privateKey!);
-        final List<String>? publicKey =
-            await Storage.getStringList(saveBluePublicKey);
-        final List<int> publicKeyDataList =
-            changeStringListToIntList(publicKey!);
+        final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
+        final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
+        final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
+        final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);

         final List<String>? token = await Storage.getStringList(saveBlueToken);
         List<int> getTokenList = <int>[0, 0, 0, 0];
@@ -257,8 +247,7 @@ class SaveLockLogic extends BaseGetXController {
             privateKey: getPrivateKeyList,
             token: getTokenList,
             isBeforeAddUser: true);
-      } else if (deviceConnectionState ==
-          BluetoothConnectionState.disconnected) {
+      } else if (deviceConnectionState == BluetoothConnectionState.disconnected) {
         dismissEasyLoading();
         cancelBlueConnetctToastTimer();
         state.sureBtnState.value = 0;
@@ -376,16 +365,14 @@ class SaveLockLogic extends BaseGetXController {
     // positionMap['address'] = state.addressInfo['address'];
     final Map<String, dynamic> bluetooth = <String, dynamic>{};
-    bluetooth['bluetoothDeviceId'] = BlueManage().connectDeviceMacAddress;
+    bluetooth['bluetoothDeviceId'] = state.lockInfo['mac'];
     bluetooth['bluetoothDeviceName'] = BlueManage().connectDeviceName;
-    final List<String>? publicKey =
-        await Storage.getStringList(saveBluePublicKey);
+    final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
     final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);
     bluetooth['publicKey'] = publicKeyDataList;
-    final List<String>? privateKey =
-        await Storage.getStringList(saveBluePrivateKey);
+    final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
     final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
     bluetooth['privateKey'] = getPrivateKeyList;
@@ -410,8 +397,7 @@ class SaveLockLogic extends BaseGetXController {
       final String getMobile = (await Storage.getMobile())!;
       ApmHelper.instance.trackEvent('save_lock_result', {
         'lock_name': BlueManage().connectDeviceName,
-        'account':
-            getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
+        'account': getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
         'date': DateTool().getNowDateWithType(1),
         'save_lock_result': '成功',
       });
@@ -427,8 +413,7 @@ class SaveLockLogic extends BaseGetXController {
       final String getMobile = (await Storage.getMobile())!;
       ApmHelper.instance.trackEvent('save_lock_result', {
         'lock_name': BlueManage().connectDeviceName,
-        'account':
-            getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
+        'account': getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
         'date': DateTool().getNowDateWithType(1),
         'save_lock_result': '${entity.errorCode}--${entity.errorMsg}',
       });
@@ -489,26 +474,18 @@ class SaveLockLogic extends BaseGetXController {
     // BlueManage().disconnect();
     //
-    final LockSetInfoEntity entity =
-        await ApiRepository.to.getLockSettingInfoData(
+    final LockSetInfoEntity entity = await ApiRepository.to.getLockSettingInfoData(
       lockId: state.lockId.toString(),
     );
     if (entity.errorCode!.codeIsSuccessful) {
       state.lockSetInfoData.value = entity.data!;
       if (state.lockSetInfoData.value.lockFeature?.wifi == 1) {
         // wifi锁WIFI
-        Get.toNamed(Routers.wifiListPage, arguments: {
-          'lockSetInfoData': state.lockSetInfoData.value,
-          'pageName': 'saveLock'
-        });
+        Get.toNamed(Routers.wifiListPage, arguments: {'lockSetInfoData': state.lockSetInfoData.value, 'pageName': 'saveLock'});
       } else if (state.lockSetInfoData.value.lockFeature?.languageSpeech == 1) {
-        Get.toNamed(Routers.lockVoiceSettingPage, arguments: {
-          'lockSetInfoData': state.lockSetInfoData.value,
-          'pageName': 'saveLock'
-        });
+        Get.toNamed(Routers.lockVoiceSettingPage, arguments: {'lockSetInfoData': state.lockSetInfoData.value, 'pageName': 'saveLock'});
       } else {
-        eventBus.fire(RefreshLockListInfoDataEvent(
-            clearScanDevices: true, isUnShowLoading: true));
+        eventBus.fire(RefreshLockListInfoDataEvent(clearScanDevices: true, isUnShowLoading: true));
         Future<void>.delayed(const Duration(seconds: 1), () {
           // Get.close(state.isFromMap == 1
           //     ? (CommonDataManage().seletLockType == 0 ? 4 : 5)
@@ -518,15 +495,12 @@ class SaveLockLogic extends BaseGetXController {
           // 2
           Future<void>.delayed(const Duration(milliseconds: 200), () {
             if (Get.isRegistered<LockDetailLogic>()) {
-              Get.find<LockDetailLogic>()
-                  .functionBlocker
-                  .countdownProhibited(duration: const Duration(seconds: 2));
+              Get.find<LockDetailLogic>().functionBlocker.countdownProhibited(duration: const Duration(seconds: 2));
             }
           });
         }
       } else {
-        eventBus.fire(RefreshLockListInfoDataEvent(
-            clearScanDevices: true, isUnShowLoading: true));
+        eventBus.fire(RefreshLockListInfoDataEvent(clearScanDevices: true, isUnShowLoading: true));
         Future<void>.delayed(const Duration(seconds: 1), () {
           // Get.close(state.isFromMap == 1
           //     ? (CommonDataManage().seletLockType == 0 ? 4 : 5)
@@ -536,9 +510,7 @@ class SaveLockLogic extends BaseGetXController {
           // 2
           Future<void>.delayed(const Duration(milliseconds: 200), () {
             if (Get.isRegistered<LockDetailLogic>()) {
-              Get.find<LockDetailLogic>()
-                  .functionBlocker
-                  .countdownProhibited(duration: const Duration(seconds: 2));
+              Get.find<LockDetailLogic>().functionBlocker.countdownProhibited(duration: const Duration(seconds: 2));
             }
           });
         }

View File

@ -62,8 +62,6 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle implements ScpMessageHa
_handleResponseSendExpect( _handleResponseSendExpect(
lockPeerID: scpMessage.FromPeerId!, lockPeerID: scpMessage.FromPeerId!,
); );
//
startChartManage.startTalkExpectTimer();
// //
startChartManage.stopCallRequestMessageTimer(); startChartManage.stopCallRequestMessageTimer();
// //

View File

@@ -282,7 +282,9 @@ class StartChartManage {
   // RbcuInfo
   void _sendRbcuInfoMessage({required String ToPeerId, bool isResp = false}) async {
     final uuid = _uuid.v1();
-    final int timestamp = DateTime.now().millisecondsSinceEpoch;
+    final int timestamp = DateTime
+        .now()
+        .millisecondsSinceEpoch;
     final Int64 int64Timestamp = Int64(timestamp); // 使
     // ip地址和中继返回的外网地址
@@ -301,7 +303,7 @@ class StartChartManage {
         .where((addr) => addr != null) // null
         .map(
           (addr) => addr!.replaceAll(IpConstant.udpUrl, ''),
-        ) // "udp://"
+        ) // "udp://"
         .cast<String>(); // Iterable<String?> -> Iterable<String>
     _rbcuSessionId = uuid;
     final RbcuInfo rbcuInfo = RbcuInfo(
@@ -457,7 +459,7 @@ class StartChartManage {
       Duration(
         seconds: _defaultIntervalTime,
       ),
-      (Timer timer) async {
+      (Timer timer) async {
         AppLog.log('发送对讲请求:${ToPeerId}');
         await sendCallRequestMessage(ToPeerId: ToPeerId);
       },
@@ -533,7 +535,7 @@ class StartChartManage {
       Duration(
         seconds: heartbeatIntervalTime,
       ),
-      (Timer timer) async {
+      (Timer timer) async {
         final List<int> message = MessageCommand.heartbeatMessage(
           FromPeerId: FromPeerId,
           ToPeerId: relayPeerId,
@@ -583,7 +585,9 @@ class StartChartManage {
       ToPeerId: ToPeerId,
       FromPeerId: FromPeerId,
       gatewayId: gatewayId,
-      time: DateTime.now().millisecondsSinceEpoch ~/ 1000,
+      time: DateTime
+          .now()
+          .millisecondsSinceEpoch ~/ 1000,
       MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
     );
     await _sendMessage(message: message);
@@ -617,7 +621,7 @@ class StartChartManage {
     talkRejectTimer ??= Timer.periodic(
       Duration(seconds: _defaultIntervalTime),
-      (Timer timer) async {
+      (Timer timer) async {
         _sendTalkRejectMessage();
         count++;
         if (count >= maxCount) {
@@ -723,7 +727,7 @@ class StartChartManage {
   void startTalkHangupMessageTimer() {
     talkHangupTimer ??= Timer.periodic(
       Duration(seconds: _defaultIntervalTime),
-      (Timer timer) async {
+      (Timer timer) async {
         _sendTalkHangupMessage();
       },
     );
@@ -767,7 +771,7 @@ class StartChartManage {
       Duration(
         seconds: _defaultIntervalTime,
       ),
-      (Timer timer) async {
+      (Timer timer) async {
        // 线
         await _sendOnlineMessage();
       },
@@ -937,7 +941,9 @@ class StartChartManage {
       String ipAddress = address.address;
       // IPv6
       if (ipAddress.contains('%')) {
-        ipAddress = ipAddress.split('%').first;
+        ipAddress = ipAddress
+            .split('%')
+            .first;
       }
       // IP
       if (ipAddress.isNotEmpty && !IpConstant.reportExcludeIp.contains(ipAddress)) {
@@ -1060,7 +1066,7 @@ class StartChartManage {
       Duration(
         seconds: _defaultIntervalTime,
       ),
-      (Timer timer) async {
+      (Timer timer) async {
         await sendTalkPingMessage(
           ToPeerId: ToPeerId,
           FromPeerId: FromPeerId,
@@ -1081,7 +1087,7 @@ class StartChartManage {
       Duration(
         seconds: _defaultIntervalTime,
       ),
-      (Timer timer) {
+      (Timer timer) {
         //
         sendTalkExpectMessage(
           talkExpect: _defaultTalkExpect,
@@ -1096,7 +1102,7 @@ class StartChartManage {
       Duration(
         seconds: _defaultIntervalTime,
       ),
-      (Timer timer) {
+      (Timer timer) {
         sendTalkAcceptMessage();
       },
     );
@@ -1128,7 +1134,16 @@ class StartChartManage {
   ///
   void changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer({required TalkExpectReq talkExpect}) {
     _defaultTalkExpect = talkExpect;
-    reStartTalkExpectMessageTimer();
+    sendTalkExpectMessage(
+      talkExpect: _defaultTalkExpect,
+    );
+    sendTalkExpectMessage(
+      talkExpect: _defaultTalkExpect,
+    );
+    sendTalkExpectMessage(
+      talkExpect: _defaultTalkExpect,
+    );
+    // reStartTalkExpectMessageTimer();
   }

   void reSetDefaultTalkExpect() {
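
Note on the hunk above: instead of restarting the periodic talk-expect timer, the new code fires sendTalkExpectMessage three times in a row. A generic burst-resend helper for that pattern could look like the sketch below; the helper name and the 200 ms gap are assumptions for illustration only, the commit itself sends the three requests back to back.

    // Hypothetical burst-resend helper (illustrative, not project code).
    Future<void> resendNTimes(void Function() send,
        {int times = 3, Duration gap = const Duration(milliseconds: 200)}) async {
      for (int i = 0; i < times; i++) {
        send(); // fire the same request each round
        if (i < times - 1) {
          await Future<void>.delayed(gap); // short pause between resends
        }
      }
    }

    // Usage: resendNTimes(() => sendTalkExpectMessage(talkExpect: _defaultTalkExpect));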

View File

@@ -18,7 +18,7 @@ class _PermissionGuidancePageState extends State<PermissionGuidancePage> {
   final List<Map<String, String>> _stepsData = [
     {
-      'image': 'images/guide/matter.png',
+      'image': 'images/guide/1.png',
      'text': '步骤1打开应用信息点击通知管理选项',
     },
     {
@@ -26,7 +26,7 @@ class _PermissionGuidancePageState extends State<PermissionGuidancePage> {
      'text': '步骤2下滑点击呼叫提醒的通知选项',
     },
     {
-      'image': 'images/guide/tuya.png',
+      'image': 'images/guide/3.png',
      'text': '步骤3选择在锁定屏幕上的选项设置',
     },
     {

View File

@@ -40,7 +40,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

-  int bufferSize = 50; //
+  int bufferSize = 25; //
   int audioBufferSize = 20; // 2
@@ -104,8 +104,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       codecType: 'h264',
     );
     // textureId
-    AppLog.log('StartChartManage().videoWidth:${StartChartManage().videoWidth}');
-    AppLog.log('StartChartManage().videoHeight:${StartChartManage().videoHeight}');
     final textureId = await VideoDecodePlugin.initDecoder(config);
     if (textureId != null) {
       Future.microtask(() => state.textureId.value = textureId);
@@ -231,30 +229,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // P/B帧
     while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
-      // int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
-      // if (pbIndex != -1) {
-      //   state.h264FrameBuffer.removeAt(pbIndex);
-      // } else {
-      //   state.h264FrameBuffer.removeAt(0);
-      // }
-      // P帧
-      int oldestPFrameIndex = -1;
-      int minPts = pts; //
-      for (int i = 0; i < state.h264FrameBuffer.length; i++) {
-        final frame = state.h264FrameBuffer[i];
-        if (frame['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
-          if (oldestPFrameIndex == -1 || frame['pts'] < minPts) {
-            oldestPFrameIndex = i;
-            minPts = frame['pts'];
-          }
-        }
-      }
-      if (oldestPFrameIndex != -1) {
-        state.h264FrameBuffer.removeAt(oldestPFrameIndex);
-      } else {
-        // P帧
-        state.h264FrameBuffer.removeAt(0);
-      }
+      int pbIndex = state.h264FrameBuffer.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
+      if (pbIndex != -1) {
+        state.h264FrameBuffer.removeAt(pbIndex);
+      } else {
+        state.h264FrameBuffer.removeAt(0);
+      }
     }
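
The eviction rule restored above is easy to misread in diff form: when the buffer reaches maxFrameBufferSize, a P frame is dropped if one exists, and only otherwise is the oldest entry removed, so I frames survive as long as possible. A self-contained sketch of that rule (function and key names are illustrative, not the project's API):

    // Minimal sketch of the drop policy; `isPFrame` stands in for the
    // frameType check against TalkDataH264Frame_FrameTypeE.P.
    void evictOneFrame(List<Map<String, dynamic>> buffer,
        bool Function(Map<String, dynamic>) isPFrame) {
      final int pIndex = buffer.indexWhere(isPFrame);
      if (pIndex != -1) {
        buffer.removeAt(pIndex); // sacrifice a P frame first
      } else {
        buffer.removeAt(0); // no P frames left: drop the oldest frame
      }
    }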
@@ -264,147 +242,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     }
   }

   ///
-  // void _startFrameProcessTimer() {
-  //   //
-  //   state.frameProcessTimer?.cancel();
-  //
-  //   //
-  //   final int intervalMs = (1000 / state.targetFps).round();
-  //
-  //   //
-  //   state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
-  //     _processNextFrameFromBuffer();
-  //   });
-  //   AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
-  // }
   void _startFrameProcessTimer() {
-    _stopFrameProcessTimer();
-    // 使
+    //
+    state.frameProcessTimer?.cancel();
+    //
     final int intervalMs = (1000 / state.targetFps).round();
-    // 使
-    state.frameProcessTimer = Timer.periodic(
-      Duration(milliseconds: intervalMs),
-      (timer) {
-        // 使Future.microtask确保不会阻塞定时器
-        Future.microtask(_processNextFrameFromBuffer);
-      }
-    );
+    //
+    state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
+      _processNextFrameFromBuffer();
+    });
     AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
   }

   ///
-  // void _processNextFrameFromBuffer() async {
-  //   //
-  //   if (state.isProcessingFrame) {
-  //     return;
-  //   }
-  //
-  //   //
-  //   if (state.h264FrameBuffer.isEmpty) {
-  //     return;
-  //   }
-  //
-  //   state.isProcessingFrame = true;
-  //
-  //   // I帧frameSeq最小的I帧消费
-  //   final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
-  //   iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
-  //
-  //   if (iFrames.isNotEmpty) {
-  //     // I帧I帧frameSeq
-  //     final minIFrame = iFrames.first;
-  //     final minIFrameSeq = minIFrame['frameSeq'];
-  //     final targetIndex = state.h264FrameBuffer.indexWhere(
-  //       (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
-  //     );
-  //     state.isProcessingFrame = true;
-  //     final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
-  //     if (frameMap == null) {
-  //       state.isProcessingFrame = false;
-  //       return;
-  //     }
-  //     final List<int>? frameData = frameMap['frameData'];
-  //     final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
-  //     final int? frameSeq = frameMap['frameSeq'];
-  //     final int? frameSeqI = frameMap['frameSeqI'];
-  //     final int? pts = frameMap['pts'];
-  //     final ScpMessage? scpMessage = frameMap['scpMessage'];
-  //     if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
-  //       state.isProcessingFrame = false;
-  //       return;
-  //     }
-  //     if (state.textureId.value == null) {
-  //       state.isProcessingFrame = false;
-  //       return;
-  //     }
-  //     lastDecodedIFrameSeq = minIFrameSeq;
-  //     AppLog.log('送入解码器的P帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
-  //         'frameType:${frameType},messageId:${scpMessage!.MessageId}');
-  //     await VideoDecodePlugin.sendFrame(
-  //       frameData: frameData,
-  //       frameType: 0,
-  //       frameSeq: frameSeq,
-  //       timestamp: pts,
-  //       splitNalFromIFrame: true,
-  //       refIFrameSeq: frameSeqI,
-  //     );
-  //     state.isProcessingFrame = false;
-  //     return;
-  //   }
-  //
-  //   // I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
-  //   if (lastDecodedIFrameSeq != null) {
-  //     final validPFrames =
-  //         state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
-  //     if (validPFrames.isNotEmpty) {
-  //       validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
-  //       final minPFrame = validPFrames.first;
-  //       final targetIndex = state.h264FrameBuffer.indexWhere(
-  //         (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
-  //       );
-  //       state.isProcessingFrame = true;
-  //       final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
-  //       if (frameMap == null) {
-  //         state.isProcessingFrame = false;
-  //         return;
-  //       }
-  //       final List<int>? frameData = frameMap['frameData'];
-  //       final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
-  //       final int? frameSeq = frameMap['frameSeq'];
-  //       final int? frameSeqI = frameMap['frameSeqI'];
-  //       final int? pts = frameMap['pts'];
-  //       final ScpMessage? scpMessage = frameMap['scpMessage'];
-  //       if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
-  //         state.isProcessingFrame = false;
-  //         return;
-  //       }
-  //       if (state.textureId.value == null) {
-  //         state.isProcessingFrame = false;
-  //         return;
-  //       }
-  //       // AppLog.log('送入解码器的I帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
-  //       //     'frameType:${frameType},messageId:${scpMessage!.MessageId}');
-  //
-  //       await VideoDecodePlugin.sendFrame(
-  //         frameData: frameData,
-  //         frameType: 1,
-  //         frameSeq: frameSeq,
-  //         timestamp: pts,
-  //         splitNalFromIFrame: true,
-  //         refIFrameSeq: frameSeqI,
-  //       );
-  //       state.isProcessingFrame = false;
-  //       return;
-  //     }
-  //   }
-  //   // I帧到来
-  // }
-  /// -
+  ///
   void _processNextFrameFromBuffer() async {
+    final startTime = DateTime.now().microsecondsSinceEpoch;
     //
     if (state.isProcessingFrame) {
       return;
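
Both versions of _startFrameProcessTimer above follow the same pattern: a periodic timer paced at roughly 1000 / targetFps milliseconds hands one buffered frame to the decoder per tick, and the isProcessingFrame flag keeps a slow decode from piling up re-entrant calls. A self-contained sketch of that guard, with illustrative names only:

    import 'dart:async';

    class FramePump {
      Timer? _timer;
      bool _busy = false; // mirrors the role of isProcessingFrame

      void start(int fps, Future<void> Function() processOne) {
        _timer?.cancel();
        final int intervalMs = (1000 / fps).round();
        _timer = Timer.periodic(Duration(milliseconds: intervalMs), (_) async {
          if (_busy) return; // previous frame still in flight: skip this tick
          _busy = true;
          try {
            await processOne(); // push one buffered frame to the decoder
          } finally {
            _busy = false; // always release the guard
          }
        });
      }

      void stop() => _timer?.cancel();
    }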
@@ -415,110 +271,106 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       return;
     }

-    state.isProcessingFrame = true;
     try {
-      // I帧
-      final iFrames = state.h264FrameBuffer
-          .where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
-          .toList()
-        ..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
-      if (iFrames.isNotEmpty) {
-        final minIFrame = iFrames.first;
-        final targetIndex = state.h264FrameBuffer.indexWhere(
-          (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
-              f['frameSeq'] == minIFrame['frameSeq'],
-        );
-        if (targetIndex != -1) {
-          final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
-          await _decodeFrame(frameMap);
-          lastDecodedIFrameSeq = minIFrame['frameSeq'] as int;
-          return;
-        }
-      }
-      // P帧 - P帧以提高流畅度
-      if (lastDecodedIFrameSeq != null) {
-        final validPFrames = state.h264FrameBuffer
-            .where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
-                f['frameSeqI'] == lastDecodedIFrameSeq)
-            .toList()
-          ..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
-        // 3P帧
-        int framesToProcess = min(3, validPFrames.length);
-        for (int i = 0; i < framesToProcess; i++) {
-          if (state.h264FrameBuffer.isNotEmpty) {
-            final pFrame = validPFrames[i];
-            final targetIndex = state.h264FrameBuffer.indexWhere(
-              (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
-                  f['frameSeq'] == pFrame['frameSeq'] &&
-                  f['frameSeqI'] == lastDecodedIFrameSeq,
-            );
-            if (targetIndex != -1) {
-              final frameMap = state.h264FrameBuffer.removeAt(targetIndex);
-              await _decodeFrame(frameMap);
-            }
-          }
-        }
-      }
-    } catch (e) {
-      AppLog.log('帧处理错误: $e');
-      //
-      state.isProcessingFrame = false;
-      //
-      _handleDecodeError();
+      // I帧frameSeq最小的I帧消费
+      final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
+      iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
+      if (iFrames.isNotEmpty) {
+        final minIFrame = iFrames.first;
+        final minIFrameSeq = minIFrame['frameSeq'];
+        final targetIndex = state.h264FrameBuffer.indexWhere(
+          (f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
+        );
+        state.isProcessingFrame = true;
+        final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
+        if (frameMap == null) {
+          state.isProcessingFrame = false;
+          return;
+        }
+        final List<int>? frameData = frameMap['frameData'];
+        final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
+        final int? frameSeq = frameMap['frameSeq'];
+        final int? frameSeqI = frameMap['frameSeqI'];
+        final int? pts = frameMap['pts'];
+        if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
+          state.isProcessingFrame = false;
+          return;
+        }
+        if (state.textureId.value == null) {
+          state.isProcessingFrame = false;
+          return;
+        }
+        lastDecodedIFrameSeq = minIFrameSeq;
+        await VideoDecodePlugin.sendFrame(
+          frameData: frameData,
+          frameType: 0,
+          frameSeq: frameSeq,
+          timestamp: pts,
+          splitNalFromIFrame: true,
+          refIFrameSeq: frameSeqI,
+        );
+        state.isProcessingFrame = false;
+        return;
+      }
+      // I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
+      if (lastDecodedIFrameSeq != null) {
+        final validPFrames =
+            state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
+        if (validPFrames.isNotEmpty) {
+          validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
+          final minPFrame = validPFrames.first;
+          final targetIndex = state.h264FrameBuffer.indexWhere(
+            (f) =>
+                f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
+          );
+          state.isProcessingFrame = true;
+          final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
+          if (frameMap == null) {
+            state.isProcessingFrame = false;
+            return;
+          }
+          final List<int>? frameData = frameMap['frameData'];
+          final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
+          final int? frameSeq = frameMap['frameSeq'];
+          final int? frameSeqI = frameMap['frameSeqI'];
+          final int? pts = frameMap['pts'];
+          if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
+            state.isProcessingFrame = false;
+            return;
+          }
+          if (state.textureId.value == null) {
+            state.isProcessingFrame = false;
+            return;
+          }
+          await VideoDecodePlugin.sendFrame(
+            frameData: frameData,
+            frameType: 1,
+            frameSeq: frameSeq,
+            timestamp: pts,
+            splitNalFromIFrame: true,
+            refIFrameSeq: frameSeqI,
+          );
+          state.isProcessingFrame = false;
+          return;
+        }
+      }
+      // I帧到来
     } finally {
-      state.isProcessingFrame = false;
+      final endTime = DateTime.now().microsecondsSinceEpoch;
+      final durationMs = (endTime - startTime) / 1000.0;
+      // > 5ms
+      if (durationMs > 5) {
+        debugPrint('[_processNextFrameFromBuffer] 耗时: ${durationMs.toStringAsFixed(2)} ms');
+        // 使
+        // AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
+      }
     }
   }

-  int _decodeErrorCount = 0;
-  static const int MAX_DECODE_ERRORS = 5;
-
-  void _handleDecodeError() {
-    _decodeErrorCount++;
-    if (_decodeErrorCount >= MAX_DECODE_ERRORS) {
-      AppLog.log('解码错误过多,重置解码器');
-      _resetDecoderForNewStream(
-        StartChartManage().videoWidth,
-        StartChartManage().videoHeight
-      );
-      _decodeErrorCount = 0;
-    }
-  }
-
-  //
-  Future<void> _decodeFrame(Map<String, dynamic> frameMap) async {
-    final List<int>? frameData = frameMap['frameData'];
-    final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
-    final int? frameSeq = frameMap['frameSeq'];
-    final int? frameSeqI = frameMap['frameSeqI'];
-    final int? pts = frameMap['pts'];
-    final ScpMessage? scpMessage = frameMap['scpMessage'];
-    if (frameData == null || frameType == null || frameSeq == null ||
-        frameSeqI == null || pts == null || state.textureId.value == null) {
-      return;
-    }
-    AppLog.log('送入解码器的帧数据frameSeq:$frameSeq,frameSeqI:$frameSeqI,'
-        'frameType:$frameType,messageId:${scpMessage?.MessageId}');
-    await VideoDecodePlugin.sendFrame(
-      frameData: frameData,
-      frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
-      frameSeq: frameSeq,
-      timestamp: pts,
-      splitNalFromIFrame: true,
-      refIFrameSeq: frameSeqI,
-    );
-  }
-
   ///
   void _stopFrameProcessTimer() {
     state.frameProcessTimer?.cancel();
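
The body restored above implements one selection rule: always decode the earliest buffered I frame if there is one (remembering its frameSeq), otherwise decode the earliest P frame whose frameSeqI references the last decoded I frame, and if neither exists wait for the next I frame. A condensed, self-contained sketch of that rule with illustrative field names:

    // Returns the frame to decode next, removing it from the buffer,
    // or null if nothing is currently decodable.
    Map<String, dynamic>? pickNextFrame(
        List<Map<String, dynamic>> buffer, int? lastDecodedIFrameSeq) {
      // 1. Earliest I frame wins: it refreshes the reference picture.
      final iFrames = buffer.where((f) => f['isIFrame'] == true).toList()
        ..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
      if (iFrames.isNotEmpty) {
        buffer.remove(iFrames.first);
        return iFrames.first;
      }
      // 2. Otherwise only P frames that reference the I frame already decoded.
      if (lastDecodedIFrameSeq == null) return null;
      final pFrames = buffer
          .where((f) =>
              f['isIFrame'] == false && f['frameSeqI'] == lastDecodedIFrameSeq)
          .toList()
        ..sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
      if (pFrames.isEmpty) return null; // 3. wait for the next I frame
      buffer.remove(pFrames.first);
      return pFrames.first;
    }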
@@ -712,20 +564,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     //
     _startListenTalkData();
-
-    //
-    state.targetFps = _getOptimalFps();
-    _startFrameProcessTimer();
-  }
-
-  //
-  int _getOptimalFps() {
-    //
-    if (defaultTargetPlatform == TargetPlatform.android) {
-      return 25; // Android设备通常性能较好
-    } else {
-      return 20; // iOS设备保守一些
-    }
   }

   @override
@@ -983,7 +821,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   //
   void onQualityChanged(String quality) async {
     state.currentQuality.value = quality;
-    state.isLoading.value = true; //
     TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
     final audioType = talkExpectReq.audioType;
     int width = 864;
@@ -1007,8 +844,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         break;
     }
-    //
-    await _resetDecoderForNewStream(width, height);

     ///
     StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(talkExpect: talkExpectReq);
@@ -1042,7 +877,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     }

     //
-    await Future.delayed(Duration(milliseconds: 50));
+    await Future.delayed(Duration(milliseconds: 100));

     //
     final config = VideoDecoderConfig(
@@ -1122,98 +957,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
           AppLog.log('无法处理H264帧textureId为空');
         }
         break;
-      case TalkData_ContentTypeE.H264:
-        if (state.textureId.value != null && talkDataH264Frame != null) {
-          // SPS/PPS
-          if (talkDataH264Frame.frameType == TalkDataH264Frame_FrameTypeE.I) {
-            _cacheSPSPPSIfNeeded(talkData.content);
-          }
-          _addFrameToBuffer(
-            talkData.content,
-            talkDataH264Frame.frameType,
-            talkData.durationMs,
-            talkDataH264Frame.frameSeq,
-            talkDataH264Frame.frameSeqI,
-            scpMessage!,
-          );
-        } else {
-          AppLog.log('无法处理H264帧textureId为空或帧数据无效');
-        }
-        break;
-    }
-  }
-
-  void _cacheSPSPPSIfNeeded(List<int> frameData) {
-    try {
-      // H.264NAL单元开始NAL头部
-      int offset = 0;
-      // NAL单元分隔符 (0x00000001 0x000001)
-      while (offset < frameData.length - 4) {
-        // 0x00000001
-        if (frameData[offset] == 0 && frameData[offset + 1] == 0 &&
-            frameData[offset + 2] == 0 && frameData[offset + 3] == 1) {
-          // (4)
-          int nalStart = offset + 4;
-          // NAL头部
-          if (nalStart >= frameData.length) break;
-          // NAL头部第一个字节包含NAL类型信息
-          // bit 0-7: forbidden_zero_bit(1) + nal_ref_idc(2) + nal_unit_type(5)
-          int nalHeader = frameData[nalStart];
-          int nalType = nalHeader & 0x1F; // 5
-          // H.264 NAL单元类型:
-          // 7 = SPS (Sequence Parameter Set)
-          // 8 = PPS (Picture Parameter Set)
-          if (nalType == 7) {
-            // SPS - NAL单元(NAL头部)
-            int nalEnd = _findNextStartCode(frameData, nalStart);
-            if (nalEnd == -1) nalEnd = frameData.length;
-            spsCache = frameData.sublist(offset, nalEnd);
-            hasSps = true;
-            AppLog.log('检测到并缓存SPS数据, 长度: ${spsCache!.length}');
-          } else if (nalType == 8) {
-            // PPS - NAL单元
-            int nalEnd = _findNextStartCode(frameData, nalStart);
-            if (nalEnd == -1) nalEnd = frameData.length;
-            ppsCache = frameData.sublist(offset, nalEnd);
-            hasPps = true;
-            AppLog.log('检测到并缓存PPS数据, 长度: ${ppsCache!.length}');
-          }
-          // NAL单元
-          offset = nalStart + 1;
-        } else {
-          offset++;
-        }
-      }
-    } catch (e) {
-      AppLog.log('SPS/PPS检测错误: $e');
-    }
-  }
-
-  //
-  int _findNextStartCode(List<int> data, int fromIndex) {
-    for (int i = fromIndex; i < data.length - 4; i++) {
-      if (data[i] == 0 && data[i + 1] == 0 &&
-          data[i + 2] == 0 && data[i + 3] == 1) {
-        return i; //
-      }
-    }
-    return -1; //
-  }
-
-  // 使SPS/PPS数据
-  void _ensureSPSPPSAvailable() {
-    if (hasSps && hasPps && spsCache != null && ppsCache != null) {
-      // SPS/PPS数据发送给解码器
-      //
-      AppLog.log('SPS和PPS数据已就绪可用于解码器初始化');
-    }
-  }
     }
   }
 }
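
For reference on the removed _cacheSPSPPSIfNeeded: in Annex B H.264 streams each NAL unit starts after a 0x00000001 start code, and the low five bits of the following byte are the nal_unit_type (7 = SPS, 8 = PPS). A minimal, self-contained check in that spirit (the function name is illustrative, not project code):

    // Returns the nal_unit_type found right after a 4-byte start code at
    // `offset`, or null if there is no start code there.
    int? nalUnitTypeAfterStartCode(List<int> data, int offset) {
      if (offset + 4 >= data.length) return null;
      final bool hasStartCode = data[offset] == 0 &&
          data[offset + 1] == 0 &&
          data[offset + 2] == 0 &&
          data[offset + 3] == 1;
      if (!hasStartCode) return null;
      return data[offset + 4] & 0x1F; // low 5 bits of the NAL header byte
    }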

View File

@@ -29,15 +29,12 @@ class TalkViewNativeDecodePage extends StatefulWidget {
   const TalkViewNativeDecodePage({Key? key}) : super(key: key);

   @override
-  State<TalkViewNativeDecodePage> createState() =>
-      _TalkViewNativeDecodePageState();
+  State<TalkViewNativeDecodePage> createState() => _TalkViewNativeDecodePageState();
 }

-class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
-    with TickerProviderStateMixin {
+class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage> with TickerProviderStateMixin {
   final TalkViewNativeDecodeLogic logic = Get.put(TalkViewNativeDecodeLogic());
-  final TalkViewNativeDecodeState state =
-      Get.find<TalkViewNativeDecodeLogic>().state;
+  final TalkViewNativeDecodeState state = Get.find<TalkViewNativeDecodeLogic>().state;
   final startChartManage = StartChartManage();

   @override
@@ -69,66 +66,48 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
         // false 退
         return false;
       },
-      child: SizedBox(
+      child: Container(
         width: 1.sw,
         height: 1.sh,
+        color: Colors.black.withOpacity(0.7),
         child: Stack(
           alignment: Alignment.center,
           children: <Widget>[
             //
             Obx(
               () {
-                final double screenWidth = MediaQuery.of(context).size.width;
-                final double screenHeight = MediaQuery.of(context).size.height;
-                final double logicalWidth = MediaQuery.of(context).size.width;
-                final double logicalHeight = MediaQuery.of(context).size.height;
-                final double devicePixelRatio =
-                    MediaQuery.of(context).devicePixelRatio;
-                //
-                final double physicalWidth = logicalWidth * devicePixelRatio;
-                final double physicalHeight = logicalHeight * devicePixelRatio;
-                //
-                const int rotatedImageWidth = 480; //
-                const int rotatedImageHeight = 864; //
-                //
-                final double scaleWidth = physicalWidth / rotatedImageWidth;
-                final double scaleHeight = physicalHeight / rotatedImageHeight;
-                max(scaleWidth, scaleHeight); //
                 // loading中或textureId为nullloading/
                 if (state.isLoading.isTrue || state.textureId.value == null) {
                   return Image.asset(
                     'images/main/monitorBg.png',
-                    width: screenWidth,
-                    height: screenHeight,
+                    width: 1.sw,
+                    height: 1.sh,
                     fit: BoxFit.cover,
                   );
                 } else {
-                  return Positioned.fill(
+                  return Positioned(
+                    top: 0,
+                    left: 0,
+                    right: 0,
                     child: PopScope(
                       canPop: false,
                       child: RepaintBoundary(
                         key: state.globalKey,
-                        child: SizedBox.expand(
-                          child: RotatedBox(
-                            // 使RotatedBox
-                            quarterTurns: startChartManage.rotateAngle ~/ 90,
-                            child: Platform.isIOS
-                                ? Transform.scale(
-                                    scale: 1.008, // iOS白边
-                                    child: Texture(
-                                      textureId: state.textureId.value!,
-                                      filterQuality: FilterQuality.medium,
-                                    ),
-                                  )
-                                : Texture(
-                                    textureId: state.textureId.value!,
-                                    filterQuality: FilterQuality.medium,
-                                  ),
-                          ),
-                        ),
+                        child: RotatedBox(
+                          // 使RotatedBox
+                          quarterTurns: startChartManage.rotateAngle ~/ 90,
+                          child: state.isFullScreen.isFalse
+                              ? AspectRatio(
+                                  aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
+                                  child: Texture(
+                                    textureId: state.textureId.value!,
+                                    filterQuality: FilterQuality.medium,
+                                  ),
+                                )
+                              : Texture(
+                                  textureId: state.textureId.value!,
+                                  filterQuality: FilterQuality.medium,
+                                ),
+                        ),
                       ),
                     ),
                   );
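
The layout change above drops the manual screen/pixel-ratio math and lets Flutter do the fitting: outside full screen the Texture is wrapped in an AspectRatio matching the stream, and RotatedBox turns it in 90-degree steps. A stripped-down sketch of that composition (parameter names are illustrative; the real page reads these values from state and StartChartManage):

    import 'package:flutter/material.dart';

    Widget buildVideoSurface({
      required int textureId,
      required int videoWidth,
      required int videoHeight,
      required int rotateAngle, // multiple of 90
      required bool isFullScreen,
    }) {
      final Widget texture =
          Texture(textureId: textureId, filterQuality: FilterQuality.medium);
      return RotatedBox(
        quarterTurns: rotateAngle ~/ 90,
        child: isFullScreen
            ? texture // full screen: let the parent decide the size
            : AspectRatio(
                aspectRatio: videoWidth / videoHeight, // keep the stream's shape
                child: texture,
              ),
      );
    }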
@@ -151,19 +130,14 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
               width: 1.sw,
               child: Obx(
                 () {
-                  final String sec = (state.oneMinuteTime.value % 60)
-                      .toString()
-                      .padLeft(2, '0');
-                  final String min = (state.oneMinuteTime.value ~/ 60)
-                      .toString()
-                      .padLeft(2, '0');
+                  final String sec = (state.oneMinuteTime.value % 60).toString().padLeft(2, '0');
+                  final String min = (state.oneMinuteTime.value ~/ 60).toString().padLeft(2, '0');
                   return Row(
                     mainAxisAlignment: MainAxisAlignment.center,
                     children: <Widget>[
                       Text(
                         '$min:$sec',
-                        style: TextStyle(
-                            fontSize: 26.sp, color: Colors.white),
+                        style: TextStyle(fontSize: 26.sp, color: Colors.white),
                       ),
                     ],
                   );
@@ -177,9 +151,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
             width: 1.sw - 30.w * 2,
             // height: 300.h,
             margin: EdgeInsets.all(30.w),
-            decoration: BoxDecoration(
-                color: Colors.black.withOpacity(0.2),
-                borderRadius: BorderRadius.circular(20.h)),
+            decoration: BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
             child: Column(
               children: <Widget>[
                 SizedBox(height: 20.h),
@@ -191,9 +163,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
                   ),
                 ),
               ),
-              Obx(() => state.isLoading.isTrue
-                  ? buildRotationTransition()
-                  : Container()),
+              Obx(() => state.isLoading.isTrue ? buildRotationTransition() : Container()),
               Obx(() => state.isLongPressing.value
                   ? Positioned(
                       top: 80.h,
@@ -213,8 +183,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
                           SizedBox(width: 10.w),
                           Text(
                             '正在说话...'.tr,
-                            style: TextStyle(
-                                fontSize: 20.sp, color: Colors.white),
+                            style: TextStyle(fontSize: 20.sp, color: Colors.white),
                           ),
                         ],
                       ),
@@ -246,10 +215,8 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
                         width: 40.w,
                         height: 40.w,
                         image: state.isOpenVoice.value
-                            ? const AssetImage(
-                                'images/main/icon_lockDetail_monitoringOpenVoice.png')
-                            : const AssetImage(
-                                'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
+                            ? const AssetImage('images/main/icon_lockDetail_monitoringOpenVoice.png')
+                            : const AssetImage('images/main/icon_lockDetail_monitoringCloseVoice.png'))),
               ),
             ),
             SizedBox(width: 50.w),
@@ -264,11 +231,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
                 width: 50.w,
                 height: 50.w,
                 padding: EdgeInsets.all(5.w),
-                child: Image(
-                    width: 40.w,
-                    height: 40.w,
-                    image: const AssetImage(
-                        'images/main/icon_lockDetail_monitoringScreenshot.png')),
+                child: Image(width: 40.w, height: 40.w, image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
               ),
             ),
             SizedBox(width: 50.w),
@@ -293,8 +256,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
                     width: 40.w,
                     height: 40.w,
                     fit: BoxFit.fill,
-                    image: const AssetImage(
-                        'images/main/icon_lockDetail_monitoringScreenRecording.png'),
+                    image: const AssetImage('images/main/icon_lockDetail_monitoringScreenRecording.png'),
                   ),
                 ),
               ),
@@ -330,13 +292,8 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
                           Text(
                             q,
                             style: TextStyle(
-                              color: state.currentQuality.value == q
-                                  ? AppColors.mainColor
-                                  : Colors.black,
-                              fontWeight:
-                                  state.currentQuality.value == q
-                                      ? FontWeight.bold
-                                      : FontWeight.normal,
+                              color: state.currentQuality.value == q ? AppColors.mainColor : Colors.black,
+                              fontWeight: state.currentQuality.value == q ? FontWeight.bold : FontWeight.normal,
                               fontSize: 28.sp,
                             ),
                           ),
@@ -352,8 +309,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
                 );
               },
               child: Container(
-                child: Icon(Icons.high_quality_outlined,
-                    color: Colors.white, size: 38.w),
+                child: Icon(Icons.high_quality_outlined, color: Colors.white, size: 38.w),
               ),
             ),
             Visibility(
@@ -377,61 +333,56 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
   }

   Widget bottomBottomBtnWidget() {
-    return Row(
-      mainAxisAlignment: MainAxisAlignment.spaceEvenly,
-      children: <Widget>[
-        //
-        Obx(
-          () => bottomBtnItemWidget(
-            getAnswerBtnImg(),
-            getAnswerBtnName(),
-            Colors.white,
-            longPress: () async {
-              if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
-                //
-                logic.startProcessingAudio();
-                state.isLongPressing.value = true;
-              }
-            },
-            longPressUp: () async {
-              //
-              logic.stopProcessingAudio();
-              state.isLongPressing.value = false;
-            },
-            onClick: () async {
-              if (state.talkStatus.value ==
-                  TalkStatus.passiveCallWaitingAnswer) {
-                //
-                logic.initiateAnswerCommand();
-              }
-            },
-          ),
-        ),
-        bottomBtnItemWidget(
-            'images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red,
-            onClick: () {
-          //
-          logic.udpHangUpAction();
-        }),
-        bottomBtnItemWidget(
-          'images/main/icon_lockDetail_monitoringUnlock.png',
-          '开锁'.tr,
-          AppColors.mainColor,
-          onClick: () {
-            // if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
-            //     state.listData.value.length > 0) {
-            //   logic.udpOpenDoorAction();
-            // }
-            // if (UDPManage().remoteUnlock == 1) {
-            //   logic.udpOpenDoorAction();
-            //   showDeletPasswordAlertDialog(context);
-            // } else {
-            //   logic.showToast('请在锁设置中开启远程开锁'.tr);
-            // }
-            logic.remoteOpenLock();
-          },
-        )
-      ]);
+    return Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: <Widget>[
+      //
+      Obx(
+        () => bottomBtnItemWidget(
+          getAnswerBtnImg(),
+          getAnswerBtnName(),
+          Colors.white,
+          longPress: () async {
+            if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
+              //
+              logic.startProcessingAudio();
+              state.isLongPressing.value = true;
+            }
+          },
+          longPressUp: () async {
+            //
+            logic.stopProcessingAudio();
+            state.isLongPressing.value = false;
+          },
+          onClick: () async {
+            if (state.talkStatus.value == TalkStatus.passiveCallWaitingAnswer) {
+              //
+              logic.initiateAnswerCommand();
+            }
+          },
+        ),
+      ),
+      bottomBtnItemWidget('images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red, onClick: () {
+        //
+        logic.udpHangUpAction();
+      }),
+      bottomBtnItemWidget(
+        'images/main/icon_lockDetail_monitoringUnlock.png',
+        '开锁'.tr,
+        AppColors.mainColor,
+        onClick: () {
+          // if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
+          //     state.listData.value.length > 0) {
+          //   logic.udpOpenDoorAction();
+          // }
+          // if (UDPManage().remoteUnlock == 1) {
+          //   logic.udpOpenDoorAction();
+          //   showDeletPasswordAlertDialog(context);
+          // } else {
+          //   logic.showToast('请在锁设置中开启远程开锁'.tr);
+          // }
+          logic.remoteOpenLock();
+        },
+      )
+    ]);
   }

   String getAnswerBtnImg() {

View File

@@ -69,7 +69,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
       child: Container(
         width: 1.sw,
         height: 1.sh,
-        color: Colors.blue,
+        color: Colors.black.withOpacity(0.7),
         child: Stack(
           alignment: Alignment.center,
           children: <Widget>[
@@ -92,33 +92,29 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
                     canPop: false,
                     child: RepaintBoundary(
                       key: state.globalKey,
-                      child: SizedBox(
-                        width: StartChartManage().videoHeight.w,
-                        height: StartChartManage().videoWidth.h,
-                        child: RotatedBox(
-                          // 使RotatedBox
-                          quarterTurns: startChartManage.rotateAngle ~/ 90,
-                          child: Platform.isIOS
-                              ? Transform.scale(
-                                  scale: 1.008, // iOS白边
-                                  child: Texture(
-                                    textureId: state.textureId.value!,
-                                    filterQuality: FilterQuality.medium,
-                                  ),
-                                )
-                              : state.isFullScreen.isFalse
-                                  ? AspectRatio(
-                                      aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
-                                      child: Texture(
-                                        textureId: state.textureId.value!,
-                                        filterQuality: FilterQuality.medium,
-                                      ),
-                                    )
-                                  : Texture(
-                                      textureId: state.textureId.value!,
-                                      filterQuality: FilterQuality.medium,
-                                    ),
-                        ),
-                      ),
+                      child: RotatedBox(
+                        // 使RotatedBox
+                        quarterTurns: startChartManage.rotateAngle ~/ 90,
+                        child: Platform.isIOS
+                            ? Transform.scale(
+                                scale: 1.008, // iOS白边
+                                child: Texture(
+                                  textureId: state.textureId.value!,
+                                  filterQuality: FilterQuality.medium,
+                                ),
+                              )
+                            : state.isFullScreen.isFalse
+                                ? AspectRatio(
+                                    aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
+                                    child: Texture(
+                                      textureId: state.textureId.value!,
+                                      filterQuality: FilterQuality.medium,
+                                    ),
+                                  )
+                                : Texture(
+                                    textureId: state.textureId.value!,
+                                    filterQuality: FilterQuality.medium,
+                                  ),
+                      ),
                     ),
                   ),
                 );
@@ -130,7 +126,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
                 state.isFullScreen.value = !state.isFullScreen.value;
               },
               child: Obx(
-                () => Text(state.isFullScreen.isTrue ? '退出全屏' : '全屏'),
+                () => Text(state.isFullScreen.isTrue ? '退出全屏' : '全屏'),
               ),
             ),
             Obx(() => state.isLoading.isTrue
@@ -168,8 +164,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
             width: 1.sw - 30.w * 2,
             // height: 300.h,
             margin: EdgeInsets.all(30.w),
-            decoration:
-                BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
+            decoration: BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
             child: Column(
               children: <Widget>[
                 SizedBox(height: 20.h),
@@ -249,10 +244,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
                 width: 50.w,
                 height: 50.w,
                 padding: EdgeInsets.all(5.w),
-                child: Image(
-                    width: 40.w,
-                    height: 40.w,
-                    image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
+                child: Image(width: 40.w, height: 40.w, image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
               ),
             ),
             SizedBox(width: 50.w),

View File

@@ -67,7 +67,7 @@ class TalkViewNativeDecodeState {
   //
   final TalkDataRepository talkDataRepository = TalkDataRepository.instance;

-  RxBool isOpenVoice = true.obs; //
+  RxBool isOpenVoice = false.obs; //
   RxBool isRecordingScreen = false.obs; //
   RxBool isRecordingAudio = false.obs; //
   Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; //
@@ -109,8 +109,8 @@ class TalkViewNativeDecodeState {
   // H264帧缓冲区相关
   final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区
-  final int maxFrameBufferSize = 50; //
-  int targetFps = 25; // ,native的缓冲区
+  final int maxFrameBufferSize = 25; //
+  final int targetFps = 25; // ,native的缓冲区
   Timer? frameProcessTimer; //
   bool isProcessingFrame = false; //
   int lastProcessedTimestamp = 0; //
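
Rough sanity check on the new defaults above, assuming frames arrive at roughly the target rate: a 25-frame buffer drained at 25 fps holds about one second of video before the eviction logic starts dropping P frames.

    void main() {
      const int maxFrameBufferSize = 25;
      const int targetFps = 25;
      final double bufferedSeconds = maxFrameBufferSize / targetFps;
      print('buffer covers ~${bufferedSeconds.toStringAsFixed(2)} s'); // ~1.00 s
    }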