fix: add logging

liyi 2025-11-07 18:03:11 +08:00
parent 6cc00ca348
commit 513607e1ef
8 changed files with 233 additions and 298 deletions

View File

@ -31,8 +31,7 @@ class SaveLockLogic extends BaseGetXController {
late StreamSubscription<Reply> _replySubscription;
void _initReplySubscription() {
_replySubscription =
EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
_replySubscription = EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
if (reply is AddUserReply && state.ifCurrentScreen.value == true) {
_replyAddUserKey(reply);
}
@ -66,15 +65,11 @@ class SaveLockLogic extends BaseGetXController {
break;
case 0x06:
//
final List<String>? privateKey =
await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList =
changeStringListToIntList(privateKey!);
final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
final List<String>? publicKey =
await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList =
changeStringListToIntList(publicKey!);
final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);
IoSenderManage.senderAddUser(
lockID: BlueManage().connectDeviceName,
@ -215,19 +210,14 @@ class SaveLockLogic extends BaseGetXController {
showBlueConnetctToast();
}
});
BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState deviceConnectionState) async {
BlueManage().blueSendData(BlueManage().connectDeviceName, (BluetoothConnectionState deviceConnectionState) async {
if (deviceConnectionState == BluetoothConnectionState.connected) {
//
final List<String>? privateKey =
await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList =
changeStringListToIntList(privateKey!);
final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
final List<String>? publicKey =
await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList =
changeStringListToIntList(publicKey!);
final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);
final List<String>? token = await Storage.getStringList(saveBlueToken);
List<int> getTokenList = <int>[0, 0, 0, 0];
@ -257,8 +247,7 @@ class SaveLockLogic extends BaseGetXController {
privateKey: getPrivateKeyList,
token: getTokenList,
isBeforeAddUser: true);
} else if (deviceConnectionState ==
BluetoothConnectionState.disconnected) {
} else if (deviceConnectionState == BluetoothConnectionState.disconnected) {
dismissEasyLoading();
cancelBlueConnetctToastTimer();
state.sureBtnState.value = 0;
@ -376,16 +365,14 @@ class SaveLockLogic extends BaseGetXController {
// positionMap['address'] = state.addressInfo['address'];
final Map<String, dynamic> bluetooth = <String, dynamic>{};
bluetooth['bluetoothDeviceId'] = BlueManage().connectDeviceMacAddress;
bluetooth['bluetoothDeviceId'] = state.lockInfo['mac'];
bluetooth['bluetoothDeviceName'] = BlueManage().connectDeviceName;
final List<String>? publicKey =
await Storage.getStringList(saveBluePublicKey);
final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
final List<int> publicKeyDataList = changeStringListToIntList(publicKey!);
bluetooth['publicKey'] = publicKeyDataList;
final List<String>? privateKey =
await Storage.getStringList(saveBluePrivateKey);
final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
bluetooth['privateKey'] = getPrivateKeyList;
@ -410,8 +397,7 @@ class SaveLockLogic extends BaseGetXController {
final String getMobile = (await Storage.getMobile())!;
ApmHelper.instance.trackEvent('save_lock_result', {
'lock_name': BlueManage().connectDeviceName,
'account':
getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'account': getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'date': DateTool().getNowDateWithType(1),
'save_lock_result': '成功',
});
@ -427,8 +413,7 @@ class SaveLockLogic extends BaseGetXController {
final String getMobile = (await Storage.getMobile())!;
ApmHelper.instance.trackEvent('save_lock_result', {
'lock_name': BlueManage().connectDeviceName,
'account':
getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'account': getMobile.isNotEmpty ? getMobile : (await Storage.getEmail())!,
'date': DateTool().getNowDateWithType(1),
'save_lock_result': '${entity.errorCode}--${entity.errorMsg}',
});
@ -489,26 +474,18 @@ class SaveLockLogic extends BaseGetXController {
// BlueManage().disconnect();
//
final LockSetInfoEntity entity =
await ApiRepository.to.getLockSettingInfoData(
final LockSetInfoEntity entity = await ApiRepository.to.getLockSettingInfoData(
lockId: state.lockId.toString(),
);
if (entity.errorCode!.codeIsSuccessful) {
state.lockSetInfoData.value = entity.data!;
if (state.lockSetInfoData.value.lockFeature?.wifi == 1) {
// Wi-Fi lock: go to the Wi-Fi setup page
Get.toNamed(Routers.wifiListPage, arguments: {
'lockSetInfoData': state.lockSetInfoData.value,
'pageName': 'saveLock'
});
Get.toNamed(Routers.wifiListPage, arguments: {'lockSetInfoData': state.lockSetInfoData.value, 'pageName': 'saveLock'});
} else if (state.lockSetInfoData.value.lockFeature?.languageSpeech == 1) {
Get.toNamed(Routers.lockVoiceSettingPage, arguments: {
'lockSetInfoData': state.lockSetInfoData.value,
'pageName': 'saveLock'
});
Get.toNamed(Routers.lockVoiceSettingPage, arguments: {'lockSetInfoData': state.lockSetInfoData.value, 'pageName': 'saveLock'});
} else {
eventBus.fire(RefreshLockListInfoDataEvent(
clearScanDevices: true, isUnShowLoading: true));
eventBus.fire(RefreshLockListInfoDataEvent(clearScanDevices: true, isUnShowLoading: true));
Future<void>.delayed(const Duration(seconds: 1), () {
// Get.close(state.isFromMap == 1
// ? (CommonDataManage().seletLockType == 0 ? 4 : 5)
@ -518,15 +495,12 @@ class SaveLockLogic extends BaseGetXController {
// 2
Future<void>.delayed(const Duration(milliseconds: 200), () {
if (Get.isRegistered<LockDetailLogic>()) {
Get.find<LockDetailLogic>()
.functionBlocker
.countdownProhibited(duration: const Duration(seconds: 2));
Get.find<LockDetailLogic>().functionBlocker.countdownProhibited(duration: const Duration(seconds: 2));
}
});
}
} else {
eventBus.fire(RefreshLockListInfoDataEvent(
clearScanDevices: true, isUnShowLoading: true));
eventBus.fire(RefreshLockListInfoDataEvent(clearScanDevices: true, isUnShowLoading: true));
Future<void>.delayed(const Duration(seconds: 1), () {
// Get.close(state.isFromMap == 1
// ? (CommonDataManage().seletLockType == 0 ? 4 : 5)
@ -536,9 +510,7 @@ class SaveLockLogic extends BaseGetXController {
// 2
Future<void>.delayed(const Duration(milliseconds: 200), () {
if (Get.isRegistered<LockDetailLogic>()) {
Get.find<LockDetailLogic>()
.functionBlocker
.countdownProhibited(duration: const Duration(seconds: 2));
Get.find<LockDetailLogic>().functionBlocker.countdownProhibited(duration: const Duration(seconds: 2));
}
});
}
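The hunks above repeatedly read stored key material with Storage.getStringList and force-unwrap it (privateKey!, publicKey!) before converting it via changeStringListToIntList. Below is a minimal, self-contained Dart sketch of that load-and-convert pattern with the missing-key case handled explicitly; the _MockStorage class and the stringListToIntList helper are stand-ins for illustration, not the project's real Storage API.

// Minimal sketch: load a stored String list and convert it to ints,
// treating a missing key as an empty list instead of force-unwrapping.
// _MockStorage stands in for the project's Storage class (assumption).
class _MockStorage {
  static final Map<String, List<String>> _store = <String, List<String>>{};

  static Future<List<String>?> getStringList(String key) async => _store[key];

  static Future<void> setStringList(String key, List<String> value) async {
    _store[key] = value;
  }
}

// Same idea as changeStringListToIntList in the diff (assumed behaviour):
// parse each entry, falling back to 0 for anything that is not a valid integer.
List<int> stringListToIntList(List<String>? values) =>
    (values ?? const <String>[]).map((s) => int.tryParse(s) ?? 0).toList();

Future<void> main() async {
  await _MockStorage.setStringList('saveBluePrivateKey', <String>['1', '2', '255']);

  final List<String>? privateKey = await _MockStorage.getStringList('saveBluePrivateKey');
  final List<int> privateKeyBytes = stringListToIntList(privateKey);

  // A key that was never written simply yields an empty list.
  final List<int> publicKeyBytes =
      stringListToIntList(await _MockStorage.getStringList('saveBluePublicKey'));

  print(privateKeyBytes); // [1, 2, 255]
  print(publicKeyBytes);  // []
}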

View File

@ -62,8 +62,6 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle implements ScpMessageHa
_handleResponseSendExpect(
lockPeerID: scpMessage.FromPeerId!,
);
//
startChartManage.startTalkExpectTimer();
//
startChartManage.stopCallRequestMessageTimer();
//

View File

@ -282,7 +282,9 @@ class StartChartManage {
// send the RbcuInfo message
void _sendRbcuInfoMessage({required String ToPeerId, bool isResp = false}) async {
final uuid = _uuid.v1();
final int timestamp = DateTime.now().millisecondsSinceEpoch;
final int timestamp = DateTime
.now()
.millisecondsSinceEpoch;
final Int64 int64Timestamp = Int64(timestamp); // 使
// local IP addresses plus the external address returned by the relay
@ -301,7 +303,7 @@ class StartChartManage {
.where((addr) => addr != null) // filter out nulls
.map(
(addr) => addr!.replaceAll(IpConstant.udpUrl, ''),
) // "udp://"
) // "udp://"
.cast<String>(); // cast Iterable<String?> to Iterable<String>
_rbcuSessionId = uuid;
final RbcuInfo rbcuInfo = RbcuInfo(
@ -457,7 +459,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
AppLog.log('发送对讲请求:${ToPeerId}');
await sendCallRequestMessage(ToPeerId: ToPeerId);
},
@ -533,7 +535,7 @@ class StartChartManage {
Duration(
seconds: heartbeatIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
final List<int> message = MessageCommand.heartbeatMessage(
FromPeerId: FromPeerId,
ToPeerId: relayPeerId,
@ -583,7 +585,9 @@ class StartChartManage {
ToPeerId: ToPeerId,
FromPeerId: FromPeerId,
gatewayId: gatewayId,
time: DateTime.now().millisecondsSinceEpoch ~/ 1000,
time: DateTime
.now()
.millisecondsSinceEpoch ~/ 1000,
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
@ -617,7 +621,7 @@ class StartChartManage {
talkRejectTimer ??= Timer.periodic(
Duration(seconds: _defaultIntervalTime),
(Timer timer) async {
(Timer timer) async {
_sendTalkRejectMessage();
count++;
if (count >= maxCount) {
@ -723,7 +727,7 @@ class StartChartManage {
void startTalkHangupMessageTimer() {
talkHangupTimer ??= Timer.periodic(
Duration(seconds: _defaultIntervalTime),
(Timer timer) async {
(Timer timer) async {
_sendTalkHangupMessage();
},
);
@ -767,7 +771,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
// send the online message
await _sendOnlineMessage();
},
@ -937,7 +941,9 @@ class StartChartManage {
String ipAddress = address.address;
// IPv6
if (ipAddress.contains('%')) {
ipAddress = ipAddress.split('%').first;
ipAddress = ipAddress
.split('%')
.first;
}
// IP
if (ipAddress.isNotEmpty && !IpConstant.reportExcludeIp.contains(ipAddress)) {
@ -1060,7 +1066,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) async {
(Timer timer) async {
await sendTalkPingMessage(
ToPeerId: ToPeerId,
FromPeerId: FromPeerId,
@ -1081,7 +1087,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) {
(Timer timer) {
//
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
@ -1096,7 +1102,7 @@ class StartChartManage {
Duration(
seconds: _defaultIntervalTime,
),
(Timer timer) {
(Timer timer) {
sendTalkAcceptMessage();
},
);
@ -1128,7 +1134,16 @@ class StartChartManage {
///
void changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer({required TalkExpectReq talkExpect}) {
_defaultTalkExpect = talkExpect;
reStartTalkExpectMessageTimer();
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
);
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
);
sendTalkExpectMessage(
talkExpect: _defaultTalkExpect,
);
// reStartTalkExpectMessageTimer();
}
void reSetDefaultTalkExpect() {
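The change above drops reStartTalkExpectMessageTimer() and instead calls sendTalkExpectMessage three times in a row. A small Dart sketch of the same repeat-send idea written as a loop follows; the three-attempt count mirrors the diff, while the TalkExpectReq stand-in, the send callback, and the 100 ms gap between attempts are assumptions for illustration.

import 'dart:async';

// Hypothetical stand-in for the TalkExpectReq payload used in the diff.
class TalkExpectReq {
  const TalkExpectReq(this.dataType);
  final int dataType;
}

/// Sends the same talk-expect payload [attempts] times, pausing [gap]
/// between sends, instead of restarting a periodic timer.
Future<void> sendTalkExpectRepeated(
  TalkExpectReq talkExpect, {
  int attempts = 3,
  Duration gap = const Duration(milliseconds: 100),
  required Future<void> Function(TalkExpectReq) send,
}) async {
  for (var i = 0; i < attempts; i++) {
    await send(talkExpect);
    if (i < attempts - 1) {
      await Future<void>.delayed(gap);
    }
  }
}

Future<void> main() async {
  await sendTalkExpectRepeated(
    const TalkExpectReq(1),
    send: (req) async {
      print('sendTalkExpectMessage(dataType: ${req.dataType})');
    },
  );
}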

View File

@ -18,7 +18,7 @@ class _PermissionGuidancePageState extends State<PermissionGuidancePage> {
final List<Map<String, String>> _stepsData = [
{
'image': 'images/guide/matter.png',
'image': 'images/guide/1.png',
'text': '步骤1打开应用信息点击通知管理选项',
},
{
@ -26,7 +26,7 @@ class _PermissionGuidancePageState extends State<PermissionGuidancePage> {
'text': '步骤2下滑点击呼叫提醒的通知选项',
},
{
'image': 'images/guide/tuya.png',
'image': 'images/guide/3.png',
'text': '步骤3选择在锁定屏幕上的选项设置',
},
{

View File

@ -104,8 +104,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
codecType: 'h264',
);
// textureId
AppLog.log('StartChartManage().videoWidth:${StartChartManage().videoWidth}');
AppLog.log('StartChartManage().videoHeight:${StartChartManage().videoHeight}');
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
@ -258,8 +256,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
}
///
///
void _processNextFrameFromBuffer() async {
final startTime = DateTime.now().microsecondsSinceEpoch;
//
if (state.isProcessingFrame) {
return;
@ -270,61 +271,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
// prefer I-frames: consume the I-frame with the smallest frameSeq
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
try {
// prefer I-frames: consume the I-frame with the smallest frameSeq
final iFrames = state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I).toList();
iFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
if (iFrames.isNotEmpty) {
// there are I-frames: take the one with the smallest frameSeq
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;
AppLog.log('送入解码器的P帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
'frameType:${frameType},messageId:${scpMessage!.MessageId}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
// no I-frame available: consume P-frames whose refIFrameSeq equals lastDecodedIFrameSeq
if (lastDecodedIFrameSeq != null) {
final validPFrames =
state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
if (iFrames.isNotEmpty) {
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
(f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I && f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
@ -337,7 +293,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
@ -346,12 +301,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.isProcessingFrame = false;
return;
}
// AppLog.log('送入解码器的I帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
lastDecodedIFrameSeq = minIFrameSeq;
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
@ -360,8 +314,61 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.isProcessingFrame = false;
return;
}
// no I-frame available: consume P-frames whose refIFrameSeq equals lastDecodedIFrameSeq
if (lastDecodedIFrameSeq != null) {
final validPFrames =
state.h264FrameBuffer.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeqI'] == lastDecodedIFrameSeq).toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P && f['frameSeq'] == minPFrame['frameSeq'] && f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null || frameType == null || frameSeq == null || frameSeqI == null || pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
}
// otherwise wait for the next I-frame to arrive
} finally {
final endTime = DateTime.now().microsecondsSinceEpoch;
final durationMs = (endTime - startTime) / 1000.0;
// > 5ms
if (durationMs > 5) {
debugPrint('[_processNextFrameFromBuffer] 耗时: ${durationMs.toStringAsFixed(2)} ms');
// 使
// AppLog.log('Frame processing took ${durationMs.toStringAsFixed(2)} ms');
}
}
// otherwise wait for the next I-frame to arrive
}
///
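The new try/finally block above times _processNextFrameFromBuffer with DateTime.now().microsecondsSinceEpoch and logs only when a call exceeds 5 ms. Here is a sketch of the same guard using Stopwatch, which avoids the manual microsecond arithmetic; the 5 ms threshold comes from the diff, while the logIfSlow wrapper name is made up for illustration.

import 'dart:async';

/// Runs [body] and prints a debug line only when it took longer than
/// [threshold]. Mirrors the try/finally timing added in the diff;
/// this helper is not part of the codebase.
Future<T> logIfSlow<T>(
  String label,
  Future<T> Function() body, {
  Duration threshold = const Duration(milliseconds: 5),
}) async {
  final stopwatch = Stopwatch()..start();
  try {
    return await body();
  } finally {
    stopwatch.stop();
    if (stopwatch.elapsed > threshold) {
      final ms = stopwatch.elapsedMicroseconds / 1000.0;
      print('[$label] took ${ms.toStringAsFixed(2)} ms');
    }
  }
}

Future<void> main() async {
  await logIfSlow('_processNextFrameFromBuffer', () async {
    // Simulated slow frame handling.
    await Future<void>.delayed(const Duration(milliseconds: 8));
  });
}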

View File

@ -29,15 +29,12 @@ class TalkViewNativeDecodePage extends StatefulWidget {
const TalkViewNativeDecodePage({Key? key}) : super(key: key);
@override
State<TalkViewNativeDecodePage> createState() =>
_TalkViewNativeDecodePageState();
State<TalkViewNativeDecodePage> createState() => _TalkViewNativeDecodePageState();
}
class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
with TickerProviderStateMixin {
class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage> with TickerProviderStateMixin {
final TalkViewNativeDecodeLogic logic = Get.put(TalkViewNativeDecodeLogic());
final TalkViewNativeDecodeState state =
Get.find<TalkViewNativeDecodeLogic>().state;
final TalkViewNativeDecodeState state = Get.find<TalkViewNativeDecodeLogic>().state;
final startChartManage = StartChartManage();
@override
@ -69,66 +66,48 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
// false 退
return false;
},
child: SizedBox(
child: Container(
width: 1.sw,
height: 1.sh,
color: Colors.black.withOpacity(0.7),
child: Stack(
alignment: Alignment.center,
children: <Widget>[
//
Obx(
() {
final double screenWidth = MediaQuery.of(context).size.width;
final double screenHeight = MediaQuery.of(context).size.height;
final double logicalWidth = MediaQuery.of(context).size.width;
final double logicalHeight = MediaQuery.of(context).size.height;
final double devicePixelRatio =
MediaQuery.of(context).devicePixelRatio;
//
final double physicalWidth = logicalWidth * devicePixelRatio;
final double physicalHeight = logicalHeight * devicePixelRatio;
//
const int rotatedImageWidth = 480; //
const int rotatedImageHeight = 864; //
//
final double scaleWidth = physicalWidth / rotatedImageWidth;
final double scaleHeight = physicalHeight / rotatedImageHeight;
max(scaleWidth, scaleHeight); //
// while loading or textureId is null, show the loading placeholder image
if (state.isLoading.isTrue || state.textureId.value == null) {
return Image.asset(
'images/main/monitorBg.png',
width: screenWidth,
height: screenHeight,
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
);
} else {
return Positioned.fill(
return Positioned(
top: 0,
left: 0,
right: 0,
child: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
// use RotatedBox to rotate the video
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // slight upscale to hide the white edge on iOS
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: Texture(
child: RotatedBox(
// use RotatedBox to rotate the video
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: state.isFullScreen.isFalse
? AspectRatio(
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
),
),
@ -151,19 +130,14 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 1.sw,
child: Obx(
() {
final String sec = (state.oneMinuteTime.value % 60)
.toString()
.padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60)
.toString()
.padLeft(2, '0');
final String sec = (state.oneMinuteTime.value % 60).toString().padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60).toString().padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text(
'$min:$sec',
style: TextStyle(
fontSize: 26.sp, color: Colors.white),
style: TextStyle(fontSize: 26.sp, color: Colors.white),
),
],
);
@ -177,9 +151,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 1.sw - 30.w * 2,
// height: 300.h,
margin: EdgeInsets.all(30.w),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.2),
borderRadius: BorderRadius.circular(20.h)),
decoration: BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
@ -191,9 +163,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
),
),
),
Obx(() => state.isLoading.isTrue
? buildRotationTransition()
: Container()),
Obx(() => state.isLoading.isTrue ? buildRotationTransition() : Container()),
Obx(() => state.isLongPressing.value
? Positioned(
top: 80.h,
@ -213,8 +183,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
SizedBox(width: 10.w),
Text(
'正在说话...'.tr,
style: TextStyle(
fontSize: 20.sp, color: Colors.white),
style: TextStyle(fontSize: 20.sp, color: Colors.white),
),
],
),
@ -246,10 +215,8 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 40.w,
height: 40.w,
image: state.isOpenVoice.value
? const AssetImage(
'images/main/icon_lockDetail_monitoringOpenVoice.png')
: const AssetImage(
'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
? const AssetImage('images/main/icon_lockDetail_monitoringOpenVoice.png')
: const AssetImage('images/main/icon_lockDetail_monitoringCloseVoice.png'))),
),
),
SizedBox(width: 50.w),
@ -264,11 +231,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenshot.png')),
child: Image(width: 40.w, height: 40.w, image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
),
),
SizedBox(width: 50.w),
@ -293,8 +256,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
width: 40.w,
height: 40.w,
fit: BoxFit.fill,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenRecording.png'),
image: const AssetImage('images/main/icon_lockDetail_monitoringScreenRecording.png'),
),
),
),
@ -330,13 +292,8 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
Text(
q,
style: TextStyle(
color: state.currentQuality.value == q
? AppColors.mainColor
: Colors.black,
fontWeight:
state.currentQuality.value == q
? FontWeight.bold
: FontWeight.normal,
color: state.currentQuality.value == q ? AppColors.mainColor : Colors.black,
fontWeight: state.currentQuality.value == q ? FontWeight.bold : FontWeight.normal,
fontSize: 28.sp,
),
),
@ -352,8 +309,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
);
},
child: Container(
child: Icon(Icons.high_quality_outlined,
color: Colors.white, size: 38.w),
child: Icon(Icons.high_quality_outlined, color: Colors.white, size: 38.w),
),
),
Visibility(
@ -377,61 +333,56 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
}
Widget bottomBottomBtnWidget() {
return Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
//
Obx(
() => bottomBtnItemWidget(
getAnswerBtnImg(),
getAnswerBtnName(),
Colors.white,
longPress: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
//
logic.startProcessingAudio();
state.isLongPressing.value = true;
}
},
longPressUp: () async {
//
logic.stopProcessingAudio();
state.isLongPressing.value = false;
},
onClick: () async {
if (state.talkStatus.value ==
TalkStatus.passiveCallWaitingAnswer) {
//
logic.initiateAnswerCommand();
}
},
),
),
bottomBtnItemWidget(
'images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red,
onClick: () {
//
logic.udpHangUpAction();
}),
bottomBtnItemWidget(
'images/main/icon_lockDetail_monitoringUnlock.png',
'开锁'.tr,
AppColors.mainColor,
onClick: () {
// if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
// state.listData.value.length > 0) {
// logic.udpOpenDoorAction();
// }
// if (UDPManage().remoteUnlock == 1) {
// logic.udpOpenDoorAction();
// showDeletPasswordAlertDialog(context);
// } else {
// logic.showToast('请在锁设置中开启远程开锁'.tr);
// }
logic.remoteOpenLock();
},
)
]);
return Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: <Widget>[
//
Obx(
() => bottomBtnItemWidget(
getAnswerBtnImg(),
getAnswerBtnName(),
Colors.white,
longPress: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
//
logic.startProcessingAudio();
state.isLongPressing.value = true;
}
},
longPressUp: () async {
//
logic.stopProcessingAudio();
state.isLongPressing.value = false;
},
onClick: () async {
if (state.talkStatus.value == TalkStatus.passiveCallWaitingAnswer) {
//
logic.initiateAnswerCommand();
}
},
),
),
bottomBtnItemWidget('images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red, onClick: () {
//
logic.udpHangUpAction();
}),
bottomBtnItemWidget(
'images/main/icon_lockDetail_monitoringUnlock.png',
'开锁'.tr,
AppColors.mainColor,
onClick: () {
// if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
// state.listData.value.length > 0) {
// logic.udpOpenDoorAction();
// }
// if (UDPManage().remoteUnlock == 1) {
// logic.udpOpenDoorAction();
// showDeletPasswordAlertDialog(context);
// } else {
// logic.showToast('请在锁设置中开启远程开锁'.tr);
// }
logic.remoteOpenLock();
},
)
]);
}
String getAnswerBtnImg() {
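The video widget in this file now wraps the Texture in an AspectRatio of StartChartManage().videoWidth / StartChartManage().videoHeight when not in fullscreen, instead of relying on Transform.scale. A stripped-down Dart sketch of that layout choice follows; the VideoTextureView name and its constructor parameters are placeholders, with the real values coming from StartChartManage and the GetX state.

import 'package:flutter/material.dart';

/// Shows a video texture either letterboxed to its native aspect ratio
/// or stretched to fill the parent, depending on [isFullScreen].
class VideoTextureView extends StatelessWidget {
  const VideoTextureView({
    super.key,
    required this.textureId,
    required this.videoWidth,
    required this.videoHeight,
    required this.isFullScreen,
    this.quarterTurns = 0,
  });

  final int textureId;
  final double videoWidth;   // e.g. 480 (placeholder value)
  final double videoHeight;  // e.g. 864 (placeholder value)
  final bool isFullScreen;
  final int quarterTurns;    // rotation in 90-degree steps, as in the diff

  @override
  Widget build(BuildContext context) {
    final texture = Texture(
      textureId: textureId,
      filterQuality: FilterQuality.medium,
    );
    return RotatedBox(
      quarterTurns: quarterTurns,
      child: isFullScreen
          ? texture
          : AspectRatio(aspectRatio: videoWidth / videoHeight, child: texture),
    );
  }
}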

View File

@ -69,7 +69,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
child: Container(
width: 1.sw,
height: 1.sh,
color: Colors.blue,
color: Colors.black.withOpacity(0.7),
child: Stack(
alignment: Alignment.center,
children: <Widget>[
@ -92,33 +92,29 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox(
width: StartChartManage().videoHeight.w,
height: StartChartManage().videoWidth.h,
child: RotatedBox(
// use RotatedBox to rotate the video
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // slight upscale to hide the white edge on iOS
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: state.isFullScreen.isFalse
? AspectRatio(
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: Texture(
child: RotatedBox(
// use RotatedBox to rotate the video
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // slight upscale to hide the white edge on iOS
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: state.isFullScreen.isFalse
? AspectRatio(
aspectRatio: StartChartManage().videoWidth / StartChartManage().videoHeight,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
),
);
@ -130,7 +126,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
state.isFullScreen.value = !state.isFullScreen.value;
},
child: Obx(
() => Text(state.isFullScreen.isTrue ? '退出全屏' : '全屏'),
() => Text(state.isFullScreen.isTrue ? '退出全屏' : '全屏'),
),
),
Obx(() => state.isLoading.isTrue
@ -168,8 +164,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
width: 1.sw - 30.w * 2,
// height: 300.h,
margin: EdgeInsets.all(30.w),
decoration:
BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
decoration: BoxDecoration(color: Colors.black.withOpacity(0.2), borderRadius: BorderRadius.circular(20.h)),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
@ -249,10 +244,7 @@ class _TalkViewNativeDecodePageDebugState extends State<TalkViewNativeDecodePage
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
child: Image(width: 40.w, height: 40.w, image: const AssetImage('images/main/icon_lockDetail_monitoringScreenshot.png')),
),
),
SizedBox(width: 50.w),

View File

@ -67,7 +67,7 @@ class TalkViewNativeDecodeState {
//
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
RxBool isOpenVoice = true.obs; //
RxBool isOpenVoice = false.obs; //
RxBool isRecordingScreen = false.obs; //
RxBool isRecordingAudio = false.obs; //
Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; //
@ -109,8 +109,8 @@ class TalkViewNativeDecodeState {
// H264 frame buffer related fields
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264 frame buffer
final int maxFrameBufferSize = 50; //
final int targetFps = 60; // ,native的缓冲区
final int maxFrameBufferSize = 25; //
final int targetFps = 25; // ,native的缓冲区
Timer? frameProcessTimer; //
bool isProcessingFrame = false; //
int lastProcessedTimestamp = 0; //
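In this hunk targetFps drops from 60 to 25 and maxFrameBufferSize from 50 to 25. For reference, a short sketch of how a target frame rate typically maps onto the periodic frame-processing timer used in the logic file; the interval formula here is an assumption inferred from the intervalMs log message, not copied from the source.

import 'dart:async';

/// Starts a periodic timer that fires roughly [targetFps] times per second,
/// calling [processFrame] on each tick. Returns the timer so it can be cancelled.
Timer startFrameProcessTimer(int targetFps, void Function() processFrame) {
  final intervalMs = (1000 / targetFps).round(); // 25 fps -> 40 ms per tick
  return Timer.periodic(Duration(milliseconds: intervalMs), (_) => processFrame());
}

void main() {
  var frames = 0;
  final timer = startFrameProcessTimer(25, () => frames++);
  // Stop after one second; with a 40 ms interval this handles roughly 25 frames.
  Timer(const Duration(seconds: 1), () {
    timer.cancel();
    print('processed $frames frames in 1 s');
  });
}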