Merge branch 'develop_sky' into 'release_sky'

Develop sky

See merge request StarlockTeam/app-starlock!271
李仪 2025-09-03 09:20:35 +00:00
commit c9683b4cba
60 changed files with 43810 additions and 43335 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1162,5 +1162,11 @@
"锁语音包设置": "鎖語音包設定",
"(中国台湾)": "(中国台湾)",
"男声": "男聲",
"女声": "女聲"
"女声": "女聲",
"您的图像和视频数据仅保留": "您的圖像和視頻數據僅保留",
"后图像和视频数据将会失效,开通": "后圖像和視頻數據將會失效,開通",
"云存会员": "雲存會員",
"服务,图像视频信息随心存!": "服務,圖像視頻資訊隨心存!",
"图像": "圖像",
"视频": "視頻"
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1172,5 +1172,11 @@
"语音包设置": "语音包设置",
"(中国台湾)": "(中国台湾)",
"男声": "男声",
"女声": "女声"
"女声": "女声",
"您的图像和视频数据仅保留": "您的图像和视频数据仅保留",
"后图像和视频数据将会失效,开通": "后图像和视频数据将会失效,开通",
"云存会员": "云存会员",
"服务,图像视频信息随心存!": "服务,图像视频信息随心存!",
"图像": "图像",
"视频": "视频"
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1166,5 +1166,11 @@
"语音包设置": "Configurações do pacote de voz",
"(中国台湾)": "(中国台湾)",
"男声": "Macho",
"女声": "Garota"
"女声": "Garota",
"您的图像和视频数据仅保留": "Seus dados de imagem e vídeo são retidos apenas",
"后图像和视频数据将会失效,开通": "Depois disso, os dados de imagem e vídeo serão inválidos e ativados",
"云存会员": "Associação de armazenamento em nuvem",
"服务,图像视频信息随心存!": "Informações de serviço, imagem e vídeo estão no seu coração!",
"图像": "imagem",
"视频": "Vídeo"
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1161,5 +1161,11 @@
"锁语音包设置": "Закључајте подешавања говорног пакета",
"(中国台湾)": "(中国台湾)",
"男声": "мушки глас",
"女声": "женски глас"
"女声": "женски глас",
"您的图像和视频数据仅保留": "Ваши подаци о слици и видео записима се задржавају само",
"后图像和视频数据将会失效,开通": "Након тога, сликовни и видео подаци ће бити неважећи и активирани",
"云存会员": "Чланство у облаку за складиштење",
"服务,图像视频信息随心存!": "Сервис , слике и видео информације су у вашем срцу!",
"图像": "Слика",
"视频": "Пријава"
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1161,5 +1161,11 @@
"锁语音包设置": "鎖語音包設定",
"(中国台湾)": "(中国台湾)",
"男声": "男聲",
"女声": "女聲"
"女声": "女聲",
"您的图像和视频数据仅保留": "您的圖像和視頻數據僅保留",
"后图像和视频数据将会失效,开通": "后圖像和視頻數據將會失效,開通",
"云存会员": "雲存會員",
"服务,图像视频信息随心存!": "服務,圖像視頻資訊隨心存!",
"图像": "圖像",
"视频": "視頻"
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -63,7 +63,6 @@
"授权管理员拥有操作这把锁的重要权限,请确保只发给我你信任的人": "授权管理员拥有操作这把锁的重要权限,请确保只发给我你信任的人",
"功能开启后,你将可以通过网关远程开锁。此功能的开启和关闭只能在锁附近通过手机蓝牙进行。": "功能开启后,你将可以通过网关远程开锁。此功能的开启和关闭只能在锁附近通过手机蓝牙进行。",
"此功能的开启和关闭只能在锁附近通过手机蓝牙进行": "此功能的开启和关闭只能在锁附近通过手机蓝牙进行",
"功能开启后,你将可以通过网关远程开锁。": "功能开启后,你将可以通过网关远程开锁。",
"排列方式": "排列方式",
"早到榜": "早到榜",
@ -1102,7 +1101,7 @@
"支持的国家": "支持的国家",
"支持的国家值": "美国、加拿大、英国、澳大利亚、印度、德国、法国、意大利、西班牙、日本",
"操作流程": "操作流程",
"操作流程值":"1 用智能锁APP添加锁和网关\n\n2 在APP里开启锁的远程开锁功能这个功能默认是关闭的。如果没有这个选项则锁不支持Alexa \n\n3 在Alexa中添加Skill并用智能锁APP的账号和密码进行授权。授权成功后就可以发现账号下的设备\n\n4 在Alexa app里找到锁开启语音开锁的功能并设置语言密码\n\n5 可以通过Alexa操作锁了",
"操作流程值": "1 用智能锁APP添加锁和网关\n\n2 在APP里开启锁的远程开锁功能这个功能默认是关闭的。如果没有这个选项则锁不支持Alexa \n\n3 在Alexa中添加Skill并用智能锁APP的账号和密码进行授权。授权成功后就可以发现账号下的设备\n\n4 在Alexa app里找到锁开启语音开锁的功能并设置语言密码\n\n5 可以通过Alexa操作锁了",
"Google Home": "Google Home",
"Action name": "Action name",
"ScienerSmart": "ScienerSmart",
@ -1174,5 +1173,11 @@
"语音包设置": "语音包设置",
"(中国台湾)": "(中国台湾)",
"男声": "男声",
"女声": "女声"
}
"女声": "女声",
"您的图像和视频数据仅保留": "您的图像和视频数据仅保留",
"后图像和视频数据将会失效,开通": "后图像和视频数据将会失效,开通",
"云存会员": "云存会员",
"服务,图像视频信息随心存!": "服务,图像视频信息随心存!",
"图像": "图像",
"视频": "视频"
}

View File

@ -48,7 +48,7 @@ class ReadLockCurrentVoicePacketReply extends Reply {
CommandType commandType, List<int> dataDetail)
: super.parseData(commandType, dataDetail) {
data = dataDetail;
status = data[6];
status = data[2];
errorWithStstus(status);
}
}

View File

@ -55,7 +55,7 @@ class SetVoicePackageFinalResultReply extends Reply {
CommandType commandType, List<int> dataDetail)
: super.parseData(commandType, dataDetail) {
data = dataDetail;
status = data[6];
status = data[2];
errorWithStstus(status);
}
}
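
Both reply parsers above now read the status from byte 2 of the raw reply rather than byte 6, which matches the comment later in this diff that the payload is CmdID (2 bytes) + Status (1 byte) followed by data. A minimal sketch of the layout that implies; the helper name is illustrative, not part of the codebase:

// Assumed reply layout: [0..1] command ID, [2] status, [3..] payload.
int statusOf(List<int> dataDetail) {
  if (dataDetail.length < 3) {
    throw ArgumentError('reply too short: ${dataDetail.length} bytes');
  }
  return dataDetail[2]; // 0x00 is treated as success elsewhere in this diff
}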

View File

@ -18,9 +18,11 @@ import 'package:star_lock/blue/io_protocol/io_processOtaUpgrade.dart';
import 'package:star_lock/blue/io_protocol/io_readAdminPassword.dart';
import 'package:star_lock/blue/io_protocol/io_readSupportFunctionsNoParameters.dart';
import 'package:star_lock/blue/io_protocol/io_readSupportFunctionsWithParameters.dart';
import 'package:star_lock/blue/io_protocol/io_readVoicePackageFinalResult.dart';
import 'package:star_lock/blue/io_protocol/io_referEventRecordTime.dart';
import 'package:star_lock/blue/io_protocol/io_setSupportFunctionsNoParameters.dart';
import 'package:star_lock/blue/io_protocol/io_setSupportFunctionsWithParameters.dart';
import 'package:star_lock/blue/io_protocol/io_setVoicePackageFinalResult.dart';
import 'package:star_lock/blue/io_protocol/io_timing.dart';
import 'package:star_lock/blue/io_protocol/io_voicePackageConfigure.dart';
import 'package:star_lock/blue/io_protocol/io_voicePackageConfigureProcess.dart';
@ -317,6 +319,18 @@ class CommandReciverManager {
commandType, data);
}
break;
case CommandType.readLockCurrentVoicePacket:
{
reply =
ReadLockCurrentVoicePacketReply.parseData(commandType, data);
}
break;
case CommandType.setLockCurrentVoicePacket:
{
reply =
SetVoicePackageFinalResultReply.parseData(commandType, data);
}
break;
case CommandType.generalExtendedCommond:
{
//

View File

@ -125,4 +125,9 @@ class DoorLockLogDataItem {
data['recordDetailStr'] = recordDetailStr;
return data;
}
@override
String toString() {
return 'DoorLockLogDataItem{recordId: $recordId, lockId: $lockId, lockAlias: $lockAlias, recordType: $recordType, recordTypeName: $recordTypeName, username: $username, operateDate: $operateDate, imagesUrl: $imagesUrl, videoUrl: $videoUrl, headUrl: $headUrl, userid: $userid, keyboardPwd: $keyboardPwd, recordStr: $recordStr, recordDetailStr: $recordDetailStr}';
}
}

View File

@ -3,12 +3,15 @@ import 'dart:async';
import 'package:flutter_blue_plus/flutter_blue_plus.dart';
import 'package:get/get.dart';
import 'package:star_lock/apm/apm_helper.dart';
import 'package:star_lock/appRouters.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/common/XSConstantMacro/XSConstantMacro.dart';
import 'package:star_lock/main/lockDetail/doorLockLog/date_time_extensions.dart';
import 'package:star_lock/main/lockDetail/doorLockLog/doorLockLog_entity.dart';
import 'package:star_lock/main/lockDetail/doorLockLog/doorLockLog_state.dart';
import 'package:star_lock/main/lockDetail/lockOperatingRecord/lockOperatingRecordGetLastRecordTime_entity.dart';
import 'package:star_lock/mine/valueAddedServices/advancedFeaturesWeb/advancedFeaturesWeb_entity.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/dateTool.dart';
import 'package:star_lock/tools/eventBusEventManage.dart';
@ -402,7 +405,20 @@ class DoorLockLogLogic extends BaseGetXController {
void refreshWeek() {
_setWeekRange();
}
getWebPlayUrl() async {
final AdvancedFeaturesWebEntity entity =
await ApiRepository.to.getServicePackageBuyUrl();
if (entity.errorCode!.codeIsSuccessful) {
state.cloudStorageWebViewUrl.value = entity.data!.cloudStorage!;
final uploadReportBuyRequest = await ApiRepository.to
.uploadReportBuyRequest(lockId: state.keyInfos.value.lockId!);
if (uploadReportBuyRequest.errorCode!.codeIsSuccessful) {
Get.toNamed(Routers.advancedFeaturesWebPage, arguments: <String, int>{
'webBuyType': XSConstantMacro.webBuyTypeCloudStorage,
});
}
}
}
@override
Future<void> onClose() async {
super.onClose();

View File

@ -1,4 +1,5 @@
import 'package:flustars/flustars.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
@ -284,6 +285,17 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
return formatter.format(dateTime);
}
bool _checkIsVideoOrImagesType(DoorLockLogDataItem item) {
final recordType = item.recordType;
switch (recordType) {
case 130: // image capture record ('图像')
case 220: // video record ('视频')
return true;
default:
return false;
}
}
String _buildIDByType(DoorLockLogDataItem item) {
final recordType = item.recordType;
switch (recordType) {
@ -407,6 +419,20 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
itemCount: state.lockLogItemList.length,
contentsBuilder: (BuildContext context, int index) {
final DoorLockLogDataItem timelineData = state.lockLogItemList[index];
// 👇 work out whether this row is an image/video record and whether its media has expired
int? firstVideoIndex = state.lockLogItemList
.indexWhere((item) => _checkIsVideoOrImagesType(item));
bool isInvalid = _checkIsVideoOrImagesType(timelineData) &&
((timelineData.imagesUrl == null &&
timelineData.videoUrl == null) ||
(timelineData.videoUrl == '' && timelineData.imagesUrl == ''));
String typeText = '';
if (timelineData.recordType == 130) {
typeText = '图像'.tr;
} else if (timelineData.recordType == 220) {
typeText = '视频'.tr;
}
return GestureDetector(
onTap: () {
Get.toNamed(
@ -428,17 +454,37 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
// use a horizontal SingleChildScrollView so the long title can scroll
SingleChildScrollView(
scrollDirection: Axis.horizontal, //
child: Text(
_buildIDByType(timelineData),
child: RichText(
textAlign: TextAlign.left,
style: TextStyle(
color: _buildTextColorByType(timelineData),
fontSize: 24.sp,
fontWeight: FontWeight.w600,
text: TextSpan(
style: TextStyle(
color: _buildTextColorByType(timelineData),
fontSize: 24.sp,
fontWeight: FontWeight.w600,
),
children: [
TextSpan(
text: _buildIDByType(timelineData) +
(isInvalid
? '${typeText}' +
'已失效'.tr +
''
: ''),
),
WidgetSpan(
alignment: PlaceholderAlignment.middle,
child: Visibility(
visible: isInvalid,
child: Icon(
Icons.error,
size: 24.sp,
color: Colors.red,
),
),
),
],
),
//
maxLines: 1,
//
overflow: TextOverflow.ellipsis,
),
),
@ -455,8 +501,71 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
),
),
SizedBox(
height: 20.h,
height: 12.h,
),
Visibility(
visible: _checkIsVideoOrImagesType(timelineData) &&
index == firstVideoIndex,
child: GestureDetector(
onTap: () async {
await logic.getWebPlayUrl();
},
child: Container(
padding: EdgeInsets.all(8.w),
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(8.r)),
),
child: RichText(
textAlign: TextAlign.center,
text: TextSpan(
children: [
//
TextSpan(
text:
'${'您的图像和视频数据仅保留'.tr} ${state.rollingStorageDays.value} ${''.tr} ,${state.rollingStorageDays.value} ${''.tr} ${'后图像和视频数据将会失效,开通'.tr}',
style: TextStyle(
color: Colors.grey,
fontSize: 16.sp,
fontWeight: FontWeight.w600,
height: 1.8,
),
),
// 🔥
TextSpan(
text: '云存会员'.tr,
style: TextStyle(
color: AppColors.mainColor,
fontSize: 22.sp,
fontWeight: FontWeight.w800,
//
decoration: TextDecoration.underline,
decorationThickness: 1.5,
height: 1.8,
),
recognizer: TapGestureRecognizer()
..onTap = () async {
// 👉
print('点击了“云存会员”');
await logic.getWebPlayUrl();
// Navigator.push(context, MaterialPageRoute(builder: ...));
},
),
//
TextSpan(
text: '服务,图像视频信息随心存!'.tr,
style: TextStyle(
color: Colors.grey,
fontSize: 16.sp,
fontWeight: FontWeight.w600,
height: 1.8,
),
),
],
),
),
),
),
)
],
),
),
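
The banner added above is assembled from the localization keys introduced in the JSON files earlier in this diff, with the rolling-storage day count spliced in between them. A rough, simplified sketch of that assembly ('.tr' is GetX's translation lookup; days stands in for state.rollingStorageDays.value, and the real widget renders this as a RichText with a tappable '云存会员' span):

// Simplified: retention notice + day count + expiry notice + membership link + tail.
String cloudStorageBanner(int days) =>
    '${'您的图像和视频数据仅保留'.tr} $days ${'后图像和视频数据将会失效,开通'.tr}'
    '${'云存会员'.tr}'
    '${'服务,图像视频信息随心存!'.tr}';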

View File

@ -1,4 +1,5 @@
import 'package:get/get.dart';
import 'package:get/get_rx/get_rx.dart';
import 'package:star_lock/common/XSConstantMacro/XSConstantMacro.dart';
import 'package:star_lock/main/lockDetail/doorLockLog/doorLockLog_entity.dart';
import 'package:star_lock/tools/advancedCalendar/src/controller.dart';
@ -73,4 +74,6 @@ class DoorLockLogState {
int logCountPage = 10; //
Rx<DateTime> currentSelectDate = DateTime.now().obs;
bool isLockReceiveResponse = false; //
RxString cloudStorageWebViewUrl = ''.obs;
RxInt rollingStorageDays = 3.obs; // how many days image/video data is retained
}

View File

@ -89,27 +89,27 @@ class _CatEyeCustomModePageState extends State<CatEyeCustomModePage> {
SizedBox(
height: 30.h,
),
Container(
margin: EdgeInsets.only(left: 20.w),
child: CommonItem(
leftTitel: '实时画面'.tr,
rightTitle: state.realTimeMode.value,
isHaveLine: false,
isHaveDirection: true,
isHaveRightWidget: false,
action: () {
Navigator.pushNamed(context, Routers.liveVideoPage,
arguments: {
'lockSetInfoData': state.lockSetInfoData.value,
'catEyeConfigData': state.lockSetInfoData.value
.lockSettingInfo!.catEyeConfig!.isNotEmpty
? state.lockSetInfoData.value.lockSettingInfo!
.catEyeConfig![0]
: null
});
},
),
)
// Container(
// margin: EdgeInsets.only(left: 20.w),
// child: CommonItem(
// leftTitel: '实时画面'.tr,
// rightTitle: state.realTimeMode.value,
// isHaveLine: false,
// isHaveDirection: true,
// isHaveRightWidget: false,
// action: () {
// Navigator.pushNamed(context, Routers.liveVideoPage,
// arguments: {
// 'lockSetInfoData': state.lockSetInfoData.value,
// 'catEyeConfigData': state.lockSetInfoData.value
// .lockSettingInfo!.catEyeConfig!.isNotEmpty
// ? state.lockSetInfoData.value.lockSettingInfo!
// .catEyeConfig![0]
// : null
// });
// },
// ),
// )
],
),
),

View File

@ -2,6 +2,7 @@ import 'dart:async';
import 'package:flutter_blue_plus/flutter_blue_plus.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/blue/blue_manage.dart';
import 'package:star_lock/blue/io_protocol/io_setSupportFunctionsWithParameters.dart';
import 'package:star_lock/blue/io_reply.dart';
@ -82,30 +83,31 @@ class CatEyeSetLogic extends BaseGetXController {
//
cancelBlueConnetctToastTimer();
dismissEasyLoading();
AppLog.log('state.settingOptions.value:${state.settingOptions.value}');
switch (state.settingOptions.value) {
case 1: //
{
updateAutoLightScreenConfig();
await updateAutoLightScreenConfig();
}
break;
case 2: //
{
updateStayWarnConfig();
await updateStayWarnConfig();
}
break;
case 3: //
{
updateAbnormalWarnConfig();
await updateAbnormalWarnConfig();
}
break;
case 4: //
{
updateLightScreenTimeConfig();
await updateLightScreenTimeConfig();
}
break;
case 5: //
{
updateCatEyeModeConfig();
await updateCatEyeModeConfig();
}
break;
default:
@ -288,6 +290,7 @@ class CatEyeSetLogic extends BaseGetXController {
.catEyeConfig![0]
.catEyeModeConfig
?.realTimeMode = state.catEyeConfig.value.realTimeMode;
eventBus
.fire(PassCurrentLockInformationEvent(state.lockSetInfoData.value));
}
@ -456,6 +459,10 @@ class CatEyeSetLogic extends BaseGetXController {
}
void sendBlueMessage() {
showEasyLoading();
showBlueConnetctToastTimer(action: () {
dismissEasyLoading();
});
final message = _buildCatEyeSetBlueMessage();
BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState connectionState) async {

View File

@ -80,12 +80,12 @@ class _CatEyeSetPageState extends State<CatEyeSetPage> {
isHaveRightWidget: true,
rightWidget: _otherToDoSwitch(2),
)),
Obx(() => CommonItem(
leftTitel: '异常警告'.tr,
rightTitle: '',
isHaveLine: true,
isHaveRightWidget: true,
rightWidget: _otherToDoSwitch(3))),
// Obx(() => CommonItem(
// leftTitel: '异常警告'.tr,
// rightTitle: '',
// isHaveLine: true,
// isHaveRightWidget: true,
// rightWidget: _otherToDoSwitch(3))),
//ToDo
CommonItem(
leftTitel: '呼叫目标'.tr,

View File

@ -12,6 +12,7 @@ import 'package:star_lock/blue/blue_manage.dart';
import 'package:star_lock/blue/io_protocol/io_getDeviceModel.dart';
import 'package:star_lock/blue/io_protocol/io_otaUpgrade.dart';
import 'package:star_lock/blue/io_protocol/io_processOtaUpgrade.dart';
import 'package:star_lock/blue/io_protocol/io_readVoicePackageFinalResult.dart';
import 'package:star_lock/blue/io_protocol/io_setVoicePackageFinalResult.dart';
import 'package:star_lock/blue/io_protocol/io_voicePackageConfigure.dart';
import 'package:star_lock/blue/io_protocol/io_voicePackageConfigureProcess.dart';
@ -55,9 +56,12 @@ class SpeechLanguageSettingsLogic extends BaseGetXController {
handleVoiceConfigureThrottled(reply);
} else if (reply is SetVoicePackageFinalResultReply) {
handleSetResult(reply);
} else if (reply is ReadLockCurrentVoicePacketReply) {
handleLockCurrentVoicePacketResult(reply);
}
});
await initList();
readLockLanguage();
}
///
@ -435,9 +439,24 @@ class SpeechLanguageSettingsLogic extends BaseGetXController {
_handlerVoicePackageConfigureConfirmation(
VoicePackageConfigureConfirmationReply reply,
) async {
final int status = reply.data[2];
switch (status) {
case 0x00:
showEasyLoading();
showBlueConnetctToastTimer(action: () {
dismissEasyLoading();
});
final LoginEntity entity = await ApiRepository.to.settingCurrentVoiceTimbre(
data: {
'lang': state.tempLangStr.value,
'timbre': state.tempTimbreStr.value,
},
lockId: state.lockSetInfoData.value.lockId!,
);
if (entity.errorCode!.codeIsSuccessful) {
showSuccess('设置成功'.tr, something: () async {
state.lockSetInfoData.value.lockSettingInfo?.currentVoiceTimbre?.lang =
state.tempLangStr.value;
state.lockSetInfoData.value.lockSettingInfo?.currentVoiceTimbre
?.timbre = state.tempTimbreStr.value;
await BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState deviceConnectionState) async {
if (deviceConnectionState == BluetoothConnectionState.connected) {
@ -454,10 +473,8 @@ class SpeechLanguageSettingsLogic extends BaseGetXController {
showBlueConnetctToast();
}
});
break;
default:
showToast('设置'.tr + '失败'.tr);
break;
await Future.delayed(Duration(seconds: 1));
});
}
}
@ -466,24 +483,6 @@ class SpeechLanguageSettingsLogic extends BaseGetXController {
switch (status) {
case 0x00:
cancelBlueConnetctToastTimer();
final LoginEntity entity =
await ApiRepository.to.settingCurrentVoiceTimbre(
data: {
'lang': state.tempLangStr.value,
'timbre': state.tempTimbreStr.value,
},
lockId: state.lockSetInfoData.value.lockId!,
);
if (entity.errorCode!.codeIsSuccessful) {
showSuccess('设置成功'.tr, something: () {
state.lockSetInfoData.value.lockSettingInfo?.currentVoiceTimbre
?.lang = state.tempLangStr.value;
state.lockSetInfoData.value.lockSettingInfo?.currentVoiceTimbre
?.timbre = state.tempTimbreStr.value;
eventBus.fire(
PassCurrentLockInformationEvent(state.lockSetInfoData.value));
});
}
dismissEasyLoading();
break;
default:
@ -491,4 +490,74 @@ class SpeechLanguageSettingsLogic extends BaseGetXController {
break;
}
}
void handleLockCurrentVoicePacketResult(
ReadLockCurrentVoicePacketReply reply) {
final int status = reply.data[2];
switch (status) {
case 0x00:
//
cancelBlueConnetctToastTimer();
const int languageCodeStartIndex = 3;
const int languageCodeLength = 20;
const int languageCodeEndIndex =
languageCodeStartIndex + languageCodeLength; // 23
if (reply.data.length < languageCodeEndIndex) {
throw Exception(
'Reply data is too short to contain LanguageCode. Expected at least $languageCodeEndIndex bytes, got ${reply.data.length}');
}
List<int> languageCodeBytes =
reply.data.sublist(languageCodeStartIndex, languageCodeEndIndex);
String languageCode = String.fromCharCodes(languageCodeBytes);
languageCode = languageCode.trim(); //
languageCode =
languageCode.replaceAll('\u0000', ''); // (null bytes)
if (languageCode != null && languageCode != '') {
final indexWhere = state.languages
.indexWhere((element) => element.lang == languageCode);
if (indexWhere != -1) {
print('锁板上的语言是:$languageCode,下标是:$indexWhere');
state.selectPassthroughListIndex.value = indexWhere;
}
}
dismissEasyLoading();
break;
case 0x06:
//
final List<int> token = reply.data.sublist(2, 6);
if (state.data != null) {
sendFileToDevice(state.data!, token);
}
break;
default:
break;
}
}
void readLockLanguage() async {
showEasyLoading();
showBlueConnetctToastTimer(action: () {
dismissEasyLoading();
});
await BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState deviceConnectionState) async {
if (deviceConnectionState == BluetoothConnectionState.connected) {
await BlueManage().writeCharacteristicWithResponse(
ReadLockCurrentVoicePacket(
lockID: BlueManage().connectDeviceName,
).packageData(),
);
} else if (deviceConnectionState ==
BluetoothConnectionState.disconnected) {
dismissEasyLoading();
cancelBlueConnetctToastTimer();
showBlueConnetctToast();
}
});
}
}
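
For reference, the LanguageCode parsed above is a fixed 20-byte, NUL-padded text field that starts at byte 3 of the reply (after the 2-byte command ID and 1-byte status). A small self-contained example of that extraction, using a made-up 23-byte reply:

// Hypothetical reply: 2 command-ID bytes, 1 status byte, then a 20-byte
// LanguageCode field padded with NULs.
final reply = <int>[
  0x01, 0x02, 0x00,
  ...'zh_CN'.codeUnits,
  ...List.filled(15, 0),
];
final languageCode = String.fromCharCodes(reply.sublist(3, 23))
    .replaceAll('\u0000', '')
    .trim(); // 'zh_CN'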

View File

@ -63,6 +63,12 @@ class _SpeechLanguageSettingsPageState
final soundType = state.soundTypeList.value[index];
return CommonItem(
leftTitel: soundType,
leftTitleStyle: TextStyle(
fontSize: 20.sp,
fontWeight: state.selectSoundTypeIndex.value == index
? FontWeight.bold
: null,
),
rightTitle: '',
isHaveLine: !isLastItem,
isHaveDirection: false,
@ -94,35 +100,44 @@ class _SpeechLanguageSettingsPageState
height: 8.h,
),
//
Container(
color: Colors.transparent,
child: Column(
children: List.generate(
state.languages.length,
(index) {
final item = state.languages[index];
return CommonItem(
leftTitel: item.langText,
rightTitle: '',
isHaveLine: true,
isHaveDirection: false,
isHaveRightWidget: true,
leftTitleMaxWidth: 0.9.sw, //
rightWidget:
state.selectPassthroughListIndex.value == index
? Image(
image: const AssetImage(
'images/icon_item_checked.png'),
width: 30.w,
height: 30.w,
fit: BoxFit.contain,
)
: Container(),
action: () {
state.selectPassthroughListIndex.value = index;
},
);
},
Obx(
() => Container(
color: Colors.transparent,
child: Column(
children: List.generate(
state.languages.length,
(index) {
final item = state.languages[index];
return CommonItem(
leftTitel: item.langText,
leftTitleStyle: TextStyle(
fontSize: 20.sp,
fontWeight: state.selectPassthroughListIndex.value == index
? FontWeight.bold
: null,
),
rightTitle: '',
isHaveLine: true,
isHaveDirection: false,
isHaveRightWidget: true,
leftTitleMaxWidth: 0.9.sw,
//
rightWidget:
state.selectPassthroughListIndex.value == index
? Image(
image: const AssetImage(
'images/icon_item_checked.png'),
width: 30.w,
height: 30.w,
fit: BoxFit.contain,
)
: Container(),
action: () {
state.selectPassthroughListIndex.value = index;
},
);
},
),
),
),
),
@ -133,16 +148,6 @@ class _SpeechLanguageSettingsPageState
}
List<Widget> _buildList() {
final appLocalLanguages = state.languages;
return List.generate(
appLocalLanguages.length,
(index) => _buildItem(
appLocalLanguages[index],
index,
),
);
}
@override
void dispose() {
@ -152,24 +157,4 @@ class _SpeechLanguageSettingsPageState
}
}
_buildItem(PassthroughItem item, index) {
return CommonItem(
leftTitel: item.langText,
rightTitle: '',
isHaveLine: true,
isHaveDirection: false,
isHaveRightWidget: true,
rightWidget: state.selectPassthroughListIndex.value == index
? Image(
image: const AssetImage('images/icon_item_checked.png'),
width: 30.w,
height: 30.w,
fit: BoxFit.contain,
)
: Container(),
action: () {
state.selectPassthroughListIndex.value = index;
},
);
}
}

View File

@ -12,6 +12,7 @@ import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/blue/blue_manage.dart';
import 'package:star_lock/blue/io_protocol/io_getDeviceModel.dart';
import 'package:star_lock/blue/io_protocol/io_readVoicePackageFinalResult.dart';
import 'package:star_lock/blue/io_protocol/io_setVoicePackageFinalResult.dart';
import 'package:star_lock/blue/io_protocol/io_voicePackageConfigure.dart';
import 'package:star_lock/blue/io_protocol/io_voicePackageConfigureProcess.dart';
import 'package:star_lock/blue/io_reply.dart';
@ -53,6 +54,8 @@ class LockVoiceSettingLogic extends BaseGetXController {
handleVoiceConfigureThrottled(reply);
} else if (reply is ReadLockCurrentVoicePacketReply) {
handleLockCurrentVoicePacketResult(reply);
} else if (reply is SetVoicePackageFinalResultReply) {
handleSetResult(reply);
}
});
initList();
@ -77,6 +80,10 @@ class LockVoiceSettingLogic extends BaseGetXController {
Future<void> _executeLogic(
VoicePackageConfigureConfirmationReply reply) async {
showEasyLoading();
showBlueConnetctToastTimer(action: () {
dismissEasyLoading();
});
final LoginEntity entity = await ApiRepository.to.settingCurrentVoiceTimbre(
data: {
'lang': state.tempLangStr.value,
@ -85,18 +92,47 @@ class LockVoiceSettingLogic extends BaseGetXController {
lockId: state.lockSetInfoData.value.lockId!,
);
if (entity.errorCode!.codeIsSuccessful) {
showSuccess('设置成功'.tr, something: () {
showSuccess('设置成功'.tr, something: () async {
state.lockSetInfoData.value.lockSettingInfo?.currentVoiceTimbre?.lang =
state.tempLangStr.value;
state.lockSetInfoData.value.lockSettingInfo?.currentVoiceTimbre
?.timbre = state.tempTimbreStr.value;
await BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState deviceConnectionState) async {
if (deviceConnectionState == BluetoothConnectionState.connected) {
await BlueManage().writeCharacteristicWithResponse(
SetVoicePackageFinalResult(
lockID: BlueManage().connectDeviceName,
languageCode: state.tempLangStr.value,
).packageData(),
);
} else if (deviceConnectionState ==
BluetoothConnectionState.disconnected) {
dismissEasyLoading();
cancelBlueConnetctToastTimer();
showBlueConnetctToast();
}
});
await Future.delayed(Duration(seconds: 1));
eventBus
.fire(PassCurrentLockInformationEvent(state.lockSetInfoData.value));
Get.offAllNamed(Routers.starLockMain);
});
}
dismissEasyLoading();
}
void handleSetResult(SetVoicePackageFinalResultReply reply) async {
final int status = reply.data[2];
switch (status) {
case 0x00:
cancelBlueConnetctToastTimer();
dismissEasyLoading();
break;
default:
showToast('设置'.tr + '失败'.tr);
break;
}
}
void saveSpeechLanguageSettings() async {
@ -201,14 +237,12 @@ class LockVoiceSettingLogic extends BaseGetXController {
//
void _handlerStartVoicePackageConfigure(
VoicePackageConfigureReply reply) async {
final int status = reply.data[3];
final int status = reply.data[6];
switch (status) {
case 0x00:
//
cancelBlueConnetctToastTimer();
_startSendLanguageFile();
break;
case 0x06:
//
@ -218,7 +252,8 @@ class LockVoiceSettingLogic extends BaseGetXController {
}
break;
default:
showToast('获取设备型号失败'.tr);
dismissEasyLoading();
cancelBlueConnetctToastTimer();
break;
}
}
@ -409,6 +444,10 @@ class LockVoiceSettingLogic extends BaseGetXController {
}
void readLockLanguage() async {
showEasyLoading();
showBlueConnetctToastTimer(action: () {
dismissEasyLoading();
});
await BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState deviceConnectionState) async {
if (deviceConnectionState == BluetoothConnectionState.connected) {
@ -428,40 +467,42 @@ class LockVoiceSettingLogic extends BaseGetXController {
void handleLockCurrentVoicePacketResult(
ReadLockCurrentVoicePacketReply reply) {
final int status = reply.data[6];
final int status = reply.data[2];
switch (status) {
case 0x00:
//
cancelBlueConnetctToastTimer();
// 1. Locate the LanguageCode field in the reply
// CmdID (2 bytes) + Status (1 byte) = 3 bytes -> LanguageCode starts at index 3
const int languageCodeStartIndex = 3;
const int languageCodeLength = 20;
const int languageCodeEndIndex =
languageCodeStartIndex + languageCodeLength; // 23
// 2. Make sure the reply is long enough to contain it
if (reply.data.length < languageCodeEndIndex) {
throw Exception(
'Reply data is too short to contain LanguageCode. Expected at least $languageCodeEndIndex bytes, got ${reply.data.length}');
}
// 3. Extract the LanguageCode bytes
List<int> languageCodeBytes =
reply.data.sublist(languageCodeStartIndex, languageCodeEndIndex);
// 4. Decode the bytes as text (plain ASCII / UTF-8 code units)
String languageCode = String.fromCharCodes(languageCodeBytes);
// 5. Strip the padding: the 20-byte field is right-padded with '\0'
languageCode = languageCode.trim(); //
languageCode =
languageCode.replaceAll('\u0000', ''); // (null bytes)
// 6. Use the cleaned languageCode
print('LanguageCode: $languageCode'); // e.g. zh_CN, en_US
if (languageCode != null && languageCode != '') {
final indexWhere = state.languages
.indexWhere((element) => element.lang == languageCode);
if (indexWhere != -1) {
print('锁板上的语言是:$languageCode,下标是:$indexWhere');
state.selectPassthroughListIndex.value = indexWhere;
}
}
dismissEasyLoading();
break;
case 0x06:
//

View File

@ -96,7 +96,7 @@ class _LockVoiceSettingState extends State<LockVoiceSetting> {
return CommonItem(
leftTitel: soundType,
leftTitleStyle: TextStyle(
fontSize: 22.sp,
fontSize: 20.sp,
fontWeight: state.selectSoundTypeIndex.value == index
? FontWeight.bold
: null,
@ -142,7 +142,7 @@ class _LockVoiceSettingState extends State<LockVoiceSetting> {
return CommonItem(
leftTitel: item.langText,
leftTitleStyle: TextStyle(
fontSize: 22.sp,
fontSize: 20.sp,
fontWeight:
state.selectPassthroughListIndex.value == index
? FontWeight.bold
@ -152,7 +152,7 @@ class _LockVoiceSettingState extends State<LockVoiceSetting> {
isHaveLine: true,
isHaveDirection: false,
isHaveRightWidget: true,
leftTitleMaxWidth: 0.9.sw,
leftTitleMaxWidth: 0.85.sw,
rightWidget:
state.selectPassthroughListIndex.value == index
? Image(

View File

@ -110,22 +110,8 @@ class ImageTransmissionLogic extends BaseGetXController {
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
// //
if (_isFirstAudioFrame) {
_startAudioTime = currentTime;
_isFirstAudioFrame = false;
}
//
final expectedTime = _startAudioTime + talkData.durationMs;
final audioDelay = currentTime - expectedTime;
//
if (audioDelay > 500) {
state.audioBuffer.clear();
if (state.isOpenVoice.value) {
_playAudioFrames();
}
//
if (!state.isOpenVoice.value && state.isRecordingAudio.value) {
return;
}
if (state.audioBuffer.length >= audioBufferSize) {
@ -212,7 +198,8 @@ class ImageTransmissionLogic extends BaseGetXController {
///
void _playAudioData(TalkData talkData) async {
if (state.isOpenVoice.value) {
if (state.isOpenVoice.value &&
state.isRecordingAudio.value == false) {
final list =
G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
// // PCM PcmArrayInt16
@ -565,7 +552,6 @@ class ImageTransmissionLogic extends BaseGetXController {
//
Future<void> startProcessingAudio() async {
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
@ -602,12 +588,23 @@ class ImageTransmissionLogic extends BaseGetXController {
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to stop recorder: $ex';
} finally {
//
if (_startProcessingAudioTimer != null) {
// pad with 5 zero-filled chunks; the timer keeps sending for ~300 ms before being cancelled
for (int i = 0; i < 5; i++) {
_bufferedAudioFrames.addAll(List.filled(chunkSize, 0));
}
Future.delayed(const Duration(milliseconds: 300), () {
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
});
} else {
_bufferedAudioFrames.clear();
}
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
}
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
}
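
The reworked stop path above no longer tears the send timer down immediately: if the timer is still running, it pads the buffer with a few zero-filled chunks and cancels the timer 300 ms later, so whatever real audio is still buffered gets sent first. A minimal sketch of that flush-then-cancel pattern (standalone names; the real code uses _startProcessingAudioTimer and _bufferedAudioFrames):

import 'dart:async';

void flushThenCancel(Timer? sendTimer, List<int> buffer, int chunkSize) {
  if (sendTimer != null) {
    // keep the timer fed with padding while the real tail drains
    buffer.addAll(List.filled(5 * chunkSize, 0));
    Future.delayed(const Duration(milliseconds: 300), () {
      sendTimer?.cancel();
      buffer.clear();
    });
  } else {
    buffer.clear();
  }
}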
static const int chunkSize = 320; // 32010ms G.711
@ -645,38 +642,24 @@ class ImageTransmissionLogic extends BaseGetXController {
List<int> encodedData = G711Tool.encode(applyGain, 0); // 0A-law
_bufferedAudioFrames.addAll(encodedData);
//
if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
if (_startProcessingAudioTimer == null &&
_bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer =
Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
}
}
//
void _onError(VoiceProcessorException error) {
AppLog.log(error.message!);
}
//
List<int> _applyGain(List<int> pcmData, double gainFactor) {
List<int> result = List<int>.filled(pcmData.length, 0);
for (int i = 0; i < pcmData.length; i++) {
// PCM data is usually signed 16-bit integers
int sample = pcmData[i];
//
double amplified = sample * gainFactor;
//
if (amplified > 32767) {
amplified = 32767;
} else if (amplified < -32768) {
amplified = -32768;
}
result[i] = amplified.toInt();
}
return result;
return pcmData.map((sample) {
// scale by gainFactor and clamp to the signed 16-bit range
int amplified = (sample * gainFactor).round();
return amplified.clamp(-32768, 32767);
}).toList();
}
}
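
The rewritten _applyGain above (and its twins later in this diff) scales each PCM sample by gainFactor, rounds, and clamps to the signed 16-bit range so louder gains saturate instead of wrapping around. A tiny worked example with a gain of 2.0:

// 1000 -> 2000; -20000 -> -32768 (clamped); 20000 -> 32767 (clamped).
final samples = <int>[1000, -20000, 20000];
final gained = samples
    .map((s) => (s * 2.0).round().clamp(-32768, 32767))
    .toList(); // [2000, -32768, 32767]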

View File

@ -104,10 +104,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
codecType: 'h264',
);
// textureId
AppLog.log('StartChartManage().videoWidth:${StartChartManage()
.videoWidth}');
AppLog.log('StartChartManage().videoHeight:${StartChartManage()
.videoHeight}');
AppLog.log(
'StartChartManage().videoWidth:${StartChartManage().videoWidth}');
AppLog.log(
'StartChartManage().videoHeight:${StartChartManage().videoHeight}');
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
@ -493,7 +493,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
///
void _playAudioData(TalkData talkData) async {
if (state.isOpenVoice.value && state.isLoading.isFalse) {
if (state.isOpenVoice.value &&
state.isLoading.isFalse &&
state.isRecordingAudio.value == false) {
List<int> encodedData = G711Tool.decode(talkData.content, 0); // 0A-law
// PCM PcmArrayInt16
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(encodedData);
@ -746,7 +748,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
Future<void> startProcessingAudio() async {
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
@ -783,39 +784,36 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to stop recorder: $ex';
} finally {
//
if (_startProcessingAudioTimer != null) {
// pad with 5 zero-filled chunks; the timer keeps sending for ~300 ms before being cancelled
for (int i = 0; i < 5; i++) {
_bufferedAudioFrames.addAll(List.filled(chunkSize, 0));
}
Future.delayed(const Duration(milliseconds: 300), () {
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
});
} else {
_bufferedAudioFrames.clear();
}
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
}
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
}
//
List<int> _applyGain(List<int> pcmData, double gainFactor) {
List<int> result = List<int>.filled(pcmData.length, 0);
for (int i = 0; i < pcmData.length; i++) {
// PCM data is usually signed 16-bit integers
int sample = pcmData[i];
//
double amplified = sample * gainFactor;
//
if (amplified > 32767) {
amplified = 32767;
} else if (amplified < -32768) {
amplified = -32768;
}
result[i] = amplified.toInt();
}
return result;
return pcmData.map((sample) {
// scale by gainFactor and clamp to the signed 16-bit range
int amplified = (sample * gainFactor).round();
return amplified.clamp(-32768, 32767);
}).toList();
}
static const int chunkSize = 320; // 32010ms G.711
static const int intervalMs = 40; // 40ms发送一次4chunk
static const int intervalMs = 35; // 40ms发送一次4chunk
void _sendAudioChunk(Timer timer) async {
if (_bufferedAudioFrames.length < chunkSize) {
//
@ -849,10 +847,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
List<int> encodedData = G711Tool.encode(applyGain, 0); // 0A-law
_bufferedAudioFrames.addAll(encodedData);
//
if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
if (_startProcessingAudioTimer == null &&
_bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer =
Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
}
}
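
A note on the chunking constants above: with G.711 at its standard 8 kHz and one byte per sample, a 320-byte chunk is 40 ms of audio, so dropping the send interval from 40 ms to 35 ms in this file drains the buffer slightly faster than the microphone fills it, presumably to keep latency from building up. The arithmetic, under that 8 kHz assumption:

// Assuming standard G.711: 8000 samples/s, 1 byte per sample.
const int sampleRate = 8000;
const int chunkBytes = 320;
const double chunkMs = chunkBytes / sampleRate * 1000; // 40.0 ms of audio per chunk
const int sendIntervalMs = 35; // the new value above; previously 40 ms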
@ -973,7 +972,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
if (!state.isOpenVoice.value) {
// drop incoming G.711 frames while voice playback is off or the mic is recording
if (!state.isOpenVoice.value || state.isRecordingAudio.value) {
return;
}
if (state.audioBuffer.length >= audioBufferSize) {

View File

@ -109,22 +109,10 @@ class TalkViewLogic extends BaseGetXController {
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
// //
if (_isFirstAudioFrame) {
_startAudioTime = currentTime;
_isFirstAudioFrame = false;
}
//
final expectedTime = _startAudioTime + talkData.durationMs;
final audioDelay = currentTime - expectedTime;
//
if (audioDelay > 500) {
state.audioBuffer.clear();
if (state.isOpenVoice.value) {
_playAudioFrames();
}
// drop incoming G.711 frames while voice playback is off or the mic is recording
if (!state.isOpenVoice.value || state.isRecordingAudio.value) {
print(
'录音时丢弃数据:${state.isOpenVoice.value}-${state.isRecordingAudio.value}');
return;
}
if (state.audioBuffer.length >= audioBufferSize) {
@ -388,9 +376,11 @@ class TalkViewLogic extends BaseGetXController {
if (state.videoBuffer.isNotEmpty) {
final TalkData oldestFrame = state.videoBuffer.removeAt(0);
if (oldestFrame.content.isNotEmpty) {
state.listData.value = Uint8List.fromList(oldestFrame.content); //
state.listData.value =
Uint8List.fromList(oldestFrame.content); //
final int decodeStart = DateTime.now().millisecondsSinceEpoch;
decodeImageFromList(Uint8List.fromList(oldestFrame.content)).then((ui.Image img) {
decodeImageFromList(Uint8List.fromList(oldestFrame.content))
.then((ui.Image img) {
final int decodeEnd = DateTime.now().millisecondsSinceEpoch;
state.currentImage.value = img;
_renderedFrameCount++;
@ -524,7 +514,7 @@ class TalkViewLogic extends BaseGetXController {
final lockPeerId = StartChartManage().lockPeerId;
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
@ -562,7 +552,6 @@ class TalkViewLogic extends BaseGetXController {
//
Future<void> startProcessingAudio() async {
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
@ -599,12 +588,23 @@ class TalkViewLogic extends BaseGetXController {
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to stop recorder: $ex';
} finally {
//
if (_startProcessingAudioTimer != null) {
// pad with 5 zero-filled chunks; the timer keeps sending for ~300 ms before being cancelled
for (int i = 0; i < 5; i++) {
_bufferedAudioFrames.addAll(List.filled(chunkSize, 0));
}
Future.delayed(const Duration(milliseconds: 300), () {
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
});
} else {
_bufferedAudioFrames.clear();
}
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
}
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
}
static const int chunkSize = 320; // 32010ms G.711
@ -642,10 +642,11 @@ class TalkViewLogic extends BaseGetXController {
List<int> encodedData = G711Tool.encode(applyGain, 0); // 0A-law
_bufferedAudioFrames.addAll(encodedData);
//
if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
if (_startProcessingAudioTimer == null &&
_bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer =
Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
}
}
@ -656,25 +657,10 @@ class TalkViewLogic extends BaseGetXController {
//
List<int> _applyGain(List<int> pcmData, double gainFactor) {
List<int> result = List<int>.filled(pcmData.length, 0);
for (int i = 0; i < pcmData.length; i++) {
// PCM data is usually signed 16-bit integers
int sample = pcmData[i];
//
double amplified = sample * gainFactor;
//
if (amplified > 32767) {
amplified = 32767;
} else if (amplified < -32768) {
amplified = -32768;
}
result[i] = amplified.toInt();
}
return result;
return pcmData.map((sample) {
// scale by gainFactor and clamp to the signed 16-bit range
int amplified = (sample * gainFactor).round();
return amplified.clamp(-32768, 32767);
}).toList();
}
}