Merge branch 'develop_sky_liyi' into 'develop_sky'

Develop sky liyi

See merge request StarlockTeam/app-starlock!67
李仪 2025-05-16 09:07:58 +00:00
commit a83d26e368
28 changed files with 1821 additions and 270 deletions

View File

@ -64,10 +64,31 @@ variables:
.setup_fastlane_android:
extends: .build_rule
before_script:
- rm -rf ~/.gem ~/.bundle vendor/bundle_android # strongly recommended to clean on every run to avoid concurrency/cache pollution
- export PATH="$HOME/.rbenv/bin:$PATH"
- eval "$(rbenv init -)"
- rbenv global 2.7.8
- export PATH="$HOME/.rbenv/shims:$PATH"
- which ruby # print the ruby path in use, for debugging
- ruby -v # print the ruby version in use, for debugging
- gem sources --add https://gems.ruby-china.com/ --remove https://rubygems.org/ # remove this line when building outside China
- bundle config mirror.https://rubygems.org https://mirrors.aliyun.com/rubygems/
- bundle -v || gem install bundler --source https://gems.ruby-china.com/
- ls -li
- export NEXT_VERSION="$(cat app_new.version)"
# - flutter pub get
- bundle install --gemfile android/Gemfile --quiet
- bundle config set --local path 'vendor/bundle_android' # dedicated dependency path for Android
- bundle install --gemfile android/Gemfile # --quiet removed so install progress is visible
- gem pristine --all || true # rebuild any gems with missing or uncompiled native extensions
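# The second bundle install above (after the --local path setting) vendors gems into vendor/bundle_android, so the Android and iOS jobs do not share native extensions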
script:
# Print debug info to make later environment troubleshooting easier
- echo "=== DEBUG INFO (android) ==="
- which ruby
- ruby -v
- which gem
- gem -v
- echo $PATH
- env
- bash android/build.sh
cache:
paths:
- app_new.version
@ -75,10 +96,31 @@ variables:
.setup_fastlane_ios:
extends: .build_rule
before_script:
- rm -rf ~/.gem ~/.bundle vendor/bundle_ios # strongly recommended to clean on every run to avoid concurrency/cache pollution
- export PATH="$HOME/.rbenv/bin:$PATH"
- eval "$(rbenv init -)"
- rbenv global 2.7.8
- export PATH="$HOME/.rbenv/shims:$PATH"
- which ruby # print the ruby path in use, for debugging
- ruby -v # print the ruby version in use, for debugging
- gem sources --add https://gems.ruby-china.com/ --remove https://rubygems.org/ # remove this line when building outside China
- bundle config mirror.https://rubygems.org https://gems.ruby-china.com
- bundle -v || gem install bundler --source https://gems.ruby-china.com/
- ls -li
- export NEXT_VERSION="$(cat app_new.version)"
# - flutter pub get
- bundle install --gemfile ios/Gemfile --quiet
- bundle config set --local path 'vendor/bundle_ios' # dedicated dependency path for iOS
- bundle install --gemfile ios/Gemfile # --quiet removed so install progress is visible
- gem pristine --all || true # rebuild any gems with missing or uncompiled native extensions
script:
# Print debug info to make later environment troubleshooting easier
- echo "=== DEBUG INFO (ios) ==="
- which ruby
- ruby -v
- which gem
- gem -v
- echo $PATH
- env
- bash ios/build.sh
cache:
paths:
- app_new.version
@ -117,7 +159,15 @@ generate_next_version:
build_android:
stage: build-artifacts
extends: .setup_fastlane_android
script: bash android/build.sh
script:
- echo "=== DEBUG INFO (android) ==="
- which ruby
- ruby -v
- which gem
- gem -v
- echo $PATH
- env
- bash android/build.sh
artifacts:
paths:
- build/app/outputs/flutter-apk/
@ -126,6 +176,13 @@ build_ios:
stage: build-artifacts
extends: .setup_fastlane_ios
script:
- echo "=== DEBUG INFO (ios) ==="
- which ruby
- ruby -v
- which gem
- gem -v
- echo $PATH
- env
- bash ios/build.sh
artifacts:
paths:

View File

@ -1,6 +1,7 @@
source "https://rubygems.org"
source "https://mirrors.aliyun.com/rubygems/"
gem "fastlane"
gem 'nkf', '0.2.0'
plugins_path = File.join(File.dirname(__FILE__), 'fastlane', 'Pluginfile')
eval_gemfile(plugins_path) if File.exist?(plugins_path)

View File

@ -1,7 +1,8 @@
source "https://rubygems.org"
source "https://gems.ruby-china.com"
gem "fastlane"
gem 'cocoapods', '1.14.3'
gem 'public_suffix', '~> 4.0'
plugins_path = File.join(File.dirname(__FILE__), 'fastlane', 'Pluginfile')
eval_gemfile(plugins_path) if File.exist?(plugins_path)
gem 'nkf', '0.2.0'

View File

@ -10,6 +10,18 @@ cd ${CI_PROJECT_DIR}/ios
#bundle exec pod install
echo "ENV_BUILD_TAG:${ENV_BUILD_TAG},ENV_BUILD_BRANCH:${ENV_BUILD_BRANCH}"
regex='^v[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z]+\.[0-9]+)?$'
# ==== Debug output: verify environment and dependencies ====
echo "=== FASTLANE/GEM/ENV DEBUG ==="
which fastlane
fastlane -v
which bundle
bundle -v
echo $PATH
gem list | grep fastlane
gem list | grep digest-crc
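# digest-crc is presumably checked here because its native extension is a frequent fastlane install failure; the grep shows whether it is present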
# ==== END DEBUG ====
if [[ "${ENV_BUILD_BRANCH}" == "canary_release" ]]; then
echo "===build canary_release: ${NEXT_VERSION}"
export ENV_BUILD_TAG=${NEXT_VERSION}

View File

@ -203,6 +203,7 @@ import 'mine/valueAddedServices/valueAddedServicesRealName/value_added_services_
import 'mine/valueAddedServices/valueAddedServicesSMSTemplate/valueAddedServicesAddSMSTemplate/newSMSTemplate_page.dart';
import 'mine/valueAddedServices/valueAddedServicesSMSTemplate/valueAddedServicesListSMSTemplate/customSMSTemplateList_page.dart';
import 'starLockApplication/starLockApplication.dart';
import 'talk/starChart/views/imageTransmission/image_transmission_page.dart';
import 'tools/seletKeyCyclicDate/seletKeyCyclicDate_page.dart';
abstract class Routers {
@ -515,6 +516,8 @@ abstract class Routers {
static const String starChartPage = '/starChartPage'; //
static const String starChartTalkView = '/starChartTalkView'; //
static const String h264WebView = '/h264WebView'; //
static const String imageTransmissionView =
'/imageTransmissionView'; // (image transmission page)
}
abstract class AppRouters {
@ -1185,7 +1188,13 @@ abstract class AppRouters {
page: () => const DoubleLockLinkPage()),
GetPage<dynamic>(
name: Routers.starChartTalkView, page: () => const TalkViewPage()),
GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()), //
GetPage<dynamic>(
name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()),
//
GetPage<dynamic>(
name: Routers.imageTransmissionView,
page: () => ImageTransmissionPage()),
//
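// Wi-Fi (image-transmission) locks are routed to this page from UdpTalkRequestHandler (see its diff below)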
// GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()), // webview playback page
];
}

View File

@ -187,10 +187,12 @@ class BlueManage {
continue;
}
final isMatch = _isMatch(scanResult
.advertisementData.serviceUuids
.map((e) => e.uuid)
.toList());
final isMatch = _isMatch(
scanResult.advertisementData.serviceUuids
.map((e) => e.uuid)
.toList(),
isSingle: true,
);
if (isMatch && (scanResult.rssi >= -100)) {
// elements with the same id
@ -273,6 +275,7 @@ class BlueManage {
.map((e) => e.uuid)
.toList(),
deviceType: deviceType,
isSingle: false,
);
//
if (isMatch && (scanResult.rssi >= -100)) {
@ -316,25 +319,48 @@ class BlueManage {
}
/// Check whether the advertised service uuids match the expected device prefixes
bool _isMatch(List<String> serviceUuids, {DeviceType deviceType = DeviceType.blue}) {
final List<String> prefixes = getDeviceType(deviceType).map((e) => e.toLowerCase()).toList();
bool _isMatch(List<String> serviceUuids,
{DeviceType deviceType = DeviceType.blue, required bool isSingle}) {
final List<String> prefixes =
getDeviceType(deviceType).map((e) => e.toLowerCase()).toList();
for (String uuid in serviceUuids) {
final String cleanUuid = uuid.replaceAll('-', '').toLowerCase();
final String cleanUuid = uuid.toLowerCase();
if (cleanUuid.length == 8) {
// 8-character short uuid
// the device prefix is at the start; the pairing-status field sits at index 4-5
String pairStatus = cleanUuid.substring(4, 6); // characters at index 4-5
for (final prefix in prefixes) {
if (cleanUuid.startsWith(prefix)) {
return true;
if (isSingle) {
return true; // when isSingle is true, a prefix match alone is enough
} else {
// pairing status: 00 = not paired, 01 = paired
if (pairStatus == '00') {
return true; // only unpaired devices are treated as a match
}
// when the status is 01, do not return true; keep checking the remaining uuids
}
}
}
} else if (cleanUuid.length == 32) {
} else {
// 128-bit uuid: the prefix is in the characters at index 2-3, the pairing status at index 30-31
final String first8 = cleanUuid.substring(0, 8);
if (first8.length >= 4) {
final String thirdAndFourth = first8.substring(2, 4); // characters at index 2-3
if (cleanUuid.length >= 32) {
final String thirdAndFourth = cleanUuid.substring(2, 4); // characters at index 2-3
for (final prefix in prefixes) {
if (thirdAndFourth == prefix) {
return true;
if (isSingle) {
return true; // when isSingle is true, a prefix match alone is enough
} else {
// the pairing status is carried in characters 31-32 of the uuid
if (cleanUuid.length >= 32) {
String pairStatus =
cleanUuid.substring(30, 32); // characters at index 30-31
// pairing status: 00 = not paired, 01 = paired
if (pairStatus == '00') {
return true; // only unpaired devices are treated as a match
}
// when the status is 01, do not return true; keep checking the remaining uuids
}
}
}
}
}
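A minimal Dart sketch of the rule this hunk implements, reconstructed from the code and its partially garbled comments: the advertised uuid is assumed to carry a device-type prefix plus a two-character pairing-status field where '00' means not yet paired. The helper name is illustrative and not part of the codebase.

/// Illustrative restatement of _isMatch's rule: an 8-char uuid matches on its
/// prefix, a 128-bit uuid on the characters at index 2-3; unless isSingle,
/// only devices advertising pairing status '00' are accepted.
bool matchesAdvertisedUuid(String uuid, List<String> prefixes,
    {required bool isSingle}) {
  final clean = uuid.toLowerCase();
  if (clean.length == 8) {
    if (!prefixes.any(clean.startsWith)) return false;
    return isSingle || clean.substring(4, 6) == '00';
  }
  if (clean.length >= 32) {
    if (!prefixes.contains(clean.substring(2, 4))) return false;
    return isSingle || clean.substring(30, 32) == '00';
  }
  return false;
}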

View File

@ -773,7 +773,7 @@ class LockDetailLogic extends BaseGetXController {
return;
}
//
PacketLossStatistics().reset();
// PacketLossStatistics().reset();
// Start the call-request timer keyed by the lock's peer id
StartChartManage()
.startCallRequestMessageTimer(ToPeerId: network!.peerId ?? '');

View File

@ -1103,13 +1103,15 @@ class _LockDetailPageState extends State<LockDetailPage>
}));
//
showWidgetArr.add(bottomItem('images/main/icon_main_password.png', '密码'.tr,
state.bottomBtnisEable.value, () {
Get.toNamed(Routers.passwordKeyListPage,
arguments: <String, LockListInfoItemEntity>{
'keyInfo': state.keyInfos.value
});
}));
if (state.keyInfos.value.lockFeature!.password == 1) {
showWidgetArr.add(bottomItem('images/main/icon_main_password.png',
'密码'.tr, state.bottomBtnisEable.value, () {
Get.toNamed(Routers.passwordKeyListPage,
arguments: <String, LockListInfoItemEntity>{
'keyInfo': state.keyInfos.value
});
}));
}
// IC card
if (state.keyInfos.value.lockFeature!.icCard == 1) {

View File

@ -189,6 +189,7 @@ class ConfiguringWifiLogic extends BaseGetXController {
Get.offAllNamed(Routers.starLockMain);
}
eventBus.fire(SuccessfulDistributionNetwork());
eventBus.fire(RefreshLockListInfoDataEvent(clearScanDevices: true,isUnShowLoading: true));
});
//

View File

@ -118,6 +118,15 @@ class _NormallyOpenModePageState extends State<NormallyOpenModePage> with RouteA
: SubmitBtn(
btnName: '保存'.tr,
onClick: () {
if (state.weekDays.value.isEmpty) {
logic.showToast('请选择常开日期'.tr);
return;
}
if (state.endTimeMinute.value < state.beginTimeMinute.value) {
logic.showToast('结束时间不能小于开始时间哦'.tr);
return;
}
logic.sendAutoLock();
}),
)),

View File

@ -361,6 +361,7 @@ class Bluetooth {
class LockFeature {
LockFeature({
this.password,
this.passwordIssue,
this.icCard,
this.fingerprint,
this.fingerVein,
@ -381,6 +382,7 @@ class LockFeature {
LockFeature.fromJson(Map<String, dynamic> json) {
password = json['password'];
passwordIssue = json['passwordIssue'];
icCard = json['icCard'];
fingerprint = json['fingerprint'];
fingerVein = json['fingerVein'];
@ -400,6 +402,7 @@ class LockFeature {
}
int? password;
int? passwordIssue;
int? icCard;
int? fingerprint;
int? fingerVein;
@ -420,6 +423,7 @@ class LockFeature {
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = <String, dynamic>{};
data['password'] = password;
data['passwordIssue'] = passwordIssue;
data['icCard'] = icCard;
data['fingerprint'] = fingerprint;
data['fingerVein'] = fingerVein;

View File

@ -27,13 +27,13 @@ class LockListLogic extends BaseGetXController {
LockListLogic(this.entity) {}
LockListState state = LockListState();
List<GroupList> _groupDataList = <GroupList>[];
final RxList<GroupList> groupDataList = <GroupList>[].obs;
LockListInfoGroupEntity? entity;
final ShowTipView showTipView = ShowTipView();
List<GroupList> get groupDataList {
List<GroupList> get groupDataListFiltered {
final List<GroupList> list =
_groupDataList.map((GroupList e) => e.copy()).toList();
groupDataList.map((GroupList e) => e.copy()).toList();
if (state.searchStr.value != '' && state.showSearch.value) {
list.forEach((GroupList element) {
element.lockList?.removeWhere((LockListInfoItemEntity element) =>
@ -60,15 +60,12 @@ class LockListLogic extends BaseGetXController {
//
void setLockListInfoGroupEntity(LockListInfoGroupEntity entity) {
this.entity = entity;
// if (entity.pageNo == 1) {
_groupDataList = <GroupList>[];
// }
_groupDataList.addAll(entity.groupList!);
update();
groupDataList.value = entity.groupList!;
}
//
late StreamSubscription<Reply> _replySubscription;
late StreamSubscription _setLockListInfoGroupEntity;
void _initReplySubscription() {
_replySubscription =
@ -336,17 +333,30 @@ class LockListLogic extends BaseGetXController {
void onReady() {
super.onReady();
_initReplySubscription();
_initEventHandler();
}
@override
void onInit() {
super.onInit();
// AppLog.log('onInit调用了 setLockListInfoGroupEntity');
setLockListInfoGroupEntity(entity!);
AppLog.log('[onInit] entity: \\${entity?.toString()}');
if (entity != null) {
setLockListInfoGroupEntity(entity!);
}
_initEventHandler();
}
@override
void onClose() {
_replySubscription.cancel();
_setLockListInfoGroupEntity.cancel();
}
void _initEventHandler() {
_setLockListInfoGroupEntity = eventBus
.on<SetLockListInfoGroupEntity>()
.listen((SetLockListInfoGroupEntity event) async {
setLockListInfoGroupEntity(event.lockListInfoGroupEntity);
});
}
}
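The list data now reaches LockListLogic through the event bus instead of a direct Get.find lookup (the firing side is in the LockMainLogic hunk below). A sketch of the assumed event shape and wiring; the real class is defined elsewhere in the repo and may differ.

// Assumed shape of the event; names taken from its usage in this diff.
class SetLockListInfoGroupEntity {
  SetLockListInfoGroupEntity({required this.lockListInfoGroupEntity});
  final LockListInfoGroupEntity lockListInfoGroupEntity;
}

// Publisher (LockMainLogic):
//   eventBus.fire(SetLockListInfoGroupEntity(lockListInfoGroupEntity: entity));
// Subscriber (this controller): _initEventHandler listens once, and onClose
// cancels the subscription, as shown above.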

View File

@ -37,32 +37,31 @@ class _LockListPageState extends State<LockListPage> with RouteAware {
@override
Widget build(BuildContext context) {
return GetBuilder<LockListLogic>(builder: (LockListLogic logic) {
return Scaffold(
body: ListView.separated(
itemCount: logic.groupDataList.length,
itemBuilder: (BuildContext context, int index) {
final GroupList itemData = logic.groupDataList[index];
return _buildLockExpandedList(context, index, itemData);
},
shrinkWrap: true,
physics: const AlwaysScrollableScrollPhysics(),
separatorBuilder: (BuildContext context, int index) {
return const Divider(
height: 1,
color: AppColors.greyLineColor,
);
}),
);
});
return Obx(() => Scaffold(
body: ListView.separated(
itemCount: logic.groupDataListFiltered.length,
itemBuilder: (BuildContext context, int index) {
final GroupList itemData = logic.groupDataListFiltered[index];
return _buildLockExpandedList(context, index, itemData, key: ValueKey(itemData.groupId));
},
shrinkWrap: true,
physics: const AlwaysScrollableScrollPhysics(),
separatorBuilder: (BuildContext context, int index) {
return const Divider(
height: 1,
color: AppColors.greyLineColor,
);
}),
));
}
//
Widget _buildLockExpandedList(BuildContext context, int index,
GroupList itemData) {
GroupList itemData, {Key? key}) {
final List<LockListInfoItemEntity> lockItemList =
itemData.lockList ?? <LockListInfoItemEntity>[];
return LockListGroupView(
key: key,
onTap: () {
//
if (itemData.isChecked) {} else {}

View File

@ -132,20 +132,21 @@ class LockMainLogic extends BaseGetXController {
state.lockListInfoGroupEntity.refresh();
// AppLog.log('entity:$entity state.lockListInfoGroupEntity.value.groupList!.length:${state.lockListInfoGroupEntity.value.groupList![0].lockList!.length}');
//
if (Get.isRegistered<LockListLogic>()) {
//
// AppLog.log('检测控制器是否存 调用了 setLockListInfoGroupEntity');
Get.find<LockListLogic>().setLockListInfoGroupEntity(entity);
} else {
//
Future<dynamic>.delayed(200.milliseconds, () {
if (Get.isRegistered<LockListLogic>()) {
//
// AppLog.log('检测控制器是否存 延迟调用了 setLockListInfoGroupEntity');
Get.find<LockListLogic>().setLockListInfoGroupEntity(entity);
}
});
}
eventBus.fire(SetLockListInfoGroupEntity(lockListInfoGroupEntity: entity));
// if (Get.isRegistered<LockListLogic>()) {
// //
// // AppLog.log('检测控制器是否存 调用了 setLockListInfoGroupEntity');
// Get.find<LockListLogic>().setLockListInfoGroupEntity(entity);
// } else {
// //
// Future<dynamic>.delayed(500.milliseconds, () {
// if (Get.isRegistered<LockListLogic>()) {
// //
// // AppLog.log('检测控制器是否存 延迟调用了 setLockListInfoGroupEntity');
// Get.find<LockListLogic>().setLockListInfoGroupEntity(entity);
// }
// });
// }
if (state.dataLength.value == 1) {
if (Get.isRegistered<LockDetailLogic>()) {

View File

@ -15,6 +15,7 @@ import 'package:star_lock/talk/starChart/proto/generic.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_accept.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import '../../star_chart_manage.dart';
@ -34,7 +35,7 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
//
startChartManage.stopTalkAcceptTimer();
//
_handleSendExpect();
_handleSendExpect(lockPeerID: scpMessage.FromPeerId!);
//
stopRingtone();
//
@ -79,11 +80,33 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
}
///
void _handleSendExpect() {
void _handleSendExpect({
required String lockPeerID,
}) async {
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
final isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
var isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
var isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
if (lockList != null && lockList.length != 0) {
for (var lockInfo in lockList) {
final peerId = lockInfo.network?.peerId;
if (peerId != null && peerId != '') {
if (peerId == lockPeerID) {
isH264 = lockInfo.lockFeature?.isH264 == 1;
isMJpeg = lockInfo.lockFeature?.isMJpeg == 1;
}
}
}
}
});
}
// Decide whether to use H264 or MJPEG based on the lock's capabilities
if (isH264) {
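The same peer-id lookup against the cached lock list is repeated in several handlers in this merge request (the accept, request, and expect paths, plus the image-transmission logic). A sketch of how it could be factored into one helper, assuming only the entity fields visible in these diffs; the helper name is illustrative.

/// Illustrative helper: find the cached lock entry whose peerId matches.
LockListInfoItemEntity? findLockByPeerId(
    LockListInfoGroupEntity? groups, String peerId) {
  if (groups == null || peerId.isEmpty) return null;
  for (final group in groups.groupList ?? <GroupList>[]) {
    for (final lock in group.lockList ?? <LockListInfoItemEntity>[]) {
      if (lock.network?.peerId == peerId) return lock;
    }
  }
  return null;
}

// Usage sketch inside _handleSendExpect:
//   final match = findLockByPeerId(await Storage.getLockMainListData(), lockPeerID);
//   final isH264 = (match ?? currentKeyInfo).lockFeature?.isH264 == 1;
//   final isMJpeg = (match ?? currentKeyInfo).lockFeature?.isMJpeg == 1;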

View File

@ -43,7 +43,9 @@ class UdpTalkExpectHandler extends ScpMessageBaseHandle
startChartManage.stopCallRequestMessageTimer();
// talkViewState.rotateAngle.value = talkExpectResp.rotate ?? 0;
startChartManage.rotateAngle = talkExpectResp.rotate;
AppLog.log('视频画面需要旋转:${talkExpectResp.rotate}');
startChartManage.videoWidth = talkExpectResp.width;
startChartManage.videoHeight = talkExpectResp.height;
AppLog.log('视频画面需要旋转:${talkExpectResp.rotate},画面宽高:${talkExpectResp.width}-${talkExpectResp.height}');
//
// x秒内没有收到通话保持则执行的操作;
talkePingOverTimeTimerManager.start();

View File

@ -37,7 +37,10 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
startChartManage.ToPeerId = scpMessage.FromPeerId!;
startChartManage.lockPeerId = scpMessage.FromPeerId!;
//
_talkRequestEvent(talkObjectName: talkReq.callerName);
_talkRequestEvent(
talkObjectName: talkReq.callerName,
lockPeerID: scpMessage.FromPeerId!,
);
//
replySuccessMessage(scpMessage);
@ -56,8 +59,11 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
//
startChartManage.FromPeerId = scpMessage.ToPeerId!;
startChartManage.ToPeerId = scpMessage.FromPeerId!;
startChartManage.lockPeerId = scpMessage.FromPeerId!;
//
_handleResponseSendExpect();
_handleResponseSendExpect(
lockPeerID: scpMessage.FromPeerId!,
);
//
startChartManage.startTalkExpectTimer();
//
@ -78,9 +84,12 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
void handleRealTimeData(ScpMessage scpMessage) {}
//
void _talkRequestEvent({required String talkObjectName}) {
void _talkRequestEvent({
required String talkObjectName,
required String lockPeerID,
}) async {
//
_handleRequestSendExpect();
_handleRequestSendExpect(lockPeerID: lockPeerID);
//
// test: play the ringtone
playRingtone();
@ -88,6 +97,33 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
// _showTalkRequestNotification(talkObjectName: talkObjectName);
//
talkStatus.setPassiveCallWaitingAnswer();
//
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
var isWifiLockType = currentKeyInfo.lockFeature?.wifiLockType == 1;
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
if (lockList != null && lockList.length != 0) {
for (var lockInfo in lockList) {
final peerId = lockInfo.network?.peerId;
if (peerId != null && peerId != '') {
if (peerId == lockPeerID) {
isWifiLockType = lockInfo.lockFeature?.wifiLockType == 1;
}
}
}
}
});
}
if (isWifiLockType) {
Get.toNamed(Routers.imageTransmissionView);
return;
}
if (startChartManage
.getDefaultTalkExpect()
.videoType
@ -170,48 +206,96 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
}
/// After the app receives a talk request
void _handleRequestSendExpect() {
void _handleRequestSendExpect({
required String lockPeerID,
}) async {
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
final isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
var isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
var isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
if (lockList != null && lockList.length != 0) {
for (var lockInfo in lockList) {
final peerId = lockInfo.network?.peerId;
if (peerId != null && peerId != '') {
if (peerId == lockPeerID) {
isH264 = lockInfo.lockFeature?.isH264 == 1;
isMJpeg = lockInfo.lockFeature?.isMJpeg == 1;
}
}
}
}
});
}
// Decide whether to use H264 or MJPEG based on the lock's capabilities
if (isH264) {
// Lock supports H264: send the H264 video expectation
startChartManage.sendOnlyH264VideoTalkExpectData();
print('app收到的对讲请求后发送的预期数据=========锁支持H264发送H264视频格式期望数据');
print(
'app收到的对讲请求后发送的预期数据=========锁支持H264发送H264视频格式期望数据,peerID=${lockPeerID}');
} else if (isMJpeg) {
// Lock supports MJPEG: send the MJPEG (image) video expectation
startChartManage.sendOnlyImageVideoTalkExpectData();
print('app收到的对讲请求后发送的预期数据=========锁不支持H264支持MJPEG发送MJPEG视频格式期望数据');
print(
'app收到的对讲请求后发送的预期数据=========锁不支持H264支持MJPEG发送MJPEG视频格式期望数据,peerID=${lockPeerID}');
} else {
// Neither capability declared: fall back to the image video expectation
startChartManage.sendOnlyImageVideoTalkExpectData();
print('app收到的对讲请求后发送的预期数据=========锁不支持H264和MJPEG默认发送图像视频格式期望数据');
print(
'app收到的对讲请求后发送的预期数据=========锁不支持H264和MJPEG默认发送MJPEG视频格式期望数据,peerID=${lockPeerID}');
}
}
/// After the app initiates the talk request and the lock replies
void _handleResponseSendExpect() {
void _handleResponseSendExpect({
required String lockPeerID,
}) async {
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
final isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
var isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
var isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
if (lockList != null && lockList.length != 0) {
for (var lockInfo in lockList) {
final peerId = lockInfo.network?.peerId;
if (peerId != null && peerId != '') {
if (peerId == lockPeerID) {
isH264 = lockInfo.lockFeature?.isH264 == 1;
isMJpeg = lockInfo.lockFeature?.isMJpeg == 1;
}
}
}
}
});
}
// Decide whether to use H264 or MJPEG based on the lock's capabilities
if (isH264) {
// Lock supports H264: send the H264 video + G711 audio expectation
startChartManage.sendH264VideoAndG711AudioTalkExpectData();
print('app主动发请求收到回复后发送的预期数据=======锁支持H264发送H264视频格式期望数据');
AppLog.log(
'app主动发对讲请求收到回复后发送的预期数据=======锁支持H264发送H264视频格式期望数据,peerID=${lockPeerID}');
} else if (isMJpeg) {
// Lock supports MJPEG: send the image video + G711 audio expectation
startChartManage.sendImageVideoAndG711AudioTalkExpectData();
print('app主动发请求收到回复后发送的预期数据=======锁不支持H264支持MJPEG发送MJPEG视频格式期望数据');
AppLog.log(
'app主动发对讲请求收到回复后发送的预期数据=======锁不支持H264支持MJPEG发送MJPEG视频格式期望数据,peerID=${lockPeerID}');
} else {
// Neither capability declared: fall back to the image video + G711 audio expectation
startChartManage.sendImageVideoAndG711AudioTalkExpectData();
print('app主动发请求收到回复后发送的预期数据=======锁不支持H264和MJPEG默认发送图像视频格式期望数据');
AppLog.log(
'app主动发对讲请求收到回复后发送的预期数据=======锁不支持H264和MJPEG默认发送MJPEG视频格式期望数据,peerID=${lockPeerID}');
}
}
}

View File

@ -114,6 +114,8 @@ class StartChartManage {
final int _maxPayloadSize = 8 * 1024; //
int rotateAngle = 0; //
int videoWidth = 0; //
int videoHeight = 0; //
//
TalkExpectReq _defaultTalkExpect = TalkConstant.H264Expect;
@ -604,7 +606,7 @@ class StartChartManage {
void startTalkRejectMessageTimer() async {
try {
int count = 0;
final int maxCount = 10; // 10
final int maxCount = 3; // send at most 3 times
talkRejectTimer ??= Timer.periodic(
Duration(seconds: _defaultIntervalTime),
@ -630,6 +632,8 @@ class StartChartManage {
stopCallRequestMessageTimer();
stopSendingRbcuInfoMessages();
stopSendingRbcuProBeMessages();
stopTalkAcceptTimer();
stopCallRequestMessageTimer();
//
talkePingOverTimeTimerManager.cancel();
@ -728,6 +732,8 @@ class StartChartManage {
stopCallRequestMessageTimer();
stopSendingRbcuInfoMessages();
stopSendingRbcuProBeMessages();
stopTalkAcceptTimer();
stopCallRequestMessageTimer();
//
talkePingOverTimeTimerManager.cancel();
talkDataOverTimeTimerManager.cancel();

View File

@ -0,0 +1,675 @@
import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
import 'dart:ui' show decodeImageFromList;
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_state.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import '../../../../tools/baseGetXController.dart';
class ImageTransmissionLogic extends BaseGetXController {
ImageTransmissionState state = ImageTransmissionState();
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
int bufferSize = 8; //
int audioBufferSize = 2; // keep at most 2 audio frames buffered
bool _isFirstAudioFrame = true; //
int _startAudioTime = 0; //
//
final List<int> _bufferedAudioFrames = <int>[];
//
bool _isListening = false;
StreamSubscription? _streamSubscription;
Timer? videoRenderTimer; //
int _renderedFrameCount = 0;
int _lastFpsPrintTime = DateTime.now().millisecondsSinceEpoch;
///
void _initFlutterPcmSound() {
const int sampleRate = 8000;
FlutterPcmSound.setLogLevel(LogLevel.none);
FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
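// 8 kHz mono matches the G711 audio stream coming from the lock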
// Configure the PCM feed threshold per platform
if (Platform.isAndroid) {
FlutterPcmSound.setFeedThreshold(1024); // Android
} else {
FlutterPcmSound.setFeedThreshold(2000); // iOS and other platforms
}
}
///
void udpHangUpAction() async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
//
StartChartManage().startTalkHangupMessageTimer();
} else {
//
StartChartManage().startTalkRejectMessageTimer();
}
Get.back();
}
//
void initiateAnswerCommand() {
StartChartManage().startTalkAcceptTimer();
}
//
void _startListenTalkData() {
//
if (_isListening) {
AppLog.log("已经存在数据流监听,避免重复监听");
return;
}
AppLog.log("==== 启动新的数据流监听 ====");
_isListening = true;
_streamSubscription = state.talkDataRepository.talkDataStream
.listen((TalkDataModel talkDataModel) async {
final talkData = talkDataModel.talkData;
final contentType = talkData!.contentType;
final currentTime = DateTime.now().millisecondsSinceEpoch;
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
// //
if (_isFirstAudioFrame) {
_startAudioTime = currentTime;
_isFirstAudioFrame = false;
}
//
final expectedTime = _startAudioTime + talkData.durationMs;
final audioDelay = currentTime - expectedTime;
//
if (audioDelay > 500) {
state.audioBuffer.clear();
if (state.isOpenVoice.value) {
_playAudioFrames();
}
return;
}
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); //
}
state.audioBuffer.add(talkData); //
//
_playAudioFrames();
break;
case TalkData_ContentTypeE.Image:
// Buffer at most bufferSize video frames
state.videoBuffer.add(talkData);
if (state.videoBuffer.length > bufferSize) {
state.videoBuffer.removeAt(0); //
}
break;
}
});
}
//
void _playAudioFrames() {
//
//
if (state.audioBuffer.isEmpty ||
state.audioBuffer.length < audioBufferSize) {
return;
}
//
TalkData? oldestFrame;
int oldestIndex = -1;
for (int i = 0; i < state.audioBuffer.length; i++) {
if (oldestFrame == null ||
state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
oldestFrame = state.audioBuffer[i];
oldestIndex = i;
}
}
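// durationMs doubles as the ordering key here: the smallest value is treated as the earliest captured frame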
//
if (oldestFrame != null && oldestIndex != -1) {
if (state.isOpenVoice.value) {
//
_playAudioData(oldestFrame);
}
state.audioBuffer.removeAt(oldestIndex);
}
}
///
void _startListenTalkStatus() {
state.startChartTalkStatus.statusStream.listen((talkStatus) {
state.talkStatus.value = talkStatus;
switch (talkStatus) {
case TalkStatus.rejected:
case TalkStatus.hangingUpDuring:
case TalkStatus.notTalkData:
case TalkStatus.notTalkPing:
case TalkStatus.end:
_handleInvalidTalkStatus();
break;
case TalkStatus.answeredSuccessfully:
state.oneMinuteTimeTimer?.cancel(); //
state.oneMinuteTimeTimer ??=
Timer.periodic(const Duration(seconds: 1), (Timer t) {
if (state.listData.value.length > 0) {
state.oneMinuteTime.value++;
// if (state.oneMinuteTime.value >= 60) {
// t.cancel(); //
// state.oneMinuteTime.value = 0;
// //
// // udpHangUpAction();
// }
}
});
break;
default:
//
break;
}
});
}
///
void _playAudioData(TalkData talkData) async {
if (state.isOpenVoice.value) {
final list =
G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
// Wrap the decoded PCM samples in a PcmArrayInt16
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
FlutterPcmSound.feed(fromList);
if (!state.isPlaying.value) {
FlutterPcmSound.play();
state.isPlaying.value = true;
}
}
}
///
void _stopPlayG711Data() async {
await FlutterPcmSound.pause();
await FlutterPcmSound.stop();
await FlutterPcmSound.clear();
}
///
// udpOpenDoorAction() async {
// final List<String>? privateKey =
// await Storage.getStringList(saveBluePrivateKey);
// final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
//
// final List<String>? signKey = await Storage.getStringList(saveBlueSignKey);
// final List<int> signKeyDataList = changeStringListToIntList(signKey!);
//
// final List<String>? token = await Storage.getStringList(saveBlueToken);
// final List<int> getTokenList = changeStringListToIntList(token!);
//
// await _getLockNetToken();
//
// final OpenLockCommand openLockCommand = OpenLockCommand(
// lockID: BlueManage().connectDeviceName,
// userID: await Storage.getUid(),
// openMode: lockDetailState.openDoorModel,
// openTime: _getUTCNetTime(),
// onlineToken: lockDetailState.lockNetToken,
// token: getTokenList,
// needAuthor: 1,
// signKey: signKeyDataList,
// privateKey: getPrivateKeyList,
// );
// final messageDetail = openLockCommand.packageData();
// // List<int>
// String hexString = messageDetail
// .map((byte) => byte.toRadixString(16).padLeft(2, '0'))
// .join(' ');
//
// AppLog.log('open lock hexString: $hexString');
// //
// StartChartManage().sendRemoteUnLockMessage(
// bluetoothDeviceName: BlueManage().connectDeviceName,
// openLockCommand: messageDetail,
// );
// showToast('正在开锁中...'.tr);
// }
int _getUTCNetTime() {
if (lockDetailState.isHaveNetwork) {
return DateTime.now().millisecondsSinceEpoch ~/ 1000 +
lockDetailState.differentialTime;
} else {
return 0;
}
}
///
Future<bool> getPermissionStatus() async {
final Permission permission = Permission.microphone;
//granted denied permanentlyDenied
final PermissionStatus status = await permission.status;
if (status.isGranted) {
return true;
} else if (status.isDenied) {
requestPermission(permission);
} else if (status.isPermanentlyDenied) {
openAppSettings();
} else if (status.isRestricted) {
requestPermission(permission);
} else {}
return false;
}
///
void requestPermission(Permission permission) async {
final PermissionStatus status = await permission.request();
if (status.isPermanentlyDenied) {
openAppSettings();
}
}
Future<void> requestPermissions() async {
//
var storageStatus = await Permission.storage.request();
//
var microphoneStatus = await Permission.microphone.request();
if (storageStatus.isGranted && microphoneStatus.isGranted) {
print("Permissions granted");
} else {
print("Permissions denied");
//
if (await Permission.storage.isPermanentlyDenied) {
openAppSettings(); //
}
}
}
Future<void> startRecording() async {
// requestPermissions();
// if (state.isRecordingScreen.value) {
// showToast('录屏已开始,请勿重复点击');
// }
// bool start = await FlutterScreenRecording.startRecordScreen(
// "Screen Recording", //
// titleNotification: "Recording in progress", //
// messageNotification: "Tap to stop recording", //
// );
//
// if (start) {
// state.isRecordingScreen.value = true;
// }
}
Future<void> stopRecording() async {
// String path = await FlutterScreenRecording.stopRecordScreen;
// print("Recording saved to: $path");
//
// //
// bool? success = await GallerySaver.saveVideo(path);
// if (success == true) {
// print("Video saved to gallery");
// } else {
// print("Failed to save video to gallery");
// }
//
// showToast('录屏结束,已保存到系统相册');
// state.isRecordingScreen.value = false;
}
@override
void onReady() {
super.onReady();
}
@override
void onInit() {
super.onInit();
//
_startListenTalkData();
//
_startListenTalkStatus();
//
// *** ***
state.talkStatus.value = state.startChartTalkStatus.status;
//
_initFlutterPcmSound();
//
// _startPlayback();
//
_initAudioRecorder();
requestPermissions();
// Render video at roughly 10 fps (one frame every 100 ms)
videoRenderTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
final int now = DateTime.now().millisecondsSinceEpoch;
if (state.videoBuffer.isNotEmpty) {
final TalkData oldestFrame = state.videoBuffer.removeAt(0);
if (oldestFrame.content.isNotEmpty) {
state.listData.value =
Uint8List.fromList(oldestFrame.content); //
final int decodeStart = DateTime.now().millisecondsSinceEpoch;
decodeImageFromList(Uint8List.fromList(oldestFrame.content))
.then((ui.Image img) {
final int decodeEnd = DateTime.now().millisecondsSinceEpoch;
state.currentImage.value = img;
_renderedFrameCount++;
// Count rendered frames to report the actual fps once per second
if (now - _lastFpsPrintTime >= 1000) {
// print('实际渲染fps: $_renderedFrameCount');
_renderedFrameCount = 0;
_lastFpsPrintTime = now;
}
}).catchError((e) {
print('图片解码失败: $e');
});
}
}
//
});
}
@override
void onClose() {
_stopPlayG711Data(); //
state.listData.value = Uint8List(0); //
state.audioBuffer.clear(); //
state.videoBuffer.clear(); //
state.oneMinuteTimeTimer?.cancel();
state.oneMinuteTimeTimer = null;
stopProcessingAudio();
//
// _imageCache.clear();
state.oneMinuteTimeTimer?.cancel(); //
state.oneMinuteTimeTimer = null; //
state.oneMinuteTime.value = 0;
//
_streamSubscription?.cancel();
_isListening = false;
//
videoRenderTimer?.cancel();
videoRenderTimer = null;
super.onClose();
}
@override
void dispose() {
stopProcessingAudio();
//
StartChartManage().reSetDefaultTalkExpect();
//
videoRenderTimer?.cancel();
videoRenderTimer = null;
super.dispose();
}
///
void _handleInvalidTalkStatus() {
state.listData.value = Uint8List(0);
//
_stopPlayG711Data();
stopProcessingAudio();
}
///
void updateTalkExpect() {
TalkExpectReq talkExpectReq = TalkExpectReq();
state.isOpenVoice.value = !state.isOpenVoice.value;
if (!state.isOpenVoice.value) {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [],
);
showToast('已静音'.tr);
} else {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
}
///
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
talkExpect: talkExpectReq);
}
///
Future<void> captureAndSavePng() async {
try {
if (state.globalKey.currentContext == null) {
AppLog.log('截图失败: 未找到当前上下文');
return;
}
final RenderRepaintBoundary boundary = state.globalKey.currentContext!
.findRenderObject()! as RenderRepaintBoundary;
final ui.Image image = await boundary.toImage();
final ByteData? byteData =
await image.toByteData(format: ui.ImageByteFormat.png);
if (byteData == null) {
AppLog.log('截图失败: 图像数据为空');
return;
}
final Uint8List pngBytes = byteData.buffer.asUint8List();
//
final Directory directory = await getApplicationDocumentsDirectory();
final String imagePath = '${directory.path}/screenshot.png';
//
final File imgFile = File(imagePath);
await imgFile.writeAsBytes(pngBytes);
//
await ImageGallerySaver.saveFile(imagePath);
AppLog.log('截图保存路径: $imagePath');
showToast('截图已保存到相册'.tr);
} catch (e) {
AppLog.log('截图失败: $e');
}
}
//
Future<void> remoteOpenLock() async {
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
var lockId = currentKeyInfo.lockId ?? 0;
var remoteUnlock = currentKeyInfo.lockSetting?.remoteUnlock ?? 0;
final lockPeerId = StartChartManage().lockPeerId;
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
if (lockList != null && lockList.length != 0) {
for (var lockInfo in lockList) {
final peerId = lockInfo.network?.peerId;
if (peerId != null && peerId != '') {
if (peerId == lockPeerId) {
lockId = lockInfo.lockId ?? 0;
remoteUnlock = lockInfo.lockSetting?.remoteUnlock ?? 0;
}
}
}
}
});
}
if (remoteUnlock == 1) {
final LoginEntity entity = await ApiRepository.to
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
if (entity.errorCode!.codeIsSuccessful) {
showToast('已开锁'.tr);
StartChartManage().lockListPeerId = [];
}
} else {
showToast('该锁的远程开锁功能未启用'.tr);
}
}
///
void _initAudioRecorder() {
state.voiceProcessor = VoiceProcessor.instance;
}
//
Future<void> startProcessingAudio() async {
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.startRecordingAudioTime.value = DateTime.now();
//
state.voiceProcessor
?.addFrameListeners(<VoiceProcessorFrameListener>[_onFrame]);
state.voiceProcessor?.addErrorListener(_onError);
} else {
// state.errorMessage.value = 'Recording permission not granted';
}
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to start recorder: $ex';
}
state.isOpenVoice.value = false;
}
///
Future<void> stopProcessingAudio() async {
try {
await state.voiceProcessor?.stop();
state.voiceProcessor?.removeFrameListener(_onFrame);
state.udpSendDataFrameNumber = 0;
//
state.endRecordingAudioTime.value = DateTime.now();
//
final Duration duration = state.endRecordingAudioTime.value
.difference(state.startRecordingAudioTime.value);
state.recordingAudioTime.value = duration.inSeconds;
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to stop recorder: $ex';
} finally {
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.isOpenVoice.value = true;
}
}
//
Future<void> _onFrame(List<int> frame) async {
//
if (_bufferedAudioFrames.length > state.frameLength * 3) {
_bufferedAudioFrames.clear(); //
return;
}
//
List<int> amplifiedFrame = _applyGain(frame, 1.6);
// Encode the PCM frame to G711
List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0 = A-law
_bufferedAudioFrames.addAll(encodedData);
// Use a millisecond timestamp (mod 1e6) as the frame's durationMs field
final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
int getFrameLength = state.frameLength;
if (Platform.isIOS) {
getFrameLength = state.frameLength * 2;
}
//
if (_bufferedAudioFrames.length >= state.frameLength) {
try {
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: _bufferedAudioFrames,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
} finally {
_bufferedAudioFrames.clear(); //
}
} else {
_bufferedAudioFrames.addAll(encodedData);
}
}
//
void _onError(VoiceProcessorException error) {
AppLog.log(error.message!);
}
//
List<int> _applyGain(List<int> pcmData, double gainFactor) {
List<int> result = List<int>.filled(pcmData.length, 0);
for (int i = 0; i < pcmData.length; i++) {
// PCM samples are signed 16-bit integers
int sample = pcmData[i];
//
double amplified = sample * gainFactor;
//
if (amplified > 32767) {
amplified = 32767;
} else if (amplified < -32768) {
amplified = -32768;
}
result[i] = amplified.toInt();
}
return result;
}
}
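The G711 branch above drops buffered audio whenever playback falls more than 500 ms behind, treating the frame's durationMs as its offset from the first frame. A standalone sketch of that check, with illustrative names; the 500 ms threshold is the one used in the code.

/// Illustrative latency check mirroring the G711 branch in _startListenTalkData.
bool shouldFlushAudio({
  required int firstFrameWallClockMs, // wall-clock time of the first audio frame
  required int frameDurationMs, // talkData.durationMs of the current frame
  required int nowMs,
  int maxDelayMs = 500,
}) {
  final expectedMs = firstFrameWallClockMs + frameDurationMs;
  return nowMs - expectedMs > maxDelayMs;
}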

View File

@ -0,0 +1,238 @@
import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_logic.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_state.dart';
import 'package:star_lock/tools/titleAppBar.dart';
import 'package:slide_to_act/slide_to_act.dart';
//
// import 'package:flutter_slider_button/flutter_slider_button.dart';
class ImageTransmissionPage extends StatefulWidget {
const ImageTransmissionPage();
@override
State<ImageTransmissionPage> createState() => _ImageTransmissionPageState();
}
class _ImageTransmissionPageState extends State<ImageTransmissionPage>
with TickerProviderStateMixin {
final ImageTransmissionLogic logic = Get.put(ImageTransmissionLogic());
final ImageTransmissionState state = Get.find<ImageTransmissionLogic>().state;
final startChartManage = StartChartManage();
@override
void initState() {
super.initState();
state.animationController = AnimationController(
vsync: this, // use this State as the TickerProvider
duration: const Duration(seconds: 1),
);
state.animationController.repeat();
state.animationController.addStatusListener((AnimationStatus status) {
if (status == AnimationStatus.completed) {
state.animationController.reset();
state.animationController.forward();
} else if (status == AnimationStatus.dismissed) {
state.animationController.reset();
state.animationController.forward();
}
});
}
@override
void dispose() {
state.animationController.dispose();
CallTalk().finishAVData();
super.dispose();
}
@override
Widget build(BuildContext context) {
return Scaffold(
backgroundColor: AppColors.mainBackgroundColor,
resizeToAvoidBottomInset: false,
appBar: TitleAppBar(
barTitle: '图传'.tr,
haveBack: true,
backgroundColor: AppColors.mainColor,
backAction: (){
logic.udpHangUpAction();
},
),
body: Obx(() => Column(
children: [
SizedBox(height: 24.h),
SizedBox(
height: 0.6.sh,
child: state.listData.value.isEmpty
? _buildWaitingView()
: _buildVideoView(),
),
SizedBox(height: 30.h),
_buildBottomToolBar(),
SizedBox(height: 30.h),
],
)),
);
}
Widget _buildWaitingView() {
double barWidth = MediaQuery.of(context).size.width - 60.w;
return Center(
child: ClipRRect(
borderRadius: BorderRadius.circular(30.h),
child: Stack(
alignment: Alignment.center,
children: [
Container(
width: barWidth,
height: double.infinity,
child: Image.asset(
'images/main/monitorBg.png',
fit: BoxFit.cover,
),
),
RotationTransition(
turns: state.animationController,
child: Image.asset(
'images/main/realTime_connecting.png',
width: 300.w,
height: 300.w,
fit: BoxFit.contain,
),
),
],
),
),
);
}
Widget _buildVideoView() {
double barWidth = MediaQuery.of(context).size.width - 60.w;
return PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: Center(
child: ClipRRect(
borderRadius: BorderRadius.circular(30.h),
child: Container(
width: barWidth,
height: double.infinity,
child: RotatedBox(
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: RawImage(
image: state.currentImage.value,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
),
),
),
),
),
),
);
}
Widget _buildBottomToolBar() {
return Container(
margin: EdgeInsets.symmetric(horizontal: 30.w),
padding: EdgeInsets.symmetric(vertical: 28.h, horizontal: 20.w),
decoration: BoxDecoration(
color: Colors.white,
borderRadius: BorderRadius.circular(30.h),
boxShadow: [
BoxShadow(
color: Colors.black12,
blurRadius: 12,
offset: Offset(0, 4),
),
],
),
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: [
_circleButton(
icon: Icons.call,
color: Colors.green,
onTap: () {
if (state.talkStatus.value ==
TalkStatus.passiveCallWaitingAnswer) {
//
logic.initiateAnswerCommand();
}
},
),
_circleButton(
icon: Icons.call_end,
color: Colors.red,
onTap: () {
logic.udpHangUpAction();
},
),
_circleButton(
icon: Icons.camera_alt,
color: Colors.blue,
onTap: () async {
if (state.talkStatus.value ==
TalkStatus.answeredSuccessfully) {
await logic.captureAndSavePng();
}
},
),
],
),
SizedBox(height: 36.h),
SlideAction(
height: 64.h,
borderRadius: 24.h,
elevation: 0,
innerColor: Colors.amber,
outerColor: Colors.amber.withOpacity(0.15),
sliderButtonIcon: Icon(Icons.lock, color: Colors.white, size: 40.w),
text: '滑动解锁',
textStyle: TextStyle(fontSize: 26.sp, color: Colors.black54, fontWeight: FontWeight.bold),
onSubmit: () {
// TODO:
logic.remoteOpenLock();
},
),
],
),
);
}
Widget _circleButton(
{required IconData icon,
required Color color,
required VoidCallback onTap}) {
return GestureDetector(
onTap: onTap,
child: Container(
width: 90.w,
height: 90.w,
decoration: BoxDecoration(
color: color,
shape: BoxShape.circle,
boxShadow: [
BoxShadow(
color: color.withOpacity(0.3),
blurRadius: 10,
offset: Offset(0, 4),
),
],
),
child: Icon(icon, color: Colors.white, size: 48.w),
),
);
}
}

View File

@ -0,0 +1,94 @@
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:get/get_rx/get_rx.dart';
import 'package:get/get_rx/src/rx_types/rx_types.dart';
import 'package:get/state_manager.dart';
import 'package:network_info_plus/network_info_plus.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';
import '../../../../tools/storage.dart';
enum NetworkStatus {
normal, // 0
lagging, // 1
delayed, // 2
packetLoss // 3
}
class ImageTransmissionState{
int udpSendDataFrameNumber = 0; //
// var isSenderAudioData = false.obs;//
Future<String?> userMobileIP = NetworkInfo().getWifiIP();
Future<String?> userUid = Storage.getUid();
RxInt udpStatus =
0.obs; //0 1 2 3 4 5 6 8 9
TextEditingController passwordTF = TextEditingController();
Rx<Uint8List> listData = Uint8List(0).obs; //
RxList<int> listAudioData = <int>[].obs; //
GlobalKey globalKey = GlobalKey();
Timer? oneMinuteTimeTimer; // 60
RxInt oneMinuteTime = 0.obs; //
// 10
late Timer answerTimer;
late Timer hangUpTimer;
late Timer openDoorTimer;
Timer? fpsTimer;
late AnimationController animationController;
late Timer autoBackTimer =
Timer(const Duration(seconds: 1), () {}); //30
late Timer realTimePicTimer =
Timer(const Duration(seconds: 1), () {}); //
RxInt elapsedSeconds = 0.obs;
//
List<TalkData> audioBuffer = <TalkData>[].obs;
List<TalkData> activeAudioBuffer = <TalkData>[].obs;
List<TalkData> activeVideoBuffer = <TalkData>[].obs;
List<TalkData> videoBuffer = <TalkData>[].obs;
List<TalkData> videoBuffer2 = <TalkData>[].obs;
RxBool isPlaying = false.obs; //
Rx<TalkStatus> talkStatus = TalkStatus.none.obs; //
// startChartTalkStatus
final StartChartTalkStatus startChartTalkStatus =
StartChartTalkStatus.instance;
//
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
RxInt lastFrameTimestamp = 0.obs; // ,
Rx<NetworkStatus> networkStatus =
NetworkStatus.normal.obs; // 0 = normal, 1 = lagging, 2 = delayed, 3 = packet loss
RxInt alertCount = 0.obs; //
RxInt maxAlertNumber = 3.obs; //
RxBool isOpenVoice = true.obs; //
RxBool isRecordingScreen = false.obs; //
RxBool isRecordingAudio = false.obs; //
Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; //
Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; //
RxInt recordingAudioTime = 0.obs; //
RxInt fps = 0.obs; // FPS
late VoiceProcessor? voiceProcessor; //
final int frameLength = 320; //640
final int sampleRate = 8000; //8000
List<int> recordingAudioAllFrames = <int>[]; //
List<int> lockRecordingAudioAllFrames = <int>[]; //
RxInt rotateAngle = 0.obs; //
RxBool isLongPressing = false.obs; //
RxBool hasAudioData = false.obs; //
RxInt lastAudioTimestamp = 0.obs; //
Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
}

View File

@ -35,6 +35,8 @@ import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_st
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
import '../../../../tools/baseGetXController.dart';
@ -75,6 +77,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// Most recently accepted frameSeq
int? _lastFrameSeq;
// Flag set when a frameSeq wrap-around (new stream) has been detected
bool _pendingStreamReset = false;
//
int _pendingResetWidth = 864;
int _pendingResetHeight = 480;
// Waiting-for-I-frame state
bool _waitingForIFrame = false;
//
Future<void> _initVideoDecoder() async {
try {
@ -89,12 +101,12 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// textureId
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
state.textureId.value = textureId;
Future.microtask(() => state.textureId.value = textureId);
AppLog.log('视频解码器初始化成功textureId=$textureId');
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
state.isLoading.value = false;
AppLog.log('已经开始渲染=======');
// dismiss the loading indicator once the first frame has rendered
Future.microtask(() => state.isLoading.value = false);
});
} else {
AppLog.log('视频解码器初始化失败');
@ -146,13 +158,53 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int frameSeq,
int frameSeqI,
) {
// frameSeq must be strictly increasing
if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
//
AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
// Detect a frameSeq wrap-around: an I-frame with a smaller frameSeq marks a new stream
if (!_pendingStreamReset &&
_lastFrameSeq != null &&
frameType == TalkDataH264Frame_FrameTypeE.I &&
frameSeq < _lastFrameSeq!) {
// New-stream I-frame: show loading and reset all local state
AppLog.log(
'检测到新流I帧frameSeq回绕进入loading并重置: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
Future.microtask(() => state.isLoading.value = true);
_pendingStreamReset = true;
//
_stopFrameProcessTimer();
//
_resetDecoderForNewStream(_pendingResetWidth, _pendingResetHeight);
//
_lastFrameSeq = null;
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
//
_startFrameProcessTimer();
// Do not return: fall through so this I-frame re-initializes the decoder and gets decoded
//
}
// While a stream reset is pending, only an I-frame is accepted
if (_pendingStreamReset) {
if (frameType == TalkDataH264Frame_FrameTypeE.I) {
// Received the new stream's I-frame; resume normal handling
AppLog.log('收到新流I帧关闭loading: frameSeq=$frameSeq');
//Future.microtask(() => state.isLoading.value = false);
_pendingStreamReset = false;
_lastFrameSeq = frameSeq;
_decodedIFrames.clear();
_decodedIFrames.add(frameSeq);
// fall through so this I-frame is decoded
} else {
// Still waiting for the new stream's I-frame: drop non-I frames
AppLog.log('等待新流I帧丢弃非I帧: frameSeq=$frameSeq, frameType=$frameType');
return;
}
} else {
//
if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
}
_lastFrameSeq = frameSeq;
}
_lastFrameSeq = frameSeq;
// Map
final Map<String, dynamic> frameMap = {
'frameData': frameData,
@ -162,13 +214,19 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
'pts': pts,
};
// When the buffer is full, drop a P frame first (keep I-frames)
while (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
int pbIndex = state.h264FrameBuffer
.indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
if (pbIndex != -1) {
state.h264FrameBuffer.removeAt(pbIndex);
} else {
state.h264FrameBuffer.removeAt(0);
}
}
//
state.h264FrameBuffer.add(frameMap);
//
while (state.h264FrameBuffer.length > state.maxFrameBufferSize) {
state.h264FrameBuffer.removeAt(0);
}
}
///
@ -204,29 +262,31 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
try {
//
final Map<String, dynamic> frameMap = state.h264FrameBuffer.removeAt(0);
final List<int> frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE frameType = frameMap['frameType'];
final int frameSeq = frameMap['frameSeq'];
final int frameSeqI = frameMap['frameSeqI'];
int pts = frameMap['pts'];
// int pts = DateTime.now().millisecondsSinceEpoch;
// if (frameType == TalkDataH264Frame_FrameTypeE.P) {
// // frameSeqI为I帧序号标识
// if (!(_decodedIFrames.contains(frameSeqI))) {
// AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
// return;
// }
// } else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
// // I帧序号
// _decodedIFrames.add(frameSeq);
// }
// Optionally dump raw h264 frames to a file (disabled)
// _appendH264FrameToFile(frameData, frameType);
// final timestamp = DateTime.now().millisecondsSinceEpoch;
// final timestamp64 = timestamp is int ? timestamp : timestamp.toInt();
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.isNotEmpty
? state.h264FrameBuffer.removeAt(0)
: null;
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
if (frameData == null ||
frameType == null ||
frameSeq == null ||
frameSeqI == null ||
pts == null) {
state.isProcessingFrame = false;
return;
}
// Skip the frame when textureId is null
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
@ -457,6 +517,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
_initVideoDecoder();
_initHdOptions();
// Clear the H264 frame buffer
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
@ -485,7 +546,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
if (state.textureId.value != null) {
VideoDecodePlugin.releaseDecoder();
state.textureId.value = null;
Future.microtask(() => state.textureId.value = null);
}
//
@ -515,17 +576,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
///
void updateTalkExpect() {
// Mapping from quality label to VideoTypeE
final Map<String, VideoTypeE> qualityToVideoType = {
'标清': VideoTypeE.H264,
'高清': VideoTypeE.H264_720P,
//
};
TalkExpectReq talkExpectReq = TalkExpectReq();
state.isOpenVoice.value = !state.isOpenVoice.value;
// Pick the videoType that matches the currently selected quality
VideoTypeE currentVideoType = qualityToVideoType[state.currentQuality.value] ?? VideoTypeE.H264;
if (!state.isOpenVoice.value) {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.H264],
videoType: [currentVideoType],
audioType: [],
);
showToast('已静音'.tr);
} else {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.H264],
videoType: [currentVideoType],
audioType: [AudioTypeE.G711],
);
}
@ -572,36 +641,42 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
//
//
Future<void> remoteOpenLock() async {
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
var lockId = currentKeyInfo.lockId ?? 0;
var remoteUnlock = currentKeyInfo.lockSetting?.remoteUnlock ?? 0;
final lockPeerId = StartChartManage().lockPeerId;
final lockListPeerId = StartChartManage().lockListPeerId;
int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
// Use the lockId of the cached entry whose peerId matches the current peer
lockListPeerId.forEach((element) {
if (element.network?.peerId == lockPeerId) {
lockId = element.lockId ?? 0;
}
});
final LockSetInfoEntity lockSetInfoEntity =
await ApiRepository.to.getLockSettingInfoData(
lockId: lockId.toString(),
);
if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
final LoginEntity entity = await ApiRepository.to
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
if (entity.errorCode!.codeIsSuccessful) {
showToast('已开锁'.tr);
StartChartManage().lockListPeerId = [];
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
if (lockList != null && lockList.length != 0) {
for (var lockInfo in lockList) {
final peerId = lockInfo.network?.peerId;
if (peerId != null && peerId != '') {
if (peerId == lockPeerId) {
lockId = lockInfo.lockId ?? 0;
remoteUnlock = lockInfo.lockSetting?.remoteUnlock ?? 0;
}
}
}
}
} else {
showToast('该锁的远程开锁功能未启用'.tr);
});
}
if (remoteUnlock == 1) {
final LoginEntity entity = await ApiRepository.to
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
if (entity.errorCode!.codeIsSuccessful) {
showToast('已开锁'.tr);
StartChartManage().lockListPeerId = [];
}
} else {
showToast('该锁的远程开锁功能未启用'.tr);
}
}
@ -1167,4 +1242,81 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
}
}
//
void onQualityChanged(String quality) async {
state.currentQuality.value = quality;
TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
final audioType = talkExpectReq.audioType;
int width = 864;
int height = 480;
switch (quality) {
case '高清':
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.H264_720P],
audioType: audioType,
);
width = 1280;
height = 720;
break;
case '标清':
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.H264],
audioType: audioType,
);
width = 864;
height = 480;
break;
}
/// Switch the expected talk data type and restart the talk-expect message timer
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
talkExpect: talkExpectReq);
// Keep showing loading; the decoder reset waits for frameSeq wraparound detection
// Reset the frameSeq wraparound detection flag and record the pending resolution
_pendingStreamReset = false;
_pendingResetWidth = width;
_pendingResetHeight = height;
}
void _initHdOptions() {
TalkExpectReq talkExpectReq = StartChartManage().getDefaultTalkExpect();
final videoType = talkExpectReq.videoType;
if (videoType.contains(VideoTypeE.H264)) {
state.currentQuality.value = '标清';
} else if (videoType.contains(VideoTypeE.H264_720P)) {
state.currentQuality.value = '高清';
}
}
// Reset the decoder for the new stream resolution
Future<void> _resetDecoderForNewStream(int width, int height) async {
try {
if (state.textureId.value != null) {
await VideoDecodePlugin.releaseDecoder();
Future.microtask(() => state.textureId.value = null);
}
final config = VideoDecoderConfig(
width: width,
height: height,
codecType: 'h264',
);
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
AppLog.log('frameSeq回绕后解码器初始化成功textureId=$textureId');
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('已经开始渲染=======');
// Hide the loading placeholder
Future.microtask(() => state.isLoading.value = false);
});
} else {
AppLog.log('frameSeq回绕后解码器初始化失败');
}
_startFrameProcessTimer();
} catch (e) {
AppLog.log('frameSeq回绕时解码器初始化错误: $e');
}
}
}
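Reviewer note: onQualityChanged only records the pending reset (_pendingStreamReset, _pendingResetWidth, _pendingResetHeight); the frameSeq wraparound detection that eventually calls _resetDecoderForNewStream is not part of this hunk. A minimal, self-contained sketch of one plausible shape of that detection follows; all names (PendingReset, WraparoundDetector, onFrame) are illustrative, not from this codebase.

// Standalone sketch of frameSeq wraparound detection driving a pending decoder reset.
// Hypothetical types; the real flow lives in TalkViewNativeDecodeLogic.
class PendingReset {
  PendingReset(this.width, this.height);
  final int width;
  final int height;
}

class WraparoundDetector {
  int? _lastFrameSeq;
  PendingReset? pending; // set when the user switches quality

  /// Returns the reset to apply when frameSeq jumps backwards
  /// (the sender restarted the stream), otherwise null.
  PendingReset? onFrame(int frameSeq) {
    final last = _lastFrameSeq;
    _lastFrameSeq = frameSeq;
    if (last != null && frameSeq < last && pending != null) {
      final reset = pending;
      pending = null;
      return reset; // caller would await _resetDecoderForNewStream(reset.width, reset.height)
    }
    return null;
  }
}

void main() {
  final detector = WraparoundDetector()..pending = PendingReset(1280, 720);
  for (final seq in [100, 101, 1, 2]) {
    final reset = detector.onFrame(seq);
    if (reset != null) {
      print('wraparound at seq=$seq -> reinit decoder at ${reset.width}x${reset.height}');
    }
  }
}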

View File

@ -97,40 +97,42 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
final double scaleWidth = physicalWidth / rotatedImageWidth;
final double scaleHeight = physicalHeight / rotatedImageHeight;
max(scaleWidth, scaleHeight); //
return state.isLoading.isTrue
? Image.asset(
'images/main/monitorBg.png',
width: screenWidth,
height: screenHeight,
fit: BoxFit.cover,
)
: Positioned.fill(
child: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
// Use RotatedBox to apply the video rotation
quarterTurns:
startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // Slightly scale up to hide the white edge on iOS
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
),
// While loading or when textureId is null, show the loading placeholder image
if (state.isLoading.isTrue || state.textureId.value == null) {
return Image.asset(
'images/main/monitorBg.png',
width: screenWidth,
height: screenHeight,
fit: BoxFit.cover,
);
} else {
return Positioned.fill(
child: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
// Use RotatedBox to apply the video rotation
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // Slightly scale up to hide the white edge on iOS
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
),
);
),
),
);
}
},
),
@ -295,6 +297,62 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
),
),
),
SizedBox(width: 50.w),
// Video quality switch button
GestureDetector(
onTap: () async {
// Show the quality picker bottom sheet
showModalBottomSheet(
context: context,
backgroundColor: Colors.white,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.vertical(top: Radius.circular(20.w)),
),
builder: (BuildContext context) {
final List<String> qualities = ['高清', '标清'];
return SafeArea(
child: SingleChildScrollView(
child: Column(
mainAxisSize: MainAxisSize.min,
children: qualities.map((q) {
return Obx(() => InkWell(
onTap: () {
Navigator.of(context).pop();
logic.onQualityChanged(q);
},
child: Container(
padding: EdgeInsets.symmetric(vertical: 18.w),
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
mainAxisSize: MainAxisSize.max,
children: [
Text(
q,
style: TextStyle(
color: state.currentQuality.value == q
? AppColors.mainColor
: Colors.black,
fontWeight: state.currentQuality.value == q
? FontWeight.bold
: FontWeight.normal,
fontSize: 28.sp,
),
),
],
),
),
));
}).toList(),
),
),
);
},
);
},
child: Container(
child: Icon(Icons.high_quality_outlined, color: Colors.white, size: 38.w),
),
),
]);
}
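The bottom sheet above only hands the selected label ('高清' / '标清') to logic.onQualityChanged, which maps it to a VideoTypeE plus a target resolution. A standalone sketch of that mapping, using a local enum in place of the protocol's VideoTypeE:

// Sketch of the label-to-profile mapping behind onQualityChanged.
// VideoType is a stand-in for the protocol's VideoTypeE.
enum VideoType { h264, h264_720p }

class QualityProfile {
  const QualityProfile(this.videoType, this.width, this.height);
  final VideoType videoType;
  final int width;
  final int height;
}

const Map<String, QualityProfile> kQualityProfiles = {
  '高清': QualityProfile(VideoType.h264_720p, 1280, 720),
  '标清': QualityProfile(VideoType.h264, 864, 480),
};

void main() {
  // Fall back to SD when the label is unknown, mirroring the `?? VideoTypeE.H264` above.
  final profile = kQualityProfiles['高清'] ?? kQualityProfiles['标清']!;
  print('${profile.videoType} @ ${profile.width}x${profile.height}');
}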

View File

@ -109,7 +109,7 @@ class TalkViewNativeDecodeState {
// H264 frame buffer related
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264 frame buffer
final int maxFrameBufferSize = 7; // Maximum number of buffered frames
final int maxFrameBufferSize = 15; // Maximum number of buffered frames
final int targetFps = 30; // Target frame rate, paired with the native-side buffer
Timer? frameProcessTimer; // Timer that drives frame consumption
bool isProcessingFrame = false; // Whether a frame is currently being processed
@ -117,4 +117,7 @@ class TalkViewNativeDecodeState {
// H264文件保存相关
String? h264FilePath;
File? h264File;
// Current video quality; defaults to '高清' (HD)
RxString currentQuality = '高清'.obs;
}
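maxFrameBufferSize grows from 7 to 15 while targetFps stays at 30, so the Dart-side buffer can now hold roughly half a second of video before anything is discarded. The eviction policy is not visible in this file; assuming drop-oldest, a minimal standalone sketch:

// Sketch: bounded H264 frame buffer that drops the oldest frame when full.
// Drop-oldest is an assumption; the real eviction policy is not shown in this diff.
class FrameBuffer {
  FrameBuffer(this.maxSize);
  final int maxSize;
  final List<Map<String, dynamic>> _frames = [];

  void push(Map<String, dynamic> frame) {
    if (_frames.length >= maxSize) {
      _frames.removeAt(0); // drop the oldest buffered frame
    }
    _frames.add(frame);
  }

  Map<String, dynamic>? pop() => _frames.isEmpty ? null : _frames.removeAt(0);

  int get length => _frames.length;
}

void main() {
  final buffer = FrameBuffer(15); // matches maxFrameBufferSize above
  for (var i = 0; i < 20; i++) {
    buffer.push({'frameSeq': i});
  }
  print(buffer.length); // 15 — older frames were discarded
}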

View File

@ -31,6 +31,8 @@ import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import '../../../../tools/baseGetXController.dart';
@ -514,34 +516,40 @@ class TalkViewLogic extends BaseGetXController {
// Remote unlock
Future<void> remoteOpenLock() async {
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
var lockId = currentKeyInfo.lockId ?? 0;
var remoteUnlock = currentKeyInfo.lockSetting?.remoteUnlock ?? 0;
final lockPeerId = StartChartManage().lockPeerId;
final lockListPeerId = StartChartManage().lockListPeerId;
int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
// Match by peerId: if a lock in the peerId list matches the current lockPeerId,
// use that lock's lockId
lockListPeerId.forEach((element) {
if (element.network?.peerId == lockPeerId) {
lockId = element.lockId ?? 0;
}
});
final LockSetInfoEntity lockSetInfoEntity =
await ApiRepository.to.getLockSettingInfoData(
lockId: lockId.toString(),
);
if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
final LoginEntity entity = await ApiRepository.to
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
if (entity.errorCode!.codeIsSuccessful) {
showToast('已开锁'.tr);
StartChartManage().lockListPeerId = [];
final LockListInfoGroupEntity? lockListInfoGroupEntity =
await Storage.getLockMainListData();
if (lockListInfoGroupEntity != null) {
lockListInfoGroupEntity!.groupList?.forEach((element) {
final lockList = element.lockList;
if (lockList != null && lockList.length != 0) {
for (var lockInfo in lockList) {
final peerId = lockInfo.network?.peerId;
if (peerId != null && peerId != '') {
if (peerId == lockPeerId) {
lockId = lockInfo.lockId ?? 0;
remoteUnlock = lockInfo.lockSetting?.remoteUnlock ?? 0;
}
}
}
}
} else {
showToast('该锁的远程开锁功能未启用'.tr);
});
}
if (remoteUnlock == 1) {
final LoginEntity entity = await ApiRepository.to
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
if (entity.errorCode!.codeIsSuccessful) {
showToast('已开锁'.tr);
StartChartManage().lockListPeerId = [];
}
} else {
showToast('该锁的远程开锁功能未启用'.tr);
}
}
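Both remoteOpenLock implementations now resolve the lock by matching StartChartManage().lockPeerId against the cached lock list from Storage.getLockMainListData() instead of fetching lock settings from the API. A simplified, self-contained sketch of that lookup, with the entity types flattened for illustration:

// Sketch of the peerId lookup performed by remoteOpenLock; types simplified.
class LockInfo {
  LockInfo({required this.lockId, this.peerId, this.remoteUnlock = 0});
  final int lockId;
  final String? peerId;
  final int remoteUnlock;
}

/// Returns the lock whose peerId matches [lockPeerId], or null if none matches.
LockInfo? findLockByPeerId(List<List<LockInfo>> groups, String lockPeerId) {
  for (final lockList in groups) {
    for (final lockInfo in lockList) {
      final peerId = lockInfo.peerId;
      if (peerId != null && peerId.isNotEmpty && peerId == lockPeerId) {
        return lockInfo;
      }
    }
  }
  return null;
}

void main() {
  final groups = [
    [LockInfo(lockId: 1, peerId: 'peer-a', remoteUnlock: 0)],
    [LockInfo(lockId: 2, peerId: 'peer-b', remoteUnlock: 1)],
  ];
  final match = findLockByPeerId(groups, 'peer-b');
  print('lockId=${match?.lockId} remoteUnlock=${match?.remoteUnlock}');
}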

View File

@ -98,56 +98,55 @@ class _TalkViewPageState extends State<TalkViewPage>
child: Stack(
alignment: Alignment.center,
children: <Widget>[
Obx(
() {
final double screenWidth = MediaQuery.of(context).size.width;
final double screenHeight = MediaQuery.of(context).size.height;
final double logicalWidth = MediaQuery.of(context).size.width;
final double logicalHeight = MediaQuery.of(context).size.height;
final double devicePixelRatio =
MediaQuery.of(context).devicePixelRatio;
//
final double physicalWidth = logicalWidth * devicePixelRatio;
final double physicalHeight = logicalHeight * devicePixelRatio;
//
const int rotatedImageWidth = 480; //
const int rotatedImageHeight = 864; //
//
final double scaleWidth = physicalWidth / rotatedImageWidth;
final double scaleHeight = physicalHeight / rotatedImageHeight;
max(scaleWidth, scaleHeight); //
return state.listData.value.isEmpty
? Image.asset(
'images/main/monitorBg.png',
width: screenWidth,
height: screenHeight,
fit: BoxFit.cover,
)
: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: RawImage(
image: state.currentImage.value,
width: ScreenUtil().scaleWidth,
height: ScreenUtil().scaleHeight,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
),
),
),
),
);
},
),
// Background layer
Obx(() {
if (state.listData.value.isEmpty) {
return SizedBox.expand(
child: Image.asset(
'images/main/monitorBg.png',
fit: BoxFit.cover,
),
);
}
final int videoW = startChartManage.videoWidth;
final int videoH = startChartManage.videoHeight;
if (videoW == 320 && videoH == 240) {
return SizedBox.expand(
child: Container(
decoration: const BoxDecoration(
gradient: LinearGradient(
begin: Alignment.topCenter,
end: Alignment.bottomCenter,
colors: [
Color(0xFF232526),
Color(0xFF414345),
],
),
),
),
);
}
return const SizedBox.shrink();
}),
// Video layer
Obx(() {
if (state.listData.value.isEmpty) {
return const SizedBox.shrink();
}
final int videoW = startChartManage.videoWidth;
final int videoH = startChartManage.videoHeight;
if (videoW == 320 && videoH == 240) {
return Positioned(
top: 150.h,
left: 0,
right: 0,
child: _buildVideoWidget(),
);
} else {
// Other resolutions: render the video full screen
return _buildVideoWidget();
}
}),
Obx(() => state.listData.value.isEmpty
? Positioned(
bottom: 310.h,
@ -183,6 +182,8 @@ class _TalkViewPageState extends State<TalkViewPage>
),
)
: Container()),
/// Bottom control bar
Positioned(
bottom: 10.w,
child: Container(
@ -614,4 +615,68 @@ class _TalkViewPageState extends State<TalkViewPage>
// UdpTalkDataHandler().resetDataRates();
super.dispose();
}
Widget _buildVideoWidget() {
//
double barWidth = 1.sw - 30.w * 2;
int videoW = startChartManage.videoWidth;
int videoH = startChartManage.videoHeight;
int quarterTurns = startChartManage.rotateAngle ~/ 90;
bool isRotated = quarterTurns % 2 == 1;
// Aspect ratio flips when the video is rotated by an odd number of quarter turns
double videoAspect = isRotated ? videoW / videoH : videoH / videoW;
double containerHeight = barWidth * videoAspect;
if (videoW == 320 && videoH == 240) {
return Center(
child: ClipRRect(
borderRadius: BorderRadius.circular(20.h),
child: Container(
width: barWidth,
height: containerHeight,
decoration: const BoxDecoration(
gradient: LinearGradient(
begin: Alignment.topCenter,
end: Alignment.bottomCenter,
colors: [
Color(0xFF232526),
Color(0xFF414345),
],
),
),
child: RotatedBox(
quarterTurns: quarterTurns,
child: RawImage(
image: state.currentImage.value,
fit: BoxFit.contain,
filterQuality: FilterQuality.high,
width: barWidth,
height: containerHeight,
),
),
),
),
);
} else {
return PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: RawImage(
image: state.currentImage.value,
width: ScreenUtil().scaleWidth,
height: ScreenUtil().scaleHeight,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
),
),
),
),
);
}
}
}
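_buildVideoWidget derives the 320x240 preview's height from the bar width and the video aspect ratio, inverting the ratio when the rotation is an odd number of quarter turns. The same arithmetic as a plain Dart function, without ScreenUtil units:

// Sketch of the container-height calculation in _buildVideoWidget.
// When the video is rotated by 90°/270° the aspect ratio is inverted.
double containerHeightFor({
  required double barWidth,
  required int videoWidth,
  required int videoHeight,
  required int rotateAngle,
}) {
  final quarterTurns = rotateAngle ~/ 90;
  final isRotated = quarterTurns % 2 == 1;
  final aspect = isRotated ? videoWidth / videoHeight : videoHeight / videoWidth;
  return barWidth * aspect;
}

void main() {
  // 320x240 feed shown upright vs rotated a quarter turn.
  print(containerHeightFor(barWidth: 330, videoWidth: 320, videoHeight: 240, rotateAngle: 0));  // 247.5
  print(containerHeightFor(barWidth: 330, videoWidth: 320, videoHeight: 240, rotateAngle: 90)); // 440.0
}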

View File

@ -1,4 +1,5 @@
import 'package:event_bus/event_bus.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import '../main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
@ -195,6 +196,7 @@ class RogerThatLockInfoDataEvent {
class GetGatewayListRefreshUI {
GetGatewayListRefreshUI();
}
/// Agree to the privacy agreement
class AgreePrivacyAgreement {
AgreePrivacyAgreement();
@ -204,3 +206,10 @@ class AgreePrivacyAgreement {
class SuccessfulDistributionNetwork {
SuccessfulDistributionNetwork();
}
/// Update the lock list info group
class SetLockListInfoGroupEntity {
SetLockListInfoGroupEntity({required this.lockListInfoGroupEntity});
LockListInfoGroupEntity lockListInfoGroupEntity;
}
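SetLockListInfoGroupEntity is a plain event_bus event carrying the refreshed lock list. A minimal usage sketch with a locally constructed EventBus; the app presumably shares a single bus instance, which this diff does not show, and the entity is reduced to a stub here:

import 'package:event_bus/event_bus.dart';

// Simplified stand-ins for the real entities; only the bus wiring is the point.
class LockListInfoGroupEntity {
  LockListInfoGroupEntity(this.name);
  final String name;
}

class SetLockListInfoGroupEntity {
  SetLockListInfoGroupEntity({required this.lockListInfoGroupEntity});
  final LockListInfoGroupEntity lockListInfoGroupEntity;
}

void main() {
  final eventBus = EventBus();

  // Subscriber: e.g. the lock list page refreshing its UI.
  eventBus.on<SetLockListInfoGroupEntity>().listen((event) {
    print('lock list updated: ${event.lockListInfoGroupEntity.name}');
  });

  // Publisher: e.g. after remoteOpenLock reloads the cached list.
  eventBus.fire(SetLockListInfoGroupEntity(
    lockListInfoGroupEntity: LockListInfoGroupEntity('home'),
  ));
}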

View File

@ -130,7 +130,7 @@ dependencies:
video_decode_plugin:
git:
url: git@code.star-lock.cn:liyi/video_decode_plugin.git
ref: 38df1883f5108ec1ce590ba52318815333fded38
ref: 68bb4b7fb637ef5a78856908e1bc464f50fe967a
flutter_localizations:
sdk: flutter
@ -280,6 +280,8 @@ dependencies:
video_thumbnail: ^0.5.3
# App badge management
flutter_app_badger: ^1.3.0
# Slider (slide-to-act) support
slide_to_act: ^2.0.2
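slide_to_act ^2.0.2 is added but not referenced anywhere in this diff; it is presumably intended for a slide-to-unlock control. A hedged usage sketch follows; SlideAction and its onSubmit callback are recalled from the package's 2.x documentation, so verify the exact parameter set against the pinned version before relying on it:

// Hedged sketch of a slide-to-unlock control built on slide_to_act.
// The SlideAction/onSubmit surface is assumed from memory of the 2.x API.
import 'package:flutter/material.dart';
import 'package:slide_to_act/slide_to_act.dart';

class UnlockSlider extends StatelessWidget {
  const UnlockSlider({Key? key, required this.onUnlock}) : super(key: key);

  /// Invoked when the slider reaches the end, e.g. () => logic.remoteOpenLock().
  final Future<void> Function() onUnlock;

  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: const EdgeInsets.symmetric(horizontal: 16),
      child: SlideAction(
        text: 'Slide to unlock',
        onSubmit: onUnlock,
      ),
    );
  }
}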