diff --git a/assets/html/h264.html b/assets/html/h264.html
index fee7ac28..97143565 100644
--- a/assets/html/h264.html
+++ b/assets/html/h264.html
@@ -63,6 +63,7 @@
window.Flutter.postMessage('ready');
}
});
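+                // Start the muxer from a clean internal state before any frames are fed.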
+ jmuxer.reset();
console.log("JMuxer initialized.");
} catch (e) {
console.error("Error initializing JMuxer:", e);
diff --git a/lib/talk/starChart/star_chart_manage.dart b/lib/talk/starChart/star_chart_manage.dart
index 1e53b7f0..91617794 100644
--- a/lib/talk/starChart/star_chart_manage.dart
+++ b/lib/talk/starChart/star_chart_manage.dart
@@ -112,7 +112,7 @@ class StartChartManage {
  // Default expected data format for a call
TalkExpectReq _defaultTalkExpect = TalkExpectReq(
- videoType: [VideoTypeE.IMAGE],
+ videoType: [VideoTypeE.H264],
audioType: [AudioTypeE.G711],
);
@@ -1119,7 +1119,7 @@ class StartChartManage {
void reSetDefaultTalkExpect() {
_defaultTalkExpect = TalkExpectReq(
- videoType: [VideoTypeE.IMAGE],
+ videoType: [VideoTypeE.H264],
audioType: [AudioTypeE.G711],
);
}
@@ -1131,7 +1131,7 @@ class StartChartManage {
  /// Update the expected incoming data types
void sendOnlyImageVideoTalkExpectData() {
final talkExpectReq = TalkExpectReq(
- videoType: [VideoTypeE.IMAGE],
+ videoType: [VideoTypeE.H264],
audioType: [],
);
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
@@ -1141,7 +1141,7 @@ class StartChartManage {
  /// Update the expected incoming data types
void sendImageVideoAndG711AudioTalkExpectData() {
final talkExpectReq = TalkExpectReq(
- videoType: [VideoTypeE.IMAGE],
+ videoType: [VideoTypeE.H264],
audioType: [AudioTypeE.G711],
);
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
diff --git a/lib/talk/starChart/webView/h264_web_logic.dart b/lib/talk/starChart/webView/h264_web_logic.dart
index 02b1c2ed..e8e28784 100644
--- a/lib/talk/starChart/webView/h264_web_logic.dart
+++ b/lib/talk/starChart/webView/h264_web_logic.dart
@@ -1,16 +1,44 @@
-import 'dart:math';
+import 'dart:async';
+import 'dart:io';
+import 'dart:ui' as ui;
+import 'dart:math'; // Import the math package to use sqrt
+import 'package:flutter/foundation.dart';
+import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
+import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
+import 'package:flutter_screen_recording/flutter_screen_recording.dart';
+import 'package:flutter_voice_processor/flutter_voice_processor.dart';
+import 'package:gallery_saver/gallery_saver.dart';
+import 'package:get/get.dart';
+import 'package:image_gallery_saver/image_gallery_saver.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
+import 'package:star_lock/login/login/entity/LoginEntity.dart';
+import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
+import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
+import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
+import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
+import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
+import 'package:star_lock/network/api_repository.dart';
+import 'package:star_lock/talk/call/g711.dart';
+import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
+import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
+import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
-import 'package:star_lock/tools/baseGetXController.dart';
+import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:webview_flutter/webview_flutter.dart';
+import 'package:star_lock/tools/baseGetXController.dart';
+
class H264WebViewLogic extends BaseGetXController {
final H264WebViewState state = H264WebViewState();
+ final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
+
@override
void onInit() {
super.onInit();
@@ -25,10 +53,36 @@ class H264WebViewLogic extends BaseGetXController {
},
);
+ state.isShowLoading.value = true;
  // Load the local HTML
_loadLocalHtml();
  // Set up the frame-stream listener
_createFramesStreamListen();
+
+ _startListenTalkStatus();
+ state.talkStatus.value = state.startChartTalkStatus.status;
+    // Initialize the audio player
+ _initFlutterPcmSound();
+    // Initialize the audio recorder
+ _initAudioRecorder();
+ }
+
+  /// Initialize the audio recorder
+ void _initAudioRecorder() {
+ state.voiceProcessor = VoiceProcessor.instance;
+ }
+
+  /// Initialize the PCM audio player
+ void _initFlutterPcmSound() {
+ const int sampleRate = 8000;
+ FlutterPcmSound.setLogLevel(LogLevel.none);
+ FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
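+    // The player is configured for 8 kHz mono PCM, matching the G.711 audio the talk channel expects.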
+    // Set the feed threshold (remaining frames below which the plugin asks for more samples)
+    if (Platform.isAndroid) {
+      FlutterPcmSound.setFeedThreshold(1024); // Android-specific threshold
+    } else {
+      FlutterPcmSound.setFeedThreshold(2000); // Threshold for other platforms
+    }
+ }
}
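+
+  // Playback sketch (an assumption, not wired up in this change): incoming G711
+  // frames would be decoded back to 16-bit PCM and fed to the player, e.g.
+  //   FlutterPcmSound.feed(PcmArrayInt16.fromList(decodedPcm));
+  // whenever the feed-threshold callback reports that the buffer is running low.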
void _createFramesStreamListen() async {
@@ -65,11 +119,316 @@ class H264WebViewLogic extends BaseGetXController {
    // Original send logic
String jsCode = "feedDataFromFlutter($buffer);";
await state.webViewController.runJavaScript(jsCode);
+
+ if (state.isShowLoading.isTrue) {
+ await Future.delayed(Duration(seconds: 1));
+ state.isShowLoading.value = false;
+ }
+ }
+
+  /// Listen for talk status changes
+ void _startListenTalkStatus() {
+ state.startChartTalkStatus.statusStream.listen((talkStatus) {
+ state.talkStatus.value = talkStatus;
+ switch (talkStatus) {
+ case TalkStatus.rejected:
+ case TalkStatus.hangingUpDuring:
+ case TalkStatus.notTalkData:
+ case TalkStatus.notTalkPing:
+ case TalkStatus.end:
+ _handleInvalidTalkStatus();
+ break;
+ case TalkStatus.answeredSuccessfully:
+          state.oneMinuteTimeTimer?.cancel(); // Cancel any previous timer
+          state.oneMinuteTimeTimer =
+ Timer.periodic(const Duration(seconds: 1), (Timer t) {
+ if (state.isShowLoading.isFalse) {
+ state.oneMinuteTime.value++;
+ if (state.oneMinuteTime.value >= 60) {
+                t.cancel(); // Stop the timer
+ state.oneMinuteTime.value = 0;
+ }
+ }
+ });
+ break;
+ default:
+          // Other statuses need no special handling
+ break;
+ }
+ });
+ }
+
+  /// Update the expected-data request (mute / unmute)
+ void updateTalkExpect() {
+ TalkExpectReq talkExpectReq = TalkExpectReq();
+ state.isOpenVoice.value = !state.isOpenVoice.value;
+ if (!state.isOpenVoice.value) {
+ talkExpectReq = TalkExpectReq(
+        videoType: [VideoTypeE.H264],
+ audioType: [],
+ );
+ showToast('已静音'.tr);
+ } else {
+ talkExpectReq = TalkExpectReq(
+        videoType: [VideoTypeE.H264],
+ audioType: [AudioTypeE.G711],
+ );
+ }
+
+    // Apply the updated expectation and restart the expect-message timer
+ StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
+ talkExpect: talkExpectReq);
+ }
+
+  /// Handle invalid / terminated talk statuses
+ void _handleInvalidTalkStatus() {}
+
+  /// Capture a screenshot and save it to the photo gallery
+  Future<void> captureAndSavePng() async {
+ try {
+ if (state.globalKey.currentContext == null) {
+ AppLog.log('截图失败: 未找到当前上下文');
+ return;
+ }
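+      // globalKey is expected to be attached to a RepaintBoundary that wraps
+      // the video view; otherwise the capture below cannot succeed.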
+ final RenderRepaintBoundary boundary = state.globalKey.currentContext!
+ .findRenderObject()! as RenderRepaintBoundary;
+ final ui.Image image = await boundary.toImage();
+ final ByteData? byteData =
+ await image.toByteData(format: ui.ImageByteFormat.png);
+
+ if (byteData == null) {
+ AppLog.log('截图失败: 图像数据为空');
+ return;
+ }
+ final Uint8List pngBytes = byteData.buffer.asUint8List();
+
+      // Get the app's documents directory
+ final Directory directory = await getApplicationDocumentsDirectory();
+ final String imagePath = '${directory.path}/screenshot.png';
+
+      // Write the screenshot to a file
+ final File imgFile = File(imagePath);
+ await imgFile.writeAsBytes(pngBytes);
+
+      // Save the screenshot to the photo gallery
+ await ImageGallerySaver.saveFile(imagePath);
+
+ AppLog.log('截图保存路径: $imagePath');
+ showToast('截图已保存到相册'.tr);
+ } catch (e) {
+ AppLog.log('截图失败: $e');
+ }
+ }
+
+  // Send the answer command
+ void initiateAnswerCommand() {
+ StartChartManage().startTalkAcceptTimer();
+ }
+
+  /// Start recording audio (push-to-talk)
+  Future<void> startProcessingAudio() async {
+    // Register the audio-frame and error listeners
+ state.voiceProcessor?.addFrameListener(_onFrame);
+ state.voiceProcessor?.addErrorListener(_onError);
+ try {
+ if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
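+        // Capture 16-bit PCM at state.sampleRate = 8 kHz in state.frameLength = 320-sample frames (40 ms per frame)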
+ await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
+ final bool? isRecording = await state.voiceProcessor?.isRecording();
+ state.isRecordingAudio.value = isRecording!;
+ state.startRecordingAudioTime.value = DateTime.now();
+ } else {
+ // state.errorMessage.value = 'Recording permission not granted';
+ }
+ } on PlatformException catch (ex) {
+ // state.errorMessage.value = 'Failed to start recorder: $ex';
+ }
+ state.isOpenVoice.value = false;
+ }
+
+  /// Stop recording audio
+  Future<void> stopProcessingAudio() async {
+ try {
+ await state.voiceProcessor?.stop();
+ state.voiceProcessor?.removeFrameListener(_onFrame);
+ state.udpSendDataFrameNumber = 0;
+      // Record the end time
+ state.endRecordingAudioTime.value = DateTime.now();
+
+      // Compute the recording duration
+ final duration = state.endRecordingAudioTime.value!
+ .difference(state.startRecordingAudioTime.value!);
+
+ state.recordingAudioTime.value = duration.inSeconds;
+ } on PlatformException catch (ex) {
+ // state.errorMessage.value = 'Failed to stop recorder: $ex';
+ } finally {
+ final bool? isRecording = await state.voiceProcessor?.isRecording();
+ state.isRecordingAudio.value = isRecording!;
+ state.isOpenVoice.value = true;
+ }
+ }
+
+  /// Handle one captured audio frame
+  Future<void> _onFrame(List<int> frame) async {
+    // Preprocessing and transcoding could be offloaded to a compute isolate:
+ // final processedFrame = await compute(preprocessAudio, frame);
+ // final list = listLinearToALaw(processedFrame);
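+    // Noise-gate the raw 16-bit PCM, then transcode it to 8-bit G.711 A-law
+    // to match the G711 content type sent below.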
+    final List<int> processedFrame = preprocessAudio(frame);
+    final List<int> list = listLinearToALaw(processedFrame);
+
+ final int ms = DateTime.now().millisecondsSinceEpoch -
+ state.startRecordingAudioTime.value.millisecondsSinceEpoch;
+
+    // Send the audio data over the UDP talk channel
+ await StartChartManage().sendTalkDataMessage(
+ talkData: TalkData(
+ content: list,
+ contentType: TalkData_ContentTypeE.G711,
+ durationMs: ms,
+ ),
+ );
+ }
+
+  /// Hang up
+ void udpHangUpAction() async {
+ if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
+      // Hang up if the call is in progress
+ StartChartManage().startTalkHangupMessageTimer();
+ } else {
+      // Otherwise reject the call
+ StartChartManage().startTalkRejectMessageTimer();
+ }
+ Get.back();
+ }
+
+  /// Remote unlock
+  Future<void> remoteOpenLock() async {
+ final lockPeerId = StartChartManage().lockPeerId;
+ final lockListPeerId = StartChartManage().lockListPeerId;
+ int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
+
+    // If the lock list carries peerIds there are multiple locks; walk the list
+    // and use the entry whose peerId matches to resolve the lockId.
+ lockListPeerId.forEach((element) {
+ if (element.network?.peerId == lockPeerId) {
+ lockId = element.lockId ?? 0;
+ }
+ });
+
+ final LockSetInfoEntity lockSetInfoEntity =
+ await ApiRepository.to.getLockSettingInfoData(
+ lockId: lockId.toString(),
+ );
+ if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
+ if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
+ lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
+ final LoginEntity entity = await ApiRepository.to
+ .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
+ if (entity.errorCode!.codeIsSuccessful) {
+ showToast('已开锁'.tr);
+ StartChartManage().lockListPeerId = [];
+ }
+ } else {
+ showToast('该锁的远程开锁功能未启用'.tr);
+ }
+ }
+ }
+
+  List<int> preprocessAudio(List<int> pcmList) {
+    // Simple noise reduction
+    final List<int> processedList = [];
+    for (int pcmVal in pcmList) {
+      // Basic noise gate: zero out samples whose magnitude is below the threshold
+ if (pcmVal.abs() < 200) {
+ pcmVal = 0;
+ }
+ processedList.add(pcmVal);
+ }
+ return processedList;
+ }
+
+  List<int> listLinearToALaw(List<int> pcmList) {
+    final List<int> aLawList = [];
+    for (int pcmVal in pcmList) {
+ final int aLawVal = linearToALaw(pcmVal);
+ aLawList.add(aLawVal);
+ }
+ return aLawList;
+ }
+
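+  /// Encodes one 16-bit signed PCM sample as an 8-bit G.711 A-law byte:
+  /// 1 sign bit, 3 segment (exponent) bits and 4 quantization bits, with the
+  /// result XOR-ed against 0x55/0xD5 as required by the A-law bit-inversion rule.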
+  int linearToALaw(int pcmVal) {
+    const int quantMask = 0xF; // Lower 4 bits: quantization step within the segment
+    const int segShift = 4; // Segment number occupies bits 4-6
+
+    int mask;
+
+    // Handle sign (the sign bit and the A-law bit inversion are applied via XOR with the mask)
+    if (pcmVal >= 0) {
+      mask = 0xD5; // sign bit = 1
+    } else {
+      mask = 0x55; // sign bit = 0
+      pcmVal = -pcmVal - 8;
+    }
+
+    // Determine segment
+    final int seg = search(pcmVal);
+
+    // Combine the sign, segment, and quantization bits
+    if (seg >= 8) {
+      return 0x7F ^ mask; // Out of range: clamp to the maximum value
+    }
+    int aLawVal = seg << segShift;
+    if (seg < 2) {
+      aLawVal |= (pcmVal >> 4) & quantMask;
+    } else {
+      aLawVal |= (pcmVal >> (seg + 3)) & quantMask;
+    }
+    return aLawVal ^ mask;
+  }
+
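+  /// Returns the index of the first segment whose upper bound is >= [val],
+  /// or 8 if the value exceeds the largest segment.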
+ int search(int val) {
+    final List<int> table = [
+ 0xFF, // Segment 0
+ 0x1FF, // Segment 1
+ 0x3FF, // Segment 2
+ 0x7FF, // Segment 3
+ 0xFFF, // Segment 4
+ 0x1FFF, // Segment 5
+ 0x3FFF, // Segment 6
+ 0x7FFF // Segment 7
+ ];
+ const int size = 8;
+ for (int i = 0; i < size; i++) {
+ if (val <= table[i]) {
+ return i;
+ }
+ }
+ return size;
+ }
+
+  /// Error listener for the voice processor
+ void _onError(VoiceProcessorException error) {
+ AppLog.log(error.message!);
}
@override
  void onClose() {
    super.onClose();
StartChartManage().startTalkHangupMessageTimer();
+ state.webViewController.clearCache();
+ state.webViewController.reload();
+ state.oneMinuteTimeTimer?.cancel();
+ state.oneMinuteTimeTimer = null;
+ stopProcessingAudio();
+ StartChartManage().reSetDefaultTalkExpect();
}
}
diff --git a/lib/talk/starChart/webView/h264_web_view.dart b/lib/talk/starChart/webView/h264_web_view.dart
index a83bd21d..51aa30d8 100644
--- a/lib/talk/starChart/webView/h264_web_view.dart
+++ b/lib/talk/starChart/webView/h264_web_view.dart
@@ -1,11 +1,13 @@
import 'dart:async';
import 'dart:convert';
+import 'dart:math';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart' show ByteData, Uint8List, rootBundle;
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/app_settings/app_settings.dart';
+import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
@@ -19,16 +21,400 @@ class H264WebView extends StatefulWidget {
_H264WebViewState createState() => _H264WebViewState();
}
-class _H264WebViewState extends State<H264WebView> {
+class _H264WebViewState extends State<H264WebView>
+    with TickerProviderStateMixin {
final H264WebViewLogic logic = Get.put(H264WebViewLogic());
  final H264WebViewState state = Get.find<H264WebViewLogic>().state;
+ @override
+ void initState() {
+ super.initState();
+ state.animationController = AnimationController(
+      vsync: this, // This widget provides the TickerProvider
+ duration: const Duration(seconds: 1),
+ );
+
+ state.animationController.repeat();
+    // The status listener is called when the animation completes, is dismissed, or changes direction
+ state.animationController.addStatusListener((AnimationStatus status) {
+ if (status == AnimationStatus.completed) {
+ state.animationController.reset();
+ state.animationController.forward();
+ } else if (status == AnimationStatus.dismissed) {
+ state.animationController.reset();
+ state.animationController.forward();
+ }
+ });
+ }
+
@override
Widget build(BuildContext context) {
- return Stack(
- children: [
- WebViewWidget(controller: state.webViewController),
- ],
+ return WillPopScope(
+ onWillPop: () async {
+        // Returning false blocks leaving this page with the back gesture
+ return false;
+ },
+ child: SizedBox(
+ width: 1.sw,
+ height: 1.sh,
+ child: Stack(
+ alignment: Alignment.center,
+ children: [
+ Obx(() {
+ final double screenWidth = MediaQuery.of(context).size.width;
+ final double screenHeight = MediaQuery.of(context).size.height;
+ return state.isShowLoading.value
+ ? Image.asset(
+ 'images/main/monitorBg.png',
+ width: screenWidth,
+ height: screenHeight,
+ fit: BoxFit.cover,
+ )
+ : WebViewWidget(
+ controller: state.webViewController,
+ );
+ }),
+ Obx(
+ () => state.isShowLoading.value
+ ? Positioned(
+ bottom: 310.h,
+ child: Text(
+ '正在创建安全连接...'.tr,
+ style: TextStyle(color: Colors.black, fontSize: 26.sp),
+ ),
+ )
+ : Container(),
+ ),
+ Obx(
+ () => state.isShowLoading.isFalse
+ ? Positioned(
+ top: ScreenUtil().statusBarHeight + 75.h,
+ width: 1.sw,
+ child: Obx(
+ () {
+ final String sec = (state.oneMinuteTime.value % 60)
+ .toString()
+ .padLeft(2, '0');
+ final String min = (state.oneMinuteTime.value ~/ 60)
+ .toString()
+ .padLeft(2, '0');
+ return Row(
+ mainAxisAlignment: MainAxisAlignment.center,
+ children: [
+ Text(
+ '$min:$sec',
+ style: TextStyle(
+ fontSize: 26.sp, color: Colors.white),
+ ),
+ ],
+ );
+ },
+ ),
+ )
+ : Container(),
+ ),
+ Positioned(
+ bottom: 10.w,
+ child: Container(
+ width: 1.sw - 30.w * 2,
+ // height: 300.h,
+ margin: EdgeInsets.all(30.w),
+ decoration: BoxDecoration(
+ color: Colors.black.withOpacity(0.2),
+ borderRadius: BorderRadius.circular(20.h)),
+ child: Column(
+ children: [
+ SizedBox(height: 20.h),
+ bottomTopBtnWidget(),
+ SizedBox(height: 20.h),
+ bottomBottomBtnWidget(),
+ SizedBox(height: 20.h),
+ ],
+ ),
+ ),
+ ),
+ Obx(() => state.isShowLoading.isTrue
+ ? buildRotationTransition()
+ : Container()),
+ Obx(() => state.isLongPressing.value
+ ? Positioned(
+ top: 80.h,
+ left: 0,
+ right: 0,
+ child: Center(
+ child: Container(
+ padding: EdgeInsets.all(10.w),
+ decoration: BoxDecoration(
+ color: Colors.black.withOpacity(0.7),
+ borderRadius: BorderRadius.circular(10.w),
+ ),
+ child: Row(
+ mainAxisSize: MainAxisSize.min,
+ children: [
+ Icon(Icons.mic, color: Colors.white, size: 24.w),
+ SizedBox(width: 10.w),
+ Text(
+ '正在说话...'.tr,
+ style: TextStyle(
+ fontSize: 20.sp, color: Colors.white),
+ ),
+ ],
+ ),
+ ),
+ ),
+ )
+ : Container()),
+ ],
+ ),
+ ),
);
}
+
+ Widget bottomTopBtnWidget() {
+ return Row(mainAxisAlignment: MainAxisAlignment.center, children: [
+      // Toggle sound on/off
+ GestureDetector(
+ onTap: () {
+ if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
+            // Toggle sound on/off
+ logic.updateTalkExpect();
+ }
+ },
+ child: Container(
+ width: 50.w,
+ height: 50.w,
+ padding: EdgeInsets.all(5.w),
+ child: Obx(() => Image(
+ width: 40.w,
+ height: 40.w,
+ image: state.isOpenVoice.value
+ ? const AssetImage(
+ 'images/main/icon_lockDetail_monitoringOpenVoice.png')
+ : const AssetImage(
+ 'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
+ ),
+ ),
+ SizedBox(width: 50.w),
+      // Screenshot
+ GestureDetector(
+ onTap: () async {
+ if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
+ await logic.captureAndSavePng();
+ }
+ },
+ child: Container(
+ width: 50.w,
+ height: 50.w,
+ padding: EdgeInsets.all(5.w),
+ child: Image(
+ width: 40.w,
+ height: 40.w,
+ image: const AssetImage(
+ 'images/main/icon_lockDetail_monitoringScreenshot.png')),
+ ),
+ ),
+ SizedBox(width: 50.w),
+      // Screen recording
+ GestureDetector(
+ onTap: () async {
+ logic.showToast('功能暂未开放'.tr);
+ // if (
+ // state.talkStatus.value == TalkStatus.answeredSuccessfully) {
+ // if (state.isRecordingScreen.value) {
+ // await logic.stopRecording();
+ // } else {
+ // await logic.startRecording();
+ // }
+ // }
+ },
+ child: Container(
+ width: 50.w,
+ height: 50.w,
+ padding: EdgeInsets.all(5.w),
+ child: Image(
+ width: 40.w,
+ height: 40.w,
+ fit: BoxFit.fill,
+ image: const AssetImage(
+ 'images/main/icon_lockDetail_monitoringScreenRecording.png'),
+ ),
+ ),
+ ),
+ SizedBox(width: 50.w),
+ GestureDetector(
+ onTap: () {
+ logic.showToast('功能暂未开放'.tr);
+ },
+ child: Image(
+ width: 28.w,
+ height: 28.w,
+ fit: BoxFit.fill,
+ image: const AssetImage('images/main/icon_lockDetail_rectangle.png'),
+ ),
+ ),
+ ]);
+ }
+
+ Widget bottomBottomBtnWidget() {
+ return Row(
+ mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+ children: [
+        // Answer / push-to-talk
+ Obx(
+ () => bottomBtnItemWidget(
+ getAnswerBtnImg(),
+ getAnswerBtnName(),
+ Colors.white,
+ longPress: () async {
+ if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
+              // Start recording
+ logic.startProcessingAudio();
+ state.isLongPressing.value = true;
+ }
+ },
+ longPressUp: () async {
+              // Stop recording
+ logic.stopProcessingAudio();
+ state.isLongPressing.value = false;
+ },
+ onClick: () async {
+ if (state.talkStatus.value ==
+ TalkStatus.passiveCallWaitingAnswer) {
+                // Answer the call
+ logic.initiateAnswerCommand();
+ }
+ },
+ ),
+ ),
+ bottomBtnItemWidget(
+ 'images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red,
+ onClick: () {
+            // Hang up
+ logic.udpHangUpAction();
+ }),
+ bottomBtnItemWidget(
+ 'images/main/icon_lockDetail_monitoringUnlock.png',
+ '开锁'.tr,
+ AppColors.mainColor,
+ onClick: () {
+ // if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
+ // state.listData.value.length > 0) {
+ // logic.udpOpenDoorAction();
+ logic.remoteOpenLock();
+ // }
+ // if (UDPManage().remoteUnlock == 1) {
+ // logic.udpOpenDoorAction();
+ // showDeletPasswordAlertDialog(context);
+ // } else {
+ // logic.showToast('请在锁设置中开启远程开锁'.tr);
+ // }
+ },
+ )
+ ]);
+ }
+
+ String getAnswerBtnImg() {
+ switch (state.talkStatus.value) {
+ case TalkStatus.passiveCallWaitingAnswer:
+ return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
+ case TalkStatus.answeredSuccessfully:
+ case TalkStatus.proactivelyCallWaitingAnswer:
+ return 'images/main/icon_lockDetail_monitoringUnTalkback.png';
+ default:
+ return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
+ }
+ }
+
+ String getAnswerBtnName() {
+ switch (state.talkStatus.value) {
+ case TalkStatus.passiveCallWaitingAnswer:
+ return '接听'.tr;
+ case TalkStatus.proactivelyCallWaitingAnswer:
+ case TalkStatus.answeredSuccessfully:
+ return '长按说话'.tr;
+ default:
+ return '接听'.tr;
+ }
+ }
+
+ Widget bottomBtnItemWidget(
+ String iconUrl,
+ String name,
+ Color backgroundColor, {
+ required Function() onClick,
+ Function()? longPress,
+ Function()? longPressUp,
+ }) {
+ double wh = 80.w;
+ return GestureDetector(
+ onTap: onClick,
+ onLongPress: longPress,
+ onLongPressUp: longPressUp,
+ child: SizedBox(
+ height: 160.w,
+ width: 140.w,
+ child: Column(
+ crossAxisAlignment: CrossAxisAlignment.center,
+ children: [
+ Container(
+ width: wh,
+ height: wh,
+ constraints: BoxConstraints(
+ minWidth: wh,
+ ),
+ decoration: BoxDecoration(
+ color: backgroundColor,
+ borderRadius: BorderRadius.circular((wh + 10.w * 2) / 2),
+ ),
+ padding: EdgeInsets.all(20.w),
+ child: Image.asset(iconUrl, fit: BoxFit.fitWidth),
+ ),
+ SizedBox(height: 20.w),
+ Text(
+ name,
+ style: TextStyle(fontSize: 20.sp, color: Colors.white),
+              textAlign: TextAlign.center,
+              maxLines: 2, // Allow up to two lines of text
+ )
+ ],
+ ),
+ ),
+ );
+ }
+
+  // Rotation (loading) animation
+ Widget buildRotationTransition() {
+ return Positioned(
+ left: ScreenUtil().screenWidth / 2 - 220.w / 2,
+ top: ScreenUtil().screenHeight / 2 - 220.w / 2 - 150.h,
+ child: GestureDetector(
+ child: RotationTransition(
+          // Rotation center of the animation
+ alignment: Alignment.center,
+          // Animation controller driving the rotation
+ turns: state.animationController,
+          // The child view that gets animated
+ child: AnimatedOpacity(
+ opacity: 0.5,
+ duration: const Duration(seconds: 2),
+ child: Image.asset(
+ 'images/main/realTime_connecting.png',
+ width: 220.w,
+ height: 220.w,
+ ),
+ ),
+ ),
+ onTap: () {
+ state.animationController.forward();
+ },
+ ),
+ );
+ }
+ @override
+ void dispose() {
+    state.animationController.dispose(); // Make sure the controller is released
+    super.dispose();
+ }
}
diff --git a/lib/talk/starChart/webView/h264_web_view_state.dart b/lib/talk/starChart/webView/h264_web_view_state.dart
index 7a79d89a..2ae11041 100644
--- a/lib/talk/starChart/webView/h264_web_view_state.dart
+++ b/lib/talk/starChart/webView/h264_web_view_state.dart
@@ -1,12 +1,52 @@
+import 'dart:async';
+
+import 'package:flutter/cupertino.dart';
+import 'package:flutter_voice_processor/flutter_voice_processor.dart';
+import 'package:get/get.dart';
+import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
+import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';
+import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:webview_flutter/webview_flutter.dart';
class H264WebViewState {
+ GlobalKey globalKey = GlobalKey();
+  int udpSendDataFrameNumber = 0; // Frame sequence number
+ late AnimationController animationController;
  // WebView controller
late final WebViewController webViewController;
- // Singleton handler class for the talk data stream
- final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
+  // The singleton startChartTalkStatus instance
+  final StartChartTalkStatus startChartTalkStatus =
+      StartChartTalkStatus.instance;
+  Rx<TalkStatus> talkStatus = TalkStatus.none.obs; // StarChart talk status
+ RxBool isShowLoading = true.obs;
+
+  Timer? oneMinuteTimeTimer; // Timer that closes the current screen after 60 seconds
+  RxInt oneMinuteTime = 0.obs; // Seconds counted by the timer
+
+  RxBool isLongPressing = false.obs; // Whether push-to-talk (long press) is active
+ final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
+  RxInt lastFrameTimestamp = 0.obs; // Timestamp of the previous frame, used to judge network conditions
+  Rx<NetworkStatus> networkStatus =
+      NetworkStatus.normal.obs; // Network status: 0 normal, 1 stutter, 2 latency, 3 packet loss
+  RxInt alertCount = 0.obs; // Network-status alert counter
+  RxInt maxAlertNumber = 3.obs; // Maximum number of network-status alerts
+  RxBool isOpenVoice = true.obs; // Whether sound is on
+  RxBool isRecordingScreen = false.obs; // Whether screen recording is in progress
+  RxBool isRecordingAudio = false.obs; // Whether audio recording is in progress
+  Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // Recording start time
+  Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; // Recording end time
+  RxInt recordingAudioTime = 0.obs; // Recording duration in seconds
+  RxDouble fps = 0.0.obs; // FPS counter
+  late VoiceProcessor? voiceProcessor; // Voice processor used for recording
+  final int frameLength = 320; // Audio frame length: 320 samples (40 ms at 8 kHz)
+  final int sampleRate = 8000; // Recording sample rate: 8000 Hz
+  List<int> recordingAudioAllFrames = []; // All recorded audio frames
+  List<int> lockRecordingAudioAllFrames = []; // All audio frames received from the lock
+  RxInt rotateAngle = 0.obs; // Rotation angle (in radians)
+  RxBool hasAudioData = false.obs; // Whether audio data has been received
+  RxInt lastAudioTimestamp = 0.obs; // Timestamp of the most recently received audio data
}