fix:增加页面播放逻辑、调整proto文件

This commit is contained in:
liyi 2024-12-28 14:58:01 +08:00
parent 133f863448
commit c865db7a9f
13 changed files with 302 additions and 97 deletions

View File

@ -1011,6 +1011,7 @@
"请在锁设置中开启远程开锁": "Please enable remote unlocking in the lock settings",
"接听": "Answer",
"截图已保存到相册": "Screenshot saved to album",
"录屏已保存到相册": "Screen recording file saved to album",
"添加遥控": "Add remote control",
"已连接到锁,请按遥控": "Connected to the lock, please press the remote control",
"遥控号": "Remote control number",

View File

@ -1014,6 +1014,7 @@
"请在锁设置中开启远程开锁": "请在锁设置中开启远程开锁",
"接听": "接听",
"截图已保存到相册": "截图已保存到相册",
"录屏已保存到相册": "录屏已保存到相册",
"添加遥控": "添加遥控",
"已连接到锁,请按遥控": "已连接到锁,请按遥控",
"遥控号": "遥控号",

View File

@ -1013,6 +1013,7 @@
"请在锁设置中开启远程开锁": "请在锁设置中开启远程开锁",
"接听": "接听",
"截图已保存到相册": "截图已保存到相册",
"录屏已保存到相册": "录屏已保存到相册",
"添加遥控": "添加遥控",
"已连接到锁,请按遥控": "已连接到锁,请按遥控",
"遥控号": "遥控号",

View File

@ -39,6 +39,9 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
stopRingtone();
//
talkStatus.setAnsweredSuccessfully();
//
talkeRequestOverTimeTimerManager.receiveMessage();
talkeRequestOverTimeTimerManager.dispose();
}
}

View File

@ -39,6 +39,10 @@ class UdpTalkHangUpHandler extends ScpMessageBaseHandle
startChartManage.stopTalkExpectMessageTimer();
talkStatus.setHangingUpDuring();
stopRingtone();
//
talkeRequestOverTimeTimerManager.receiveMessage();
talkeRequestOverTimeTimerManager.dispose();
}
@override

View File

@ -18,16 +18,13 @@ class UdpTalkRejectHandler extends ScpMessageBaseHandle
@override
void handleReq(ScpMessage scpMessage) {
//
startChartManage.sendGenericRespSuccessMessage(
ToPeerId: scpMessage.FromPeerId!,
FromPeerId: scpMessage.ToPeerId!,
PayloadType: scpMessage.PayloadType!,
);
startChartManage.stopTalkPingMessageTimer();
startChartManage.stopTalkExpectMessageTimer();
//
replySuccessMessage(scpMessage);
//
stopRingtone();
//
talkStatus.setRejected();
}
@override
@ -35,6 +32,11 @@ class UdpTalkRejectHandler extends ScpMessageBaseHandle
startChartManage.stopTalkPingMessageTimer();
startChartManage.stopTalkExpectMessageTimer();
stopRingtone();
//
talkeRequestOverTimeTimerManager.receiveMessage();
talkeRequestOverTimeTimerManager.dispose();
}
@override

View File

@ -34,6 +34,16 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
startChartManage.ToPeerId = scpMessage.FromPeerId!;
//
_talkRequestEvent(talkObjectName: talkReq.callerName);
//
talkeRequestOverTimeTimerManager.startTimer();
talkeRequestOverTimeTimerManager.setOnTimeout(() {
if (talkStatus.status == TalkStatus.waitingAnswer) {
//
startChartManage.sendTalkRejectMessage();
Get.back();
}
});
}
@override

View File

@ -36,6 +36,11 @@ class ScpMessageBaseHandle {
final audioManager = AudioPlayerManager();
//
final talkeRequestOverTimeTimerManager = OverTimeTimerManager(
timeoutInSeconds: 30,
);
//
final talkePingOverTimeTimerManager = OverTimeTimerManager(
timeoutInSeconds: 260,

View File

@ -86,7 +86,7 @@ class StartChartManage {
final int _maxPayloadSize = 8 * 1024; //
//
TalkExpectReq defaultTalkExpect = TalkExpectReq(
TalkExpectReq _defaultTalkExpect = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
@ -857,7 +857,7 @@ class StartChartManage {
(Timer timer) {
//
sendTalkExpectMessage(
talkExpect: defaultTalkExpect,
talkExpect: _defaultTalkExpect,
);
},
);
@ -971,9 +971,16 @@ class StartChartManage {
talkExpectTimer = null; //
}
//
/// Restarts the periodic talk-expect message timer.
///
/// Stops any running timer first so that a newly configured expect payload
/// (presumably `_defaultTalkExpect`, set via `changeTalkExpectDataType` —
/// confirm in full file) takes effect on the next tick.
void reStartTalkExpectMessageTimer() {
  stopTalkExpectMessageTimer();
  startTalkExpectTimer();
}
///
void changeTalkExpectDataType({required TalkExpectReq talkExpect}) {
defaultTalkExpect = talkExpect;
_defaultTalkExpect = talkExpect;
reStartTalkExpectMessageTimer();
}
///

View File

@ -1,34 +1,36 @@
import 'dart:async';
import 'dart:io';
import 'dart:math';
import 'dart:ui' as ui;
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/blue/io_tool/manager_event_bus.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pbenum.dart';
import 'package:star_lock/talk/startChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/startChart/start_chart_manage.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
import '../../../../talk/call/g711.dart';
import '../../../../talk/udp/udp_manage.dart';
import '../../../../talk/udp/udp_senderManage.dart';
import '../../../../tools/baseGetXController.dart';
import '../../../../tools/eventBusEventManage.dart';
class TalkViewLogic extends BaseGetXController {
final TalkViewState state = TalkViewState();
Timer? _syncTimer;
int _startTime = 0;
final int bufferSize = 22; //
final int bufferSize = 20; //
final List<int> frameTimestamps = [];
int frameIntervalMs = 45; // 4522FPS
int minFrameIntervalMs = 30; // 33 FPS
@ -82,22 +84,27 @@ class TalkViewLogic extends BaseGetXController {
void _startListenTalkData() {
state.talkDataRepository.talkDataStream.listen((talkData) {
final contentType = talkData.contentType;
final currentTimestamp = DateTime.now().millisecondsSinceEpoch;
///
if (state.startChartTalkStatus.status != TalkStatus.duringCall) {
return;
}
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
// state.audioBuffer.add(talkData);
if (state.audioBuffer.length < 60) {
// 60
if (state.audioBuffer.length < bufferSize) {
state.audioBuffer.add(talkData);
}
break;
case TalkData_ContentTypeE.Image:
// state.videoBuffer.add(talkData);
//
if (state.videoBuffer.length < 60) {
// 60
if (state.videoBuffer.length < bufferSize) {
state.videoBuffer.add(talkData);
}
///
updateNetworkStatus(currentTimestamp);
break;
}
});
@ -136,6 +143,7 @@ class TalkViewLogic extends BaseGetXController {
state.listData.value = Uint8List.fromList(talkData.content);
}
///
void _startPlayback() {
int frameIntervalMs = 45; // 4522FPS
@ -154,7 +162,15 @@ class TalkViewLogic extends BaseGetXController {
//
if (state.audioBuffer.isNotEmpty &&
state.audioBuffer.first.durationMs <= elapsedTime) {
_playAudioData(state.audioBuffer.removeAt(0));
//
if (state.isOpenVoice.value) {
_playAudioData(state.audioBuffer.removeAt(0));
} else {
//
//
//
state.audioBuffer.removeAt(0);
}
}
//
@ -165,10 +181,10 @@ class TalkViewLogic extends BaseGetXController {
if (state.videoBuffer.length > 1) {
state.videoBuffer.removeAt(0);
} else {
//
frameTimestamps.add(DateTime.now().millisecondsSinceEpoch);
// FPS
_updateFps(frameTimestamps);
// //
// frameTimestamps.add(DateTime.now().millisecondsSinceEpoch);
// // FPS
// _updateFps(frameTimestamps);
_playVideoData(state.videoBuffer.removeAt(0));
}
}
@ -195,7 +211,15 @@ class TalkViewLogic extends BaseGetXController {
//
if (state.audioBuffer.isNotEmpty &&
state.audioBuffer.first.durationMs <= elapsedTime) {
_playAudioData(state.audioBuffer.removeAt(0));
//
if (state.isOpenVoice.value) {
_playAudioData(state.audioBuffer.removeAt(0));
} else {
//
//
//
state.audioBuffer.removeAt(0);
}
}
//
@ -206,19 +230,47 @@ class TalkViewLogic extends BaseGetXController {
if (state.videoBuffer.length > 1) {
state.videoBuffer.removeAt(0);
} else {
//
frameTimestamps.add(DateTime.now().millisecondsSinceEpoch);
// FPS
_updateFps(frameTimestamps);
// //
// frameTimestamps.add(DateTime.now().millisecondsSinceEpoch);
// // FPS
// _updateFps(frameTimestamps);
_playVideoData(state.videoBuffer.removeAt(0));
}
}
});
}
///
/// Classifies network quality from the gap between consecutive video frames.
///
/// [currentTimestamp] is the arrival time (ms since epoch) of the latest
/// frame. A gap of 500–1000 ms is treated as lagging, over 1000 ms as
/// delayed; anything faster marks the network normal, resets the alert
/// throttle counter and dismisses any visible warning overlay.
void updateNetworkStatus(int currentTimestamp) {
  // First frame ever: nothing to compare against, just record the timestamp.
  if (state.lastFrameTimestamp.value != 0) {
    final frameInterval = currentTimestamp - state.lastFrameTimestamp.value;
    if (frameInterval > 500 && frameInterval <= 1000) {
      // Gap between 500 ms and 1 s: mild stutter.
      state.networkStatus.value = NetworkStatus.lagging;
      showNetworkStatus("Network is lagging");
    } else if (frameInterval > 1000) {
      // Gap longer than 1 s: serious delay.
      state.networkStatus.value = NetworkStatus.delayed;
      showNetworkStatus("Network is delayed");
    } else {
      state.networkStatus.value = NetworkStatus.normal;
      state.alertCount.value = 0; // Healthy frame: re-arm the toast throttle.
      EasyLoading.dismiss(); // Clear any network warning still on screen.
    }
  }
  state.lastFrameTimestamp.value = currentTimestamp;
}
///
/// Shows a throttled toast describing the current network condition.
///
/// At most `state.maxAlertNumber` toasts are shown per degradation episode
/// (the counter is reset elsewhere when the network recovers), and no toast
/// is shown while an EasyLoading overlay is already visible.
void showNetworkStatus(String message) {
  // Use the configurable cap declared in TalkViewState (maxAlertNumber)
  // instead of a hard-coded 3, so the limit stays consistent if it is
  // ever tuned in one place.
  if (state.alertCount.value < state.maxAlertNumber.value &&
      !EasyLoading.isShow) {
    showToast(message);
    state.alertCount++;
  }
}
///
void _stopPlayG711Data() async {
print('停止播放');
await FlutterPcmSound.pause();
await FlutterPcmSound.stop();
await FlutterPcmSound.clear();
@ -288,4 +340,85 @@ class TalkViewLogic extends BaseGetXController {
//
Get.back();
}
///
/// Toggles voice on/off for the current talk and tells the peer which
/// media types to send.
///
/// Bug fix: the previous version built the expect request from the OLD
/// value of `state.isOpenVoice` and flipped the flag afterwards, so the
/// peer was always asked for the opposite of what the UI showed (the UI
/// and the local playback loop gate audio on this flag). Toggle first,
/// then derive the request from the new state.
void updateTalkExpect() {
  // Flip the local voice flag first; the request below must describe the
  // state we are switching INTO.
  state.isOpenVoice.value = !state.isOpenVoice.value;
  final TalkExpectReq talkExpectReq = state.isOpenVoice.value
      ? TalkExpectReq(
          videoType: [VideoTypeE.IMAGE],
          audioType: [AudioTypeE.G711], // voice on: request G.711 audio
        )
      : TalkExpectReq(
          videoType: [VideoTypeE.IMAGE],
          audioType: [], // voice off: video only
        );
  // Push the new expectation to the peer and restart the expect timer.
  StartChartManage().changeTalkExpectDataType(talkExpect: talkExpectReq);
}
///
/// Captures the current video frame (the RepaintBoundary keyed by
/// `state.globalKey`) as a PNG, writes it to the app documents directory
/// and saves it into the device photo gallery, then shows a toast.
///
/// Logs and returns silently when no context is mounted or the image
/// could not be encoded; any other failure is caught and logged.
Future<void> captureAndSavePng() async {
  try {
    final BuildContext? context = state.globalKey.currentContext;
    if (context == null) {
      AppLog.log('截图失败: 未找到当前上下文');
      return;
    }
    // Render the boundary subtree into a raster image and encode as PNG.
    final boundary = context.findRenderObject()! as RenderRepaintBoundary;
    final ui.Image image = await boundary.toImage();
    final ByteData? byteData =
        await image.toByteData(format: ui.ImageByteFormat.png);
    if (byteData == null) {
      AppLog.log('截图失败: 图像数据为空');
      return;
    }
    final Uint8List pngBytes = byteData.buffer.asUint8List();
    // Persist to a temporary file inside the app documents directory.
    final Directory directory = await getApplicationDocumentsDirectory();
    final String imagePath = '${directory.path}/screenshot.png';
    await File(imagePath).writeAsBytes(pngBytes);
    // Hand the file over to the system gallery.
    await ImageGallerySaver.saveFile(imagePath);
    AppLog.log('截图保存路径: $imagePath');
    showToast('截图已保存到相册'.tr);
  } catch (e) {
    AppLog.log('截图失败: $e');
  }
}
///
/// Starts recording the screen (with audio) for the live talk view.
///
/// Marks `state.isRecording` only when the plugin reports that recording
/// actually started.
Future<void> startRecording() async {
  // NOTE(review): getPermissionStatus() is not awaited — if it is
  // asynchronous, recording may begin before permissions are confirmed.
  // Verify its signature and await it if needed.
  getPermissionStatus();
  bool started =
      await FlutterScreenRecording.startRecordScreenAndAudio("Recording");
  if (started) {
    state.isRecording.value = true;
  }
}
///
/// Stops the active screen recording and saves the video to the gallery.
///
/// Always clears `state.isRecording` (the old code left it set on the
/// failure path only by accident of the dead branch). Shows a toast only
/// when the plugin returned a usable file path.
Future<void> stopRecording() async {
  final String path = await FlutterScreenRecording.stopRecordScreen;
  // stopRecordScreen returns a non-nullable String, so the previous
  // `path != null` check was always true; treat an empty path as failure.
  state.isRecording.value = false;
  if (path.isNotEmpty) {
    // Await the gallery save before announcing success.
    await GallerySaver.saveVideo(path);
    showToast('录屏已保存到相册'.tr);
  } else {
    print("Recording failed");
  }
}
}

View File

@ -1,10 +1,16 @@
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/main/lockDetail/realTimePicture/realTimePicture_state.dart';
import 'package:star_lock/talk/call/callTalk.dart';
@ -43,7 +49,6 @@ class _TalkViewPageState extends State<TalkViewPage>
state.animationController.repeat();
//StatusListener
state.animationController.addStatusListener((AnimationStatus status) {
// AppLog.log("AnimationStatus:$status");
if (status == AnimationStatus.completed) {
state.animationController.reset();
state.animationController.forward();
@ -70,23 +75,29 @@ class _TalkViewPageState extends State<TalkViewPage>
height: ScreenUtil().screenHeight,
fit: BoxFit.cover,
)
: Image.memory(
state.listData.value,
gaplessPlayback: true,
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
errorBuilder: (
BuildContext context,
Object error,
StackTrace? stackTrace,
) {
return Container(color: Colors.transparent);
},
: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: Image.memory(
state.listData.value,
gaplessPlayback: true,
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
errorBuilder: (
BuildContext context,
Object error,
StackTrace? stackTrace,
) {
return Container(color: Colors.transparent);
},
),
),
),
),
Obx(() => state.listData.value.isEmpty
Obx(() => state.talkStatus.value == TalkStatus.answeredSuccessfully
? Positioned(
bottom: 300.h,
child: Text(
@ -114,17 +125,20 @@ class _TalkViewPageState extends State<TalkViewPage>
),
),
),
Positioned(
top: 100.h,
left: 10.w,
child: Obx(
() => Text(
'FPS:${state.fps.value}',
style: TextStyle(fontSize: 30.sp, color: Colors.orange,fontWeight: FontWeight.bold),
),
),
),
Obx(() => state.listData.value.isEmpty
// Positioned(
// top: 100.h,
// left: 10.w,
// child: Obx(
// () => Text(
// 'FPS:${state.fps.value}',
// style: TextStyle(
// fontSize: 30.sp,
// color: Colors.orange,
// fontWeight: FontWeight.bold),
// ),
// ),
// ),
Obx(() => state.talkStatus.value == TalkStatus.answeredSuccessfully
? buildRotationTransition()
: Container())
],
@ -137,7 +151,7 @@ class _TalkViewPageState extends State<TalkViewPage>
//
GestureDetector(
onTap: () {
state.isOpenVoice.value = !state.isOpenVoice.value;
logic.updateTalkExpect();
},
child: Container(
width: 50.w,
@ -148,16 +162,16 @@ class _TalkViewPageState extends State<TalkViewPage>
height: 40.w,
image: state.isOpenVoice.value
? const AssetImage(
'images/main/icon_lockDetail_monitoringCloseVoice.png')
'images/main/icon_lockDetail_monitoringOpenVoice.png')
: const AssetImage(
'images/main/icon_lockDetail_monitoringOpenVoice.png'))),
'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
),
),
SizedBox(width: 50.w),
//
GestureDetector(
onTap: () {
// Get.toNamed(Routers.monitoringRealTimeScreenPage);
onTap: () async {
await logic.captureAndSavePng();
},
child: Container(
width: 50.w,
@ -173,32 +187,38 @@ class _TalkViewPageState extends State<TalkViewPage>
SizedBox(width: 50.w),
//
GestureDetector(
onTap: () {
// Get.toNamed(Routers.monitoringRealTimeScreenPage);
onTap: () async {
if (state.isRecording.value) {
await logic.stopRecording();
} else {
await logic.startRecording();
}
},
child: Container(
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
fit: BoxFit.fill,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenRecording.png')),
width: 40.w,
height: 40.w,
fit: BoxFit.fill,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenRecording.png'),
),
),
),
SizedBox(width: 50.w),
GestureDetector(
onTap: () {
logic.showToast('功能暂未开放'.tr);
},
child: Image(
width: 28.w,
height: 28.w,
fit: BoxFit.fill,
image: const AssetImage(
'images/main/icon_lockDetail_rectangle.png')))
onTap: () {
logic.showToast('功能暂未开放'.tr);
},
child: Image(
width: 28.w,
height: 28.w,
fit: BoxFit.fill,
image: const AssetImage('images/main/icon_lockDetail_rectangle.png'),
),
),
]);
}
@ -212,7 +232,10 @@ class _TalkViewPageState extends State<TalkViewPage>
getAnswerBtnImg(),
getAnswerBtnName(),
Colors.white,
longPress: () async {},
longPress: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully ||
state.talkStatus.value == TalkStatus.duringCall) {}
},
longPressUp: () async {},
onClick: () async {
if (state.talkStatus.value == TalkStatus.waitingAnswer) {

View File

@ -13,8 +13,15 @@ import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import '../../../../tools/storage.dart';
/// Perceived network quality of the live talk video stream, derived from
/// the gap between consecutive frame arrivals.
enum NetworkStatus {
normal, // 0 - frames arriving on time
lagging, // 1 - frame gap between 500 ms and 1 s
delayed, // 2 - frame gap longer than 1 s
packetLoss // 3 - reserved; not set anywhere visible in this chunk
}
class TalkViewState {
RxBool isOpenVoice = false.obs;
int udpSendDataFrameNumber = 0; //
// var isSenderAudioData = false.obs;//
@ -27,7 +34,7 @@ class TalkViewState {
Rx<Uint8List> listData = Uint8List(0).obs; //
RxList<int> listAudioData = <int>[].obs; //
GlobalKey globalKey = GlobalKey();
late final VoiceProcessor? voiceProcessor;
late Timer oneMinuteTimeTimer =
@ -40,8 +47,6 @@ class TalkViewState {
late Timer openDoorTimer;
late AnimationController animationController;
RxDouble fps = 0.0.obs; // FPS
late Timer autoBackTimer =
Timer(const Duration(seconds: 1), () {}); //30
@ -50,19 +55,25 @@ class TalkViewState {
RxInt elapsedSeconds = 0.obs;
//
List<TalkData> audioBuffer = <TalkData>[].obs;
List<TalkData> videoBuffer = <TalkData>[].obs;
RxBool isPlaying = false.obs; //
Rx<TalkStatus> talkStatus = TalkStatus.none.obs; //
// startChartTalkStatus
final StartChartTalkStatus startChartTalkStatus =
StartChartTalkStatus.instance;
//
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
RxInt lastFrameTimestamp = 0.obs; // ,
Rx<NetworkStatus> networkStatus =
NetworkStatus.normal.obs; // 0- 1- 2- 3-
RxInt alertCount = 0.obs; //
RxInt maxAlertNumber = 3.obs; //
RxBool isOpenVoice = true.obs; //
RxBool isRecording = true.obs; //
RxDouble fps = 0.0.obs; // FPS
}

View File

@ -256,6 +256,10 @@ dependencies:
asn1lib: ^1.0.0
fast_rsa: ^3.6.6
protobuf: ^3.1.0
#录屏
flutter_screen_recording: 2.0.16
#图库保存
gallery_saver: ^2.3.2