fix: 增加页面播放逻辑、调整 proto 文件

This commit is contained in:
liyi 2024-12-27 13:35:56 +08:00
parent 44ba4c2920
commit 133f863448
29 changed files with 1139 additions and 984 deletions

View File

@ -36,7 +36,6 @@ import 'package:star_lock/main/lockDetail/messageWarn/msgNotification/msgNotific
import 'package:star_lock/main/lockDetail/messageWarn/msgNotification/nDaysUnopened/nDaysUnopened_page.dart';
import 'package:star_lock/main/lockDetail/messageWarn/msgNotification/openDoorNotify/openDoorNotify_page.dart';
import 'package:star_lock/main/lockDetail/messageWarn/notificationMode/notificationMode_page.dart';
import 'package:star_lock/main/lockDetail/monitoring/star_chart_h264/star_chart_page.dart';
import 'package:star_lock/main/lockDetail/palm/addPalm/addPalm_page.dart';
import 'package:star_lock/main/lockDetail/palm/palmList/palmList_page.dart';
import 'package:star_lock/main/lockDetail/passwordKey/passwordKeyDetailChangeDate/passwordKeyDetailChangeDate_page.dart';
@ -61,6 +60,7 @@ import 'package:star_lock/mine/mineSet/transferSmartLock/transferSmartLockList/t
import 'package:star_lock/mine/valueAddedServices/advancedFeaturesWeb/advancedFeaturesWeb_page.dart';
import 'package:star_lock/mine/valueAddedServices/advancedFunctionRecord/advancedFunctionRecord_page.dart';
import 'package:star_lock/mine/valueAddedServices/valueAddedServicesRecord/value_added_services_record_page.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_page.dart';
import 'common/safetyVerification/safetyVerification_page.dart';
import 'login/forgetPassword/starLock_forgetPassword_page.dart';
@ -514,6 +514,7 @@ abstract class Routers {
static const String googleHomePage = '/googleHomePage'; //GoogleHome
static const String doubleLockLinkPage = '/doubleLockLinkPage'; //
static const String starChartPage = '/starChartPage'; //
static const String starChartTalkView = '/starChartTalkView'; //
}
abstract class AppRouters {
@ -747,7 +748,8 @@ abstract class AppRouters {
GetPage<dynamic>(
name: Routers.lockTimePage,
page: () => const LockTimePage(),
), //
),
//
GetPage<dynamic>(
name: Routers.diagnosePage,
page: () => const DiagnosePage(),
@ -1192,6 +1194,6 @@ abstract class AppRouters {
name: Routers.doubleLockLinkPage,
page: () => const DoubleLockLinkPage()),
GetPage<dynamic>(
name: Routers.starChartPage, page: () => const StarChartPage()),
name: Routers.starChartTalkView, page: () => const TalkViewPage()),
];
}

View File

@ -21,4 +21,12 @@ class EventBusManager {
eventBusFir(dynamic event) {
eventBus?.fire(event);
}
//
void fireEvent(dynamic event) {
eventBus?.fire(event);
}
// EventBus
EventBus? get bus => eventBus;
}

View File

@ -1,276 +0,0 @@
import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/blue/io_tool/manager_event_bus.dart';
import 'package:star_lock/main/lockDetail/monitoring/star_chart_h264/star_chart_state.dart';
import 'package:star_lock/talk/startChart/events/talk_status_change_event.dart';
import 'package:star_lock/talk/startChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pbenum.dart';
import 'package:star_lock/talk/startChart/start_chart_manage.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/tools/baseGetXController.dart';
import 'package:star_lock/tools/eventBusEventManage.dart';
/// GetX controller backing the star-chart talk/monitoring page.
///
/// Responsibilities visible in this class:
/// * configures [FlutterPcmSound] for 8 kHz mono playback of incoming audio;
/// * subscribes to [TalkStatusChangeEvent] on the global event bus and
///   mirrors the status into [state], tearing the call down on terminal
///   statuses;
/// * consumes talk data (G.711 audio and image frames) from the shared
///   repository stream;
/// * provides G.711 mu-law encoding helpers and microphone-permission checks.
class StarChartLogic extends BaseGetXController {
final StarChartState state = StarChartState();
/// Subscription to talk-status change events fired on the event bus.
StreamSubscription? _getTalkStatusRefreshUIEvent;
// Wall-clock reference (ms since epoch) captured at construction; used by
// [syncPlay] for the (currently disabled) A/V synchronisation logic.
int startTime = DateTime.now().millisecondsSinceEpoch;
@override
void onReady() {
super.onReady();
// Configure the PCM player: 8 kHz, mono — matches the incoming audio stream.
FlutterPcmSound.setup(sampleRate: 8000, channelCount: 1);
// Request a feed callback when fewer than 4000 samples (0.5 s) remain.
FlutterPcmSound.setFeedThreshold(8000 ~/ 2); // half the sample rate
_getTalkStatusRefreshUIAction();
_startListenTalkData();
}
/// Subscribes to [TalkStatusChangeEvent] and mirrors the new status into
/// [state]. Terminal statuses (rejected / notTalkData / notTalkPing / end)
/// cancel timers, stop capture and playback, clear the photo buffer, and
/// pop this page; `duringCall` starts the per-second call timer.
void _getTalkStatusRefreshUIAction() {
_getTalkStatusRefreshUIEvent = EventBusManager()
.eventBus!
.on<TalkStatusChangeEvent>()
.listen((TalkStatusChangeEvent event) async {
state.talkStatus.value = event.newStatus.index;
state.oneMinuteTime.value = 0;
if (state.talkStatus.value == TalkStatus.rejected.index ||
state.talkStatus.value == TalkStatus.notTalkData.index ||
state.talkStatus.value == TalkStatus.notTalkPing.index ||
state.talkStatus.value == TalkStatus.end.index) {
_cancelTimers();
stopProcessing();
state.listPhotoData.value = Uint8List(0);
// Stop audio playback and flush any buffered PCM.
_stopPlayG711Data();
// Leave the talk page.
Get.back();
return;
}
if (state.talkStatus.value == TalkStatus.duringCall.index) {
_startCallTimer();
}
});
}
/// Listens to the shared talk-data stream and dispatches each packet by
/// content type: G711 payloads are fed to the PCM player, Image payloads
/// replace the currently displayed frame.
void _startListenTalkData() {
state.talkDataRepository.talkDataStream.listen((talkData) {
final contentType = talkData.contentType;
// Route by payload type.
switch (contentType) {
case TalkData_ContentTypeE.G711:
_playG711Data(talkData.content);
break;
case TalkData_ContentTypeE.Image:
// Replace the currently displayed video frame.
state.listPhotoData.value = Uint8List.fromList(talkData.content);
break;
}
});
}
/// Placeholder for timestamp-based audio/video synchronisation; the
/// buffered-playback logic below is currently disabled, so this method
/// has no effect beyond computing (and discarding) the elapsed time.
void syncPlay() {
int currentTime = DateTime.now().millisecondsSinceEpoch - startTime;
//
// while (audioBuffer.isNotEmpty && audioBuffer.first.durationMs <= currentTime) {
// TalkData audioData = audioBuffer.removeAt(0);
// playAudio(audioData.content);
// }
//
// //
// while (videoBuffer.isNotEmpty && videoBuffer.first.durationMs <= currentTime) {
// TalkData videoData = videoBuffer.removeAt(0);
// playVideo(videoData.content);
// }
}
/// Feeds a buffer of 16-bit samples to the PCM player and starts playback.
///
/// NOTE(review): despite being named for G.711, the bytes appear to be
/// decoded to linear PCM upstream before reaching this point — confirm.
Future<void> _playG711Data(List<int> pcmData) async {
// Wrap the raw sample list as PcmArrayInt16 for the plugin.
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(pcmData);
await FlutterPcmSound.feed(fromList);
FlutterPcmSound.play();
}
/// Pauses playback, clears any queued samples, then stops the player.
void _stopPlayG711Data() {
FlutterPcmSound.pause();
FlutterPcmSound.clear();
FlutterPcmSound.stop();
}
/// Starts the once-per-second call timer, unless it is already running.
void _startCallTimer() {
if (state.oneMinuteTimeTimer.isActive) return;
state.oneMinuteTimeTimer.cancel();
state.oneMinuteTimeTimer =
Timer.periodic(const Duration(seconds: 1), (Timer t) {
state.oneMinuteTime.value++;
// Auto-hangup after 60 s is currently disabled:
// if (state.oneMinuteTime.value >= 60) {
// t.cancel();
// initiateHangUpCommand();
// AppLog.log('通话时间超过60秒自动挂断');
// state.oneMinuteTime.value = 0;
// }
});
}
/// Cancels the per-second call timer.
void _cancelTimers() {
state.oneMinuteTimeTimer.cancel();
}
/// Sends the "accept call" message to the peer.
void initiateAnswerCommand() {
StartChartManage().sendTalkAcceptMessage();
}
/// Hangs up: during an active call sends a hang-up message, otherwise
/// sends a reject; then pops this page.
void initiateHangUpCommand() {
_cancelTimers();
if (state.talkStatus.value == TalkStatus.duringCall.index) {
// Call already established — hang up.
StartChartManage().sendTalkHangupMessage();
} else {
// Call not yet established — reject the incoming request.
StartChartManage().sendTalkRejectMessage();
}
Get.back();
}
/// Microphone frame callback: accumulates frames in [state.allFrames],
/// logs the concatenated buffer, and mu-law encodes the current frame.
/// Sending the encoded audio is currently disabled.
Future<void> _onFrame(List<int> frame) async {
state.allFrames.add(frame);
final List<int> concatenatedFrames = concatenateFrames(state.allFrames);
AppLog.log('pcm数据:$concatenatedFrames');
final List<int> pcmBytes = listLinearToULaw(frame);
// Sending is disabled for now:
// StartChartManage().sendTalkAudioMessage(pcmBytes);
}
/// Encodes a list of linear PCM samples to G.711 mu-law bytes.
List<int> listLinearToULaw(List<int> pcmList) {
return pcmList.map(linearToULaw).toList();
}
/// Flattens a list of audio frames into a single sample list.
List<int> concatenateFrames(List<List<int>> frames) {
return frames.expand((frame) => frame).toList();
}
/// Starts microphone capture. The entire implementation is currently
/// commented out, so this is a no-op.
Future<void> startProcessing() async {
// state.isButtonDisabled.value = true;
//
// state.voiceProcessor?.addFrameListener(_onFrame);
// state.voiceProcessor?.addErrorListener(_onError);
// try {
// if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
// await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
// state.isProcessing.value =
// await state.voiceProcessor?.isRecording() ?? false;
// } else {
// state.errorMessage.value = 'Recording permission not granted';
// }
// } on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to start recorder: $ex';
// } finally {
// state.isButtonDisabled.value = false;
// }
}
/// Voice-processor error callback: surfaces the message to the UI state.
void _onError(VoiceProcessorException error) {
state.errorMessage.value = error.message!;
}
/// Stops microphone capture. The entire implementation is currently
/// commented out, so this is a no-op.
Future<void> stopProcessing() async {
// voiceProcessor
// if (state.voiceProcessor == null) {
// state.errorMessage.value = 'Voice processor is not initialized.';
// return;
// }
//
// state.isButtonDisabled.value = true;
// try {
// await state.voiceProcessor?.stop();
// state.voiceProcessor?.removeFrameListener(_onFrame);
// state.udpSendDataFrameNumber = 0;
// } on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to stop recorder: $ex';
// } finally {
// state.isProcessing.value =
// await state.voiceProcessor?.isRecording() ?? false;
// state.isButtonDisabled.value = false;
// }
}
/// Encodes one 16-bit linear PCM sample as a G.711 mu-law byte using the
/// classic CCITT algorithm: add bias 0x84, find the segment, pack a 4-bit
/// mantissa, and invert via the sign mask.
int linearToULaw(int pcmVal) {
int mask;
int seg;
int uval;
if (pcmVal < 0) {
// Negative sample: fold to a positive biased magnitude.
pcmVal = 0x84 - pcmVal;
mask = 0x7F;
} else {
pcmVal += 0x84;
mask = 0xFF;
}
seg = search(pcmVal);
if (seg >= 8) {
// Magnitude out of range — clamp to the maximum code.
return 0x7F ^ mask;
} else {
uval = seg << 4;
uval |= (pcmVal >> (seg + 3)) & 0xF;
return uval ^ mask;
}
}
/// Returns the mu-law segment index (0-7) for a biased sample magnitude,
/// or 8 (the table length) when the value exceeds 0x7FFF.
int search(int val) {
final List<int> table = [
0xFF,
0x1FF,
0x3FF,
0x7FF,
0xFFF,
0x1FFF,
0x3FFF,
0x7FFF
];
for (int i = 0; i < table.length; i++) {
if (val <= table[i]) {
return i;
}
}
return table.length;
}
/// Returns true when microphone permission is already granted. Otherwise
/// requests it (or opens the app settings when permanently denied) and
/// returns false for this invocation.
Future<bool> getPermissionStatus() async {
const Permission permission = Permission.microphone;
final PermissionStatus status = await permission.status;
if (status.isGranted) {
return true;
} else if (status.isDenied || status.isRestricted) {
await requestPermission(permission);
} else if (status.isPermanentlyDenied) {
openAppSettings();
}
return false;
}
/// Requests [permission]; opens the app settings if permanently denied.
Future<void> requestPermission(Permission permission) async {
final PermissionStatus status = await permission.request();
if (status.isPermanentlyDenied) {
openAppSettings();
}
}
}

View File

@ -1,394 +0,0 @@
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:ui' as ui;
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/main/lockDetail/monitoring/star_chart_h264/star_chart_logic.dart';
import 'package:star_lock/main/lockDetail/monitoring/star_chart_h264/star_chart_state.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pbenum.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/talk/startChart/webView/h264_web_view.dart';
import 'package:star_lock/talk/udp/udp_manage.dart';
import 'package:star_lock/tools/eventBusEventManage.dart';
import 'package:star_lock/tools/showTFView.dart';
/// Route widget for the star-chart talk/monitoring screen.
///
/// All behaviour lives in [_StarChartPageState]; this class is only the
/// stateful-widget shell used by the router.
class StarChartPage extends StatefulWidget {
  /// Creates the page; [key] is forwarded to the framework.
  const StarChartPage({Key? key}) : super(key: key);

  @override
  State<StarChartPage> createState() {
    return _StarChartPageState();
  }
}
/// State for [StarChartPage]: renders the incoming video frames, the call
/// timer, and the answer / hang-up / unlock control bar.
class _StarChartPageState extends State<StarChartPage> {
final StarChartLogic logic = Get.put(StarChartLogic());
final StarChartState state = Get.find<StarChartLogic>().state;
@override
void initState() {
super.initState();
initAsync();
}
/// Fire-and-forget async setup that cannot run inside [initState] directly.
Future<void> initAsync() async {
await requestMicrophonePermission();
}
@override
Widget build(BuildContext context) {
// canPop: false — leaving the page is driven by the hang-up logic, not
// the system back gesture. The RepaintBoundary enables screenshots via
// [captureAndSavePng].
return PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: Container(
width: 1.sw,
height: 1.sh,
color: Colors.transparent,
child: _buildTalkView(isMpeg4: true),
),
),
);
}
/// Top control row: voice toggle, screenshot, and screen-record buttons.
Widget buildTopButtons() {
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
buildIconButton(
icon: state.isOpenVoice.value
? 'images/main/icon_lockDetail_monitoringCloseVoice.png'
: 'images/main/icon_lockDetail_monitoringOpenVoice.png',
onTap: () {
// Toggle the voice flag; the icon reflects the new value.
state.isOpenVoice.value = !state.isOpenVoice.value;
},
),
SizedBox(width: 60.w),
buildIconButton(
icon: 'images/main/icon_lockDetail_monitoringScreenshot.png',
onTap: captureAndSavePng,
),
SizedBox(width: 60.w),
buildIconButton(
icon: 'images/main/icon_lockDetail_monitoringScreenRecording.png',
onTap: () {
// Screen recording is not wired up yet:
// Get.toNamed(Routers.monitoringRealTimeScreenPage);
},
),
],
);
}
/// Bottom control row: answer/talk, hang-up, and remote-unlock buttons.
Widget buildBottomButtons() {
return Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
buildAnswerButton(),
buildIconButton(
icon: 'images/main/icon_lockDetail_hangUp.png',
label: '挂断'.tr,
color: Colors.red,
onTap: () async {
logic.initiateHangUpCommand();
},
),
buildIconButton(
icon: 'images/main/icon_lockDetail_monitoringUnlock.png',
label: '开锁'.tr,
color: AppColors.mainColor,
onTap: () {
// Remote unlock must be enabled in the lock settings first.
if (UDPManage().remoteUnlock == 1) {
showDeletPasswordAlertDialog(context);
} else {
logic.showToast('请在锁设置中开启远程开锁'.tr);
}
},
),
],
);
}
/// Answer button that morphs into a push-to-talk button once the call is
/// in progress: tap answers while waiting, long-press records while talking.
Widget buildAnswerButton() {
return Obx(() {
// final bool isDuringCall =
// state.talkStatus.value == TalkStatus.duringCall.index;
return buildIconButton(
icon: state.talkStatus.value == TalkStatus.duringCall.index
? 'images/main/icon_lockDetail_monitoringUnTalkback.png'
: 'images/main/icon_lockDetail_monitoringAnswerCalls.png',
label: state.talkStatus.value == TalkStatus.duringCall.index
? '长按说话'.tr
: '接听'.tr,
onTap: () async {
// Tap only answers when a call is waiting to be picked up.
if (state.talkStatus.value == TalkStatus.waitingAnswer.index) {
logic.initiateAnswerCommand();
setState(() {});
}
},
onLongPress: () {
// Push-to-talk: clear the capture buffer and start recording.
state.listAudioData.value = <int>[];
logic.startProcessing();
},
onLongPressUp: () {
logic.stopProcessing();
},
);
});
}
/// Builds a round icon button with an optional label.
///
/// [icon] asset path; [label] optional caption below the icon; [color]
/// circle background; [onTap]/[onLongPress]/[onLongPressUp] gesture hooks.
Widget buildIconButton({
required String icon,
String? label,
Color color = Colors.white,
required Function() onTap,
Function()? onLongPress,
Function()? onLongPressUp,
}) {
final double wh = 80.w;
return GestureDetector(
onTap: onTap,
onLongPress: onLongPress,
onLongPressUp: onLongPressUp,
child: SizedBox(
height: 140.h,
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: <Widget>[
Container(
width: wh,
height: wh,
decoration: BoxDecoration(
color: color,
borderRadius: BorderRadius.circular((wh + 10.w * 2) / 2),
),
padding: EdgeInsets.all(20.w),
child: Image.asset(icon, fit: BoxFit.fitWidth),
),
if (label != null) ...[
SizedBox(height: 20.w),
Expanded(
child: Text(
label,
style: TextStyle(fontSize: 20.sp, color: Colors.white),
textAlign: TextAlign.center,
),
),
],
],
),
),
);
}
/// Shows the 6-digit unlock-password dialog.
///
/// NOTE(review): method name contains a typo ("Delet") — kept for
/// compatibility with existing callers.
void showDeletPasswordAlertDialog(BuildContext context) {
showDialog(
barrierDismissible: false,
context: context,
builder: (BuildContext context) {
return ShowTFView(
title: '请输入6位数字开锁密码'.tr,
tipTitle: '',
controller: state.passwordTF,
inputFormatters: <TextInputFormatter>[
LengthLimitingTextInputFormatter(6), // at most 6 characters
FilteringTextInputFormatter.allow(RegExp('[0-9]')),
],
sureClick: () async {
if (state.passwordTF.text.isEmpty) {
logic.showToast('请输入开锁密码'.tr);
return;
}
// UTF-8 encode the password and zero-pad to exactly 6 bytes.
final List<int> numbers = <int>[];
final List<int> lockIDData = utf8.encode(state.passwordTF.text);
numbers.addAll(lockIDData);
for (int i = 0; i < 6 - lockIDData.length; i++) {
numbers.add(0);
}
// TODO: wire up the actual unlock command:
// logic.udpOpenDoorAction(numbers);
},
cancelClick: () {
Get.back();
},
);
},
);
}
/// Requests microphone permission via the controller; currently ignores
/// the result beyond triggering the permission flow.
Future<void> requestMicrophonePermission() async {
await logic.getPermissionStatus().then((bool value) async {
if (!value) {
return;
}
});
}
/// Captures the page (via the RepaintBoundary keyed by state.globalKey)
/// as a PNG, writes it to the app documents directory, and saves it to
/// the system gallery. Errors are logged, not rethrown.
Future<void> captureAndSavePng() async {
try {
if (state.globalKey.currentContext == null) {
AppLog.log('截图失败: 未找到当前上下文');
return;
}
final RenderRepaintBoundary boundary = state.globalKey.currentContext!
.findRenderObject()! as RenderRepaintBoundary;
final ui.Image image = await boundary.toImage();
final ByteData? byteData =
await image.toByteData(format: ui.ImageByteFormat.png);
if (byteData == null) {
AppLog.log('截图失败: 图像数据为空');
return;
}
final Uint8List pngBytes = byteData.buffer.asUint8List();
final Directory directory = await getApplicationDocumentsDirectory();
final String imagePath = '${directory.path}/screenshot.png';
final File imgFile = File(imagePath);
await imgFile.writeAsBytes(pngBytes);
await ImageGallerySaver.saveFile(imagePath);
AppLog.log('截图保存路径: $imagePath');
logic.showToast('截图已保存到相册'.tr);
} catch (e) {
AppLog.log('截图失败: $e');
}
}
/// Renders a byte list as a concatenated hex string (no padding, no
/// separators) — debugging helper.
String listToHexString(List<int> intList) {
// Convert each value to its hex representation...
List<String> hexList = intList.map((num) => num.toRadixString(16)).toList();
// ...and join them into a single string.
return hexList.join('');
}
@override
void dispose() {
// NOTE(review): super.dispose() is conventionally called last — confirm
// this ordering is intentional.
super.dispose();
logic.stopProcessing();
// state.getTVDataRefreshUIEvent!.cancel();
}
/// Chooses between the MJPEG/MPEG4 image view and the H.264 web view.
Widget _buildTalkView({required bool isMpeg4}) {
return isMpeg4 ? _buildMpeg4TalkView() : _buildH264TalkView();
}
/// Image-frame based talk view: shows the latest decoded frame (or a
/// placeholder), the call timer, and the control panel.
Widget _buildMpeg4TalkView() {
return Obx(
() => Stack(
children: <Widget>[
state.listPhotoData.value.isNotEmpty
? Image.memory(
state.listPhotoData.value,
gaplessPlayback: true,
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
errorBuilder: (BuildContext context, Object error,
StackTrace? stackTrace) {
// Bad frame data: render nothing rather than crash.
return Container(color: Colors.transparent);
},
)
: Image.asset(
'images/main/monitorBg.png',
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
),
Positioned(
top: ScreenUtil().statusBarHeight + 30.h,
width: 1.sw,
child: Obx(() {
// Format elapsed call time as mm:ss.
final String sec =
(state.oneMinuteTime.value % 60).toString().padLeft(2, '0');
final String min =
(state.oneMinuteTime.value ~/ 60).toString().padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text('$min:$sec',
style: TextStyle(fontSize: 26.sp, color: Colors.white)),
],
);
}),
),
Positioned(
bottom: 10.w,
child: Container(
width: 1.sw - 30.w * 2,
margin: EdgeInsets.all(30.w),
decoration: BoxDecoration(
color: const Color(0xC83C3F41),
borderRadius: BorderRadius.circular(20.h),
),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
buildTopButtons(),
SizedBox(height: 20.h),
buildBottomButtons(),
SizedBox(height: 20.h),
],
),
),
),
],
),
);
}
/// H.264 talk view: same timer/control overlay on top of [H264WebView].
Widget _buildH264TalkView() {
return Stack(
children: <Widget>[
H264WebView(),
Positioned(
top: ScreenUtil().statusBarHeight + 30.h,
width: 1.sw,
child: Obx(() {
// Format elapsed call time as mm:ss.
final String sec =
(state.oneMinuteTime.value % 60).toString().padLeft(2, '0');
final String min =
(state.oneMinuteTime.value ~/ 60).toString().padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text('$min:$sec',
style: TextStyle(fontSize: 26.sp, color: Colors.white)),
],
);
}),
),
Positioned(
bottom: 10.w,
child: Container(
width: 1.sw - 30.w * 2,
margin: EdgeInsets.all(30.w),
decoration: BoxDecoration(
color: const Color(0xC83C3F41),
borderRadius: BorderRadius.circular(20.h),
),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
buildTopButtons(),
SizedBox(height: 20.h),
buildBottomButtons(),
SizedBox(height: 20.h),
],
),
),
),
],
);
}
}

View File

@ -1,65 +0,0 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:flutter/material.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:network_info_plus/network_info_plus.dart';
import 'package:star_lock/talk/startChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import '../../../../tools/storage.dart';
/// Mutable state shared between [StarChartPage] (UI) and its controller.
///
/// Holds reactive GetX values for the talk session: media buffers, audio
/// capture settings, timers, and status flags.
class StarChartState {
// Whether the voice toggle in the top button row is on.
RxBool isOpenVoice = false.obs;
int udpSendDataFrameNumber = 0; // running counter of UDP data frames sent
// var isSenderAudioData = false.obs;//
// StreamSubscription? getTVDataRefreshUIEvent; //
RxBool shouldUpdateUI = false.obs; // flag requesting a UI refresh
// Lazily resolved device Wi-Fi IP and user id (futures kicked off here).
Future<String?> userMobileIP = NetworkInfo().getWifiIP();
Future<String?> userUid = Storage.getUid();
// RxInt udpStatus =
// 0.obs; //0 1 2 3 4 5 6 8 9
// Text controller for the 6-digit unlock-password dialog.
TextEditingController passwordTF = TextEditingController();
Rx<Uint8List> listPhotoData = Uint8List(0).obs; // latest decoded video frame
RxList<int> listAudioData = <int>[].obs; // captured microphone samples
// Microphone capture (currently unused — capture code is disabled).
late VoiceProcessor? voiceProcessor;
RxBool isProcessing = false.obs; // true while recording audio
RxBool isButtonDisabled = false.obs; // debounce flag for record buttons
final int frameLength = 320; // samples per capture frame (320)
final int sampleRate = 8000; // capture sample rate in Hz (8000)
RxString errorMessage = ''.obs;
// All microphone frames captured during the current push-to-talk press.
List<List<int>> allFrames = <List<int>>[];
// Keys the RepaintBoundary used for screenshots.
GlobalKey globalKey = GlobalKey();
// Per-second call timer (initialised to a dummy timer so .cancel is safe).
late Timer oneMinuteTimeTimer =
Timer(const Duration(seconds: 1), () {}); // see 60 s auto-hangup (disabled)
RxInt oneMinuteTime = 0.obs; // elapsed call seconds shown as mm:ss
// Answer-timeout bookkeeping (timers below follow the same dummy pattern).
late Timer answerTimer = Timer(const Duration(seconds: 1), () {}); // answer timeout
RxInt answerSeconds = 0.obs;
RxBool isClickAnswer = false.obs; // whether answer was tapped
late Timer hangUpTimer = Timer(const Duration(seconds: 1), () {}); // hang-up timeout
RxInt hangUpSeconds = 0.obs;
RxBool isClickHangUp = false.obs; // whether hang-up was tapped
late Timer openDoorTimer = Timer(const Duration(seconds: 1), () {}); // unlock timeout
RxInt openDoorSeconds = 0.obs;
RxInt talkStatus = 0.obs; // current TalkStatus.index mirrored from events
// Singleton holding the authoritative talk status.
StartChartTalkStatus talkStatusInstance = StartChartTalkStatus.instance;
// Shared repository streaming incoming TalkData packets.
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
}

View File

@ -9,6 +9,8 @@ import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/blue/blue_manage.dart';
import 'package:star_lock/main/lockMian/lockList/lockList_xhj_page.dart';
import 'package:star_lock/main/lockMian/lockMain/lockMain_state.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_request.pb.dart';
import 'package:star_lock/talk/startChart/proto/test.pb.dart';
import 'package:star_lock/talk/startChart/start_chart_manage.dart';
@ -72,6 +74,10 @@ class _StarLockMainPageState extends State<StarLockMainPage>
_initLoadDataAction();
}
String bufferToHexString(Uint8List buffer) {
return buffer.map((byte) => byte.toRadixString(16).padLeft(2, '0')).join();
}
@override
void didChangeDependencies() {
super.didChangeDependencies();

View File

@ -295,7 +295,7 @@ class MessageCommand {
static List<int> talkExpectMessage({
required String FromPeerId,
required String ToPeerId,
required TalkExpect talkExpect,
required TalkExpectReq talkExpect,
int? MessageId,
}) {
final payload = talkExpect.writeToBuffer();

View File

@ -0,0 +1,18 @@
/// Lifecycle states of a star-chart talk (intercom) session.
///
/// Index values are significant: they are stored in reactive state as
/// `TalkStatus.x.index` and compared numerically elsewhere.
enum TalkStatus {
none, // no session in progress
waitingAnswer, // incoming call, waiting for the user to answer
answeredSuccessfully, // answer acknowledged by the peer
waitingData, // answered, waiting for the first media packet
duringCall, // media flowing, call in progress
hangingUpDuring, // hang-up initiated while in a call
rejected, // call rejected (terminal — tears the page down)
uninitialized, // talk stack not yet initialised
initializationCompleted, // talk stack initialisation finished
notTalkData, // no media data received in time (terminal)
notTalkPing, // no keep-alive ping received in time (terminal)
error, // unrecoverable error
end, // session finished (terminal)
}

View File

@ -155,7 +155,7 @@ class ScpMessage {
// // _log(text: 'result bytes hex: ${hexString}');
// _log(
// text:
// '\n result bytes hex: ${hexString} \n payload hex: ${hexString.substring(194)}');
// '\n result bytes hex: ${hexString} \n payload hex: ${hexString.substring(210)}');
// ProtocolFlag (4 bytes)
if (bytes.length - offset >= 4) {

View File

@ -1,13 +0,0 @@
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';

/// Event-bus payload describing a talk-status transition.
class TalkStatusChangeEvent {
  /// Status before the transition.
  final TalkStatus oldStatus;

  /// Status after the transition.
  final TalkStatus newStatus;

  TalkStatusChangeEvent(this.oldStatus, this.newStatus);

  @override
  String toString() =>
      "TalkStatusChangeEvent: ${oldStatus.name} -> ${newStatus.name}";
}

View File

@ -43,7 +43,15 @@ class UdpHeartBeatHandler extends ScpMessageBaseHandle
void handleRealTimeData(ScpMessage scpMessage) {}
@override
deserializePayload({required int payloadType, required int messageType, required List<int> byte, int? offset, int? PayloadLength, int? spTotal, int? spIndex, int? messageId}) {
deserializePayload(
{required int payloadType,
required int messageType,
required List<int> byte,
int? offset,
int? PayloadLength,
int? spTotal,
int? spIndex,
int? messageId}) {
//
HeartbeatResponse heartbeatResponse = HeartbeatResponse.fromBytes(byte);
return heartbeatResponse;

View File

@ -2,6 +2,7 @@ import 'dart:convert';
import 'dart:typed_data';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:get/get.dart';
import 'package:star_lock/talk/startChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/startChart/entity/scp_message.dart';
@ -17,13 +18,8 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
implements ScpMessageHandler {
@override
void handleReq(ScpMessage scpMessage) {
print('收到同意接听请求');
//
startChartManage.sendGenericRespSuccessMessage(
ToPeerId: scpMessage.FromPeerId!,
FromPeerId: scpMessage.ToPeerId!,
PayloadType: scpMessage.PayloadType!,
);
replySuccessMessage(scpMessage);
}
@override
@ -32,7 +28,6 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
final GenericResp genericResp = scpMessage.Payload;
if (checkGenericRespSuccess(genericResp)) {
Future.delayed(Duration(seconds: 1), () {
print('启动定时器判断');
//
_handleStartTalkPing();
//
@ -42,9 +37,8 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
});
//
stopRingtone();
//
//
talkStatus.setAnsweredSuccessfully();
talkStatus.setWaitingData();
}
}
@ -58,7 +52,7 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
deserializePayload(
{required int payloadType,
required int messageType,
required List<int> byte,
required List<int> byte,
int? offset,
int? PayloadLength,
int? spTotal,

View File

@ -1,8 +1,11 @@
import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:get/get.dart';
import 'package:path_provider/path_provider.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/startChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/startChart/entity/scp_message.dart';
@ -32,11 +35,13 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
final TalkData talkData = scpMessage.Payload;
//
_handleTalkData(talkData: talkData);
//
talkStatus.setDuringCall();
}
}
String bufferToHexString(List<int> buffer) {
return buffer.map((byte) => byte.toRadixString(16).padLeft(2, '0')).join();
}
@override
deserializePayload(
{required int payloadType,
@ -47,9 +52,9 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
int? spTotal,
int? spIndex,
int? messageId}) {
// AppLog.log(
// '没有组包之前的每一个包的数据:${byte.length} messageId:$messageId spTotal:$spTotal spIndex:$spIndex PayloadLength:$PayloadLength,byte:${bufferToHexString(byte)}');
if (messageType == MessageTypeConstant.RealTimeData) {
print(
'收到音视频数据:${byte.length} messageId:$messageId spTotal:$spTotal spIndex:$spIndex PayloadLength:$PayloadLength');
//
if (spTotal != null &&
spTotal > 1 &&
@ -104,13 +109,15 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
}
///
void _handleVideoImage(TalkData talkData) {
void _handleVideoImage(TalkData talkData) async {
final List<Uint8List> processCompletePayload =
_processCompletePayload(Uint8List.fromList(talkData.content));
//
await _processCompletePayload(Uint8List.fromList(talkData.content));
// AppLog.log('得到完整的帧:${processCompletePayload.length}'); //
processCompletePayload.forEach((element) {
talkData.content = element;
talkDataRepository.addTalkData(talkData);
//
talkStatus.setDuringCall();
});
}
@ -122,26 +129,30 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
List<int> pcmBytes = G711().convertList(g711Data);
talkData.content = pcmBytes;
talkDataRepository.addTalkData(talkData);
//
talkStatus.setDuringCall();
} catch (e) {
print('Error decoding G.711 to PCM: $e');
}
}
///
List<Uint8List> _processCompletePayload(Uint8List payload) {
Future<List<Uint8List>> _processCompletePayload(Uint8List payload) async {
//
List<Uint8List> frames = [];
// (0xFFD8 , 0xFFD9 )
int startIdx = payload.indexOf(0xFF);
while (startIdx != -1 && startIdx + 1 < payload.length) {
int startIdx = 0;
while (startIdx < payload.length - 1) {
// 0xFFD8
startIdx = payload.indexOf(0xFF, startIdx);
if (startIdx == -1 || startIdx + 1 >= payload.length) break;
if (payload[startIdx + 1] == 0xD8) {
// 0xFFD8
int endIdx = startIdx + 2;
while (endIdx < payload.length - 1) {
endIdx = payload.indexOf(0xFF, endIdx);
if (endIdx == -1) break;
if (endIdx + 1 < payload.length && payload[endIdx + 1] == 0xD9) {
if (endIdx == -1 || endIdx + 1 >= payload.length) break;
if (payload[endIdx + 1] == 0xD9) {
// 0xFFD9
Uint8List frame = payload.sublist(startIdx, endIdx + 2);
frames.add(frame);
@ -152,7 +163,7 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
}
}
} else {
startIdx = payload.indexOf(0xFF, startIdx + 1); //
startIdx += 1; //
}
}

View File

@ -20,7 +20,7 @@ class UdpTalkExpectHandler extends ScpMessageBaseHandle
@override
void handleReq(ScpMessage scpMessage) {
//
final TalkExpect talkExpect = scpMessage.Payload;
final TalkExpectReq talkExpect = scpMessage.Payload;
print('收到预期音视频数据请求:$talkExpect');
//
@ -49,18 +49,21 @@ class UdpTalkExpectHandler extends ScpMessageBaseHandle
deserializePayload(
{required int payloadType,
required int messageType,
required List<int> byte,
required List<int> byte,
int? offset,
int? PayloadLength,
int? spTotal,
int? spIndex,
int? messageId}) {
if (messageType == MessageTypeConstant.Resp) {
// final TalkExpectResp talkExpectResp = TalkExpectResp();
// talkExpectResp.mergeFromBuffer(byte);
// return talkExpectResp;
final GenericResp genericResp = GenericResp();
genericResp.mergeFromBuffer(byte);
return genericResp;
} else if (messageType == MessageTypeConstant.Req) {
final TalkExpect talkExpect = TalkExpect();
final TalkExpectReq talkExpect = TalkExpectReq();
talkExpect.mergeFromBuffer(byte);
return talkExpect;
} else {

View File

@ -1,24 +1,25 @@
import 'dart:convert';
import 'dart:typed_data';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:get/get.dart';
import 'package:star_lock/talk/startChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/entity/scp_message.dart';
import 'package:star_lock/talk/startChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/startChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/startChart/proto/gateway_reset.pb.dart';
import 'package:star_lock/talk/startChart/proto/generic.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_hangup.pb.dart';
import '../../start_chart_manage.dart';
class UdpTalkHangUpHandler extends ScpMessageBaseHandle
implements ScpMessageHandler {
@override
void handleReq(ScpMessage scpMessage) {
//
print('收到通话中挂断请求');
if (talkStatus.status != TalkStatus.duringCall) {
//
return;
}
startChartManage.sendGenericRespSuccessMessage(
ToPeerId: scpMessage.FromPeerId!,
FromPeerId: scpMessage.ToPeerId!,
@ -28,18 +29,15 @@ class UdpTalkHangUpHandler extends ScpMessageBaseHandle
startChartManage.stopTalkPingMessageTimer();
startChartManage.stopTalkExpectMessageTimer();
talkStatus.setHangingUpDuring();
talkStatus.setEnd();
stopRingtone();
}
@override
void handleResp(ScpMessage scpMessage) {
print('收到通话中挂断回复');
//
startChartManage.stopTalkPingMessageTimer();
startChartManage.stopTalkExpectMessageTimer();
talkStatus.setHangingUpDuring();
talkStatus.setEnd();
stopRingtone();
}
@ -53,7 +51,7 @@ class UdpTalkHangUpHandler extends ScpMessageBaseHandle
deserializePayload(
{required int payloadType,
required int messageType,
required List<int> byte,
required List<int> byte,
int? offset,
int? PayloadLength,
int? spTotal,

View File

@ -25,19 +25,16 @@ class UdpTalkRejectHandler extends ScpMessageBaseHandle
);
startChartManage.stopTalkPingMessageTimer();
startChartManage.stopTalkExpectMessageTimer();
talkStatus.setRejected();
stopRingtone();
talkStatus.setEnd();
//
talkStatus.setRejected();
}
@override
void handleResp(ScpMessage scpMessage) {
//
talkStatus.setRejected();
startChartManage.stopTalkPingMessageTimer();
startChartManage.stopTalkExpectMessageTimer();
stopRingtone();
talkStatus.setEnd();
}
@override
@ -50,7 +47,7 @@ class UdpTalkRejectHandler extends ScpMessageBaseHandle
deserializePayload(
{required int payloadType,
required int messageType,
required List<int> byte,
required List<int> byte,
int? offset,
int? PayloadLength,
int? spTotal,

View File

@ -1,22 +1,18 @@
import 'dart:convert';
import 'dart:typed_data';
import 'package:flutter/services.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_local_notifications/flutter_local_notifications.dart';
import 'package:get/get.dart';
import 'package:star_lock/appRouters.dart';
import 'package:star_lock/talk/startChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/entity/scp_message.dart';
import 'package:star_lock/talk/startChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/startChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/startChart/proto/gateway_reset.pb.dart';
import 'package:star_lock/talk/startChart/proto/generic.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_request.pb.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/tools/storage.dart';
import '../../start_chart_manage.dart';
class UdpTalkRequestHandler extends ScpMessageBaseHandle
implements ScpMessageHandler {
@ -38,8 +34,6 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
startChartManage.ToPeerId = scpMessage.FromPeerId!;
//
_talkRequestEvent(talkObjectName: talkReq.callerName);
//
talkStatus.setWaitingAnswer();
}
@override
@ -63,11 +57,12 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
_showTalkRequestNotification(talkObjectName: talkObjectName);
//
Get.toNamed(
Routers.starChartPage,
arguments: <String, String>{'lockId': '111'},
Routers.starChartTalkView,
);
//
HapticFeedback.vibrate();
//
talkStatus.setWaitingAnswer();
}
//
@ -95,7 +90,7 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
deserializePayload(
{required int payloadType,
required int messageType,
required List<int> byte,
required List<int> byte,
int? offset,
int? PayloadLength,
int? spTotal,

View File

@ -19,7 +19,7 @@ class TalkDataRepository {
Stream<TalkData> get talkDataStream => _talkDataStreamController.stream;
// TalkData Stream
void addTalkData(TalkData talkData) {
void addTalkData(TalkData talkData) async {
_talkDataStreamController.add(talkData);
}

View File

@ -31,16 +31,19 @@ class ScpMessageBaseHandle {
//
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
// StartChartTalkStatus
final StartChartTalkStatus talkStatus = StartChartTalkStatus.instance;
final audioManager = AudioPlayerManager();
//
final talkePingOverTimeTimerManager = OverTimeTimerManager(
timeoutInSeconds: 55,
timeoutInSeconds: 260,
);
//
final talkDataOverTimeTimerManager = OverTimeTimerManager(
timeoutInSeconds: 53,
timeoutInSeconds: 260,
);
//
@ -52,9 +55,6 @@ class ScpMessageBaseHandle {
);
}
// StartChartTalkStatus
StartChartTalkStatus talkStatus = StartChartTalkStatus.instance;
bool checkGenericRespSuccess(GenericResp genericResp) {
if (genericResp == null) return false;
final code = genericResp.code;
@ -95,8 +95,8 @@ class ScpMessageBaseHandle {
//
if (spIndex < 1 || spIndex > spTotal) {
print(
'Invalid spTotal: $spTotal spIndex: $spIndex for messageId: $messageId');
// print(
// 'Invalid spTotal: $spTotal spIndex: $spIndex for messageId: $messageId');
return null;
}

View File

@ -0,0 +1,18 @@
syntax = "proto3";
package main;
option go_package = "./spb/talk";
// One H.264 video frame carried over the talk (intercom) data channel.
message TalkDataH264Frame {
// Frame sequence number (seq); presumably monotonic for ordering/loss
// detection -- TODO(review): confirm with the sender implementation.
uint32 FrameSeq = 1;
// H.264 frame type (I-frame / P-frame).
// NONE means the sender did not classify the frame.
enum FrameTypeE {
NONE = 0;
I = 1;
P = 2;
};
FrameTypeE FrameType = 2;
// Encoded H.264 frame payload bytes.
bytes FrameData = 3;
}

View File

@ -17,10 +17,11 @@ import 'talk_expect.pbenum.dart';
export 'talk_expect.pbenum.dart';
class TalkExpect extends $pb.GeneratedMessage {
factory TalkExpect({
$core.Iterable<TalkExpect_VideoTypeE>? videoType,
$core.Iterable<TalkExpect_AudioTypeE>? audioType,
/// 便
class TalkExpectReq extends $pb.GeneratedMessage {
factory TalkExpectReq({
$core.Iterable<VideoTypeE>? videoType,
$core.Iterable<AudioTypeE>? audioType,
}) {
final $result = create();
if (videoType != null) {
@ -31,13 +32,13 @@ class TalkExpect extends $pb.GeneratedMessage {
}
return $result;
}
TalkExpect._() : super();
factory TalkExpect.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory TalkExpect.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
TalkExpectReq._() : super();
factory TalkExpectReq.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory TalkExpectReq.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'TalkExpect', package: const $pb.PackageName(_omitMessageNames ? '' : 'main'), createEmptyInstance: create)
..pc<TalkExpect_VideoTypeE>(1, _omitFieldNames ? '' : 'VideoType', $pb.PbFieldType.KE, protoName: 'VideoType', valueOf: TalkExpect_VideoTypeE.valueOf, enumValues: TalkExpect_VideoTypeE.values, defaultEnumValue: TalkExpect_VideoTypeE.NONE_V)
..pc<TalkExpect_AudioTypeE>(2, _omitFieldNames ? '' : 'AudioType', $pb.PbFieldType.KE, protoName: 'AudioType', valueOf: TalkExpect_AudioTypeE.valueOf, enumValues: TalkExpect_AudioTypeE.values, defaultEnumValue: TalkExpect_AudioTypeE.NONE_A)
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'TalkExpectReq', package: const $pb.PackageName(_omitMessageNames ? '' : 'main'), createEmptyInstance: create)
..pc<VideoTypeE>(1, _omitFieldNames ? '' : 'VideoType', $pb.PbFieldType.KE, protoName: 'VideoType', valueOf: VideoTypeE.valueOf, enumValues: VideoTypeE.values, defaultEnumValue: VideoTypeE.NONE_V)
..pc<AudioTypeE>(2, _omitFieldNames ? '' : 'AudioType', $pb.PbFieldType.KE, protoName: 'AudioType', valueOf: AudioTypeE.valueOf, enumValues: AudioTypeE.values, defaultEnumValue: AudioTypeE.NONE_A)
..hasRequiredFields = false
;
@ -45,30 +46,141 @@ class TalkExpect extends $pb.GeneratedMessage {
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
TalkExpect clone() => TalkExpect()..mergeFromMessage(this);
TalkExpectReq clone() => TalkExpectReq()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
TalkExpect copyWith(void Function(TalkExpect) updates) => super.copyWith((message) => updates(message as TalkExpect)) as TalkExpect;
TalkExpectReq copyWith(void Function(TalkExpectReq) updates) => super.copyWith((message) => updates(message as TalkExpectReq)) as TalkExpectReq;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static TalkExpect create() => TalkExpect._();
TalkExpect createEmptyInstance() => create();
static $pb.PbList<TalkExpect> createRepeated() => $pb.PbList<TalkExpect>();
static TalkExpectReq create() => TalkExpectReq._();
TalkExpectReq createEmptyInstance() => create();
static $pb.PbList<TalkExpectReq> createRepeated() => $pb.PbList<TalkExpectReq>();
@$core.pragma('dart2js:noInline')
static TalkExpect getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TalkExpect>(create);
static TalkExpect? _defaultInstance;
static TalkExpectReq getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TalkExpectReq>(create);
static TalkExpectReq? _defaultInstance;
/// NONE的话
///
@$pb.TagNumber(1)
$core.List<TalkExpect_VideoTypeE> get videoType => $_getList(0);
$core.List<VideoTypeE> get videoType => $_getList(0);
@$pb.TagNumber(2)
$core.List<TalkExpect_AudioTypeE> get audioType => $_getList(1);
$core.List<AudioTypeE> get audioType => $_getList(1);
}
///
// GENERATED protobuf message (protoc-gen-dart output for TalkExpectResp).
// Do not hand-edit the code below; regenerate from talk_expect.proto.
class TalkExpectResp extends $pb.GeneratedMessage {
factory TalkExpectResp({
$core.int? width,
$core.int? height,
$core.int? rotate,
VideoTypeE? videoType,
AudioTypeE? audioType,
}) {
final $result = create();
if (width != null) {
$result.width = width;
}
if (height != null) {
$result.height = height;
}
if (rotate != null) {
$result.rotate = rotate;
}
if (videoType != null) {
$result.videoType = videoType;
}
if (audioType != null) {
$result.audioType = audioType;
}
return $result;
}
TalkExpectResp._() : super();
factory TalkExpectResp.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory TalkExpectResp.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'TalkExpectResp', package: const $pb.PackageName(_omitMessageNames ? '' : 'main'), createEmptyInstance: create)
..a<$core.int>(1, _omitFieldNames ? '' : 'Width', $pb.PbFieldType.OU3, protoName: 'Width')
..a<$core.int>(2, _omitFieldNames ? '' : 'Height', $pb.PbFieldType.OU3, protoName: 'Height')
..a<$core.int>(3, _omitFieldNames ? '' : 'Rotate', $pb.PbFieldType.OU3, protoName: 'Rotate')
..e<VideoTypeE>(4, _omitFieldNames ? '' : 'VideoType', $pb.PbFieldType.OE, protoName: 'VideoType', defaultOrMaker: VideoTypeE.NONE_V, valueOf: VideoTypeE.valueOf, enumValues: VideoTypeE.values)
..e<AudioTypeE>(5, _omitFieldNames ? '' : 'AudioType', $pb.PbFieldType.OE, protoName: 'AudioType', defaultOrMaker: AudioTypeE.NONE_A, valueOf: AudioTypeE.valueOf, enumValues: AudioTypeE.values)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
TalkExpectResp clone() => TalkExpectResp()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
TalkExpectResp copyWith(void Function(TalkExpectResp) updates) => super.copyWith((message) => updates(message as TalkExpectResp)) as TalkExpectResp;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static TalkExpectResp create() => TalkExpectResp._();
TalkExpectResp createEmptyInstance() => create();
static $pb.PbList<TalkExpectResp> createRepeated() => $pb.PbList<TalkExpectResp>();
@$core.pragma('dart2js:noInline')
static TalkExpectResp getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TalkExpectResp>(create);
static TalkExpectResp? _defaultInstance;
/// Video width in pixels.
@$pb.TagNumber(1)
$core.int get width => $_getIZ(0);
@$pb.TagNumber(1)
set width($core.int v) { $_setUnsignedInt32(0, v); }
@$pb.TagNumber(1)
$core.bool hasWidth() => $_has(0);
@$pb.TagNumber(1)
void clearWidth() => clearField(1);
/// Video height in pixels.
@$pb.TagNumber(2)
$core.int get height => $_getIZ(1);
@$pb.TagNumber(2)
set height($core.int v) { $_setUnsignedInt32(1, v); }
@$pb.TagNumber(2)
$core.bool hasHeight() => $_has(1);
@$pb.TagNumber(2)
void clearHeight() => clearField(2);
/// Rotation of the video; 0 presumably means no rotation --
/// TODO(review): confirm the unit (degrees?) against the .proto source.
@$pb.TagNumber(3)
$core.int get rotate => $_getIZ(2);
@$pb.TagNumber(3)
set rotate($core.int v) { $_setUnsignedInt32(2, v); }
@$pb.TagNumber(3)
$core.bool hasRotate() => $_has(2);
@$pb.TagNumber(3)
void clearRotate() => clearField(3);
/// Video codec selected by the responder.
@$pb.TagNumber(4)
VideoTypeE get videoType => $_getN(3);
@$pb.TagNumber(4)
set videoType(VideoTypeE v) { setField(4, v); }
@$pb.TagNumber(4)
$core.bool hasVideoType() => $_has(3);
@$pb.TagNumber(4)
void clearVideoType() => clearField(4);
/// Audio codec selected by the responder.
@$pb.TagNumber(5)
AudioTypeE get audioType => $_getN(4);
@$pb.TagNumber(5)
set audioType(AudioTypeE v) { setField(5, v); }
@$pb.TagNumber(5)
$core.bool hasAudioType() => $_has(4);
@$pb.TagNumber(5)
void clearAudioType() => clearField(5);
}

View File

@ -14,39 +14,43 @@ import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
///
class TalkExpect_VideoTypeE extends $pb.ProtobufEnum {
static const TalkExpect_VideoTypeE NONE_V = TalkExpect_VideoTypeE._(0, _omitEnumNames ? '' : 'NONE_V');
static const TalkExpect_VideoTypeE H264 = TalkExpect_VideoTypeE._(1, _omitEnumNames ? '' : 'H264');
static const TalkExpect_VideoTypeE IMAGE = TalkExpect_VideoTypeE._(2, _omitEnumNames ? '' : 'IMAGE');
class VideoTypeE extends $pb.ProtobufEnum {
static const VideoTypeE NONE_V = VideoTypeE._(0, _omitEnumNames ? '' : 'NONE_V');
static const VideoTypeE H264 = VideoTypeE._(1, _omitEnumNames ? '' : 'H264');
static const VideoTypeE IMAGE = VideoTypeE._(2, _omitEnumNames ? '' : 'IMAGE');
static const VideoTypeE VP8 = VideoTypeE._(3, _omitEnumNames ? '' : 'VP8');
static const $core.List<TalkExpect_VideoTypeE> values = <TalkExpect_VideoTypeE> [
static const $core.List<VideoTypeE> values = <VideoTypeE> [
NONE_V,
H264,
IMAGE,
VP8,
];
static final $core.Map<$core.int, TalkExpect_VideoTypeE> _byValue = $pb.ProtobufEnum.initByValue(values);
static TalkExpect_VideoTypeE? valueOf($core.int value) => _byValue[value];
static final $core.Map<$core.int, VideoTypeE> _byValue = $pb.ProtobufEnum.initByValue(values);
static VideoTypeE? valueOf($core.int value) => _byValue[value];
const TalkExpect_VideoTypeE._($core.int v, $core.String n) : super(v, n);
const VideoTypeE._($core.int v, $core.String n) : super(v, n);
}
///
class TalkExpect_AudioTypeE extends $pb.ProtobufEnum {
static const TalkExpect_AudioTypeE NONE_A = TalkExpect_AudioTypeE._(0, _omitEnumNames ? '' : 'NONE_A');
static const TalkExpect_AudioTypeE AAC = TalkExpect_AudioTypeE._(1, _omitEnumNames ? '' : 'AAC');
static const TalkExpect_AudioTypeE G711 = TalkExpect_AudioTypeE._(2, _omitEnumNames ? '' : 'G711');
class AudioTypeE extends $pb.ProtobufEnum {
static const AudioTypeE NONE_A = AudioTypeE._(0, _omitEnumNames ? '' : 'NONE_A');
static const AudioTypeE AAC = AudioTypeE._(1, _omitEnumNames ? '' : 'AAC');
static const AudioTypeE G711 = AudioTypeE._(2, _omitEnumNames ? '' : 'G711');
static const AudioTypeE OPUS = AudioTypeE._(3, _omitEnumNames ? '' : 'OPUS');
static const $core.List<TalkExpect_AudioTypeE> values = <TalkExpect_AudioTypeE> [
static const $core.List<AudioTypeE> values = <AudioTypeE> [
NONE_A,
AAC,
G711,
OPUS,
];
static final $core.Map<$core.int, TalkExpect_AudioTypeE> _byValue = $pb.ProtobufEnum.initByValue(values);
static TalkExpect_AudioTypeE? valueOf($core.int value) => _byValue[value];
static final $core.Map<$core.int, AudioTypeE> _byValue = $pb.ProtobufEnum.initByValue(values);
static AudioTypeE? valueOf($core.int value) => _byValue[value];
const TalkExpect_AudioTypeE._($core.int v, $core.String n) : super(v, n);
const AudioTypeE._($core.int v, $core.String n) : super(v, n);
}

View File

@ -13,40 +13,66 @@ import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use talkExpectDescriptor instead')
const TalkExpect$json = {
'1': 'TalkExpect',
'2': [
{'1': 'VideoType', '3': 1, '4': 3, '5': 14, '6': '.main.TalkExpect.VideoTypeE', '10': 'VideoType'},
{'1': 'AudioType', '3': 2, '4': 3, '5': 14, '6': '.main.TalkExpect.AudioTypeE', '10': 'AudioType'},
],
'4': [TalkExpect_VideoTypeE$json, TalkExpect_AudioTypeE$json],
};
@$core.Deprecated('Use talkExpectDescriptor instead')
const TalkExpect_VideoTypeE$json = {
@$core.Deprecated('Use videoTypeEDescriptor instead')
const VideoTypeE$json = {
'1': 'VideoTypeE',
'2': [
{'1': 'NONE_V', '2': 0},
{'1': 'H264', '2': 1},
{'1': 'IMAGE', '2': 2},
{'1': 'VP8', '2': 3},
],
};
@$core.Deprecated('Use talkExpectDescriptor instead')
const TalkExpect_AudioTypeE$json = {
/// Descriptor for `VideoTypeE`. Decode as a `google.protobuf.EnumDescriptorProto`.
final $typed_data.Uint8List videoTypeEDescriptor = $convert.base64Decode(
'CgpWaWRlb1R5cGVFEgoKBk5PTkVfVhAAEggKBEgyNjQQARIJCgVJTUFHRRACEgcKA1ZQOBAD');
@$core.Deprecated('Use audioTypeEDescriptor instead')
const AudioTypeE$json = {
'1': 'AudioTypeE',
'2': [
{'1': 'NONE_A', '2': 0},
{'1': 'AAC', '2': 1},
{'1': 'G711', '2': 2},
{'1': 'OPUS', '2': 3},
],
};
/// Descriptor for `TalkExpect`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List talkExpectDescriptor = $convert.base64Decode(
'CgpUYWxrRXhwZWN0EjkKCVZpZGVvVHlwZRgBIAMoDjIbLm1haW4uVGFsa0V4cGVjdC5WaWRlb1'
'R5cGVFUglWaWRlb1R5cGUSOQoJQXVkaW9UeXBlGAIgAygOMhsubWFpbi5UYWxrRXhwZWN0LkF1'
'ZGlvVHlwZUVSCUF1ZGlvVHlwZSItCgpWaWRlb1R5cGVFEgoKBk5PTkVfVhAAEggKBEgyNjQQAR'
'IJCgVJTUFHRRACIisKCkF1ZGlvVHlwZUUSCgoGTk9ORV9BEAASBwoDQUFDEAESCAoERzcxMRAC');
/// Descriptor for `AudioTypeE`. Decode as a `google.protobuf.EnumDescriptorProto`.
final $typed_data.Uint8List audioTypeEDescriptor = $convert.base64Decode(
'CgpBdWRpb1R5cGVFEgoKBk5PTkVfQRAAEgcKA0FBQxABEggKBEc3MTEQAhIICgRPUFVTEAM=');
@$core.Deprecated('Use talkExpectReqDescriptor instead')
const TalkExpectReq$json = {
'1': 'TalkExpectReq',
'2': [
{'1': 'VideoType', '3': 1, '4': 3, '5': 14, '6': '.main.VideoTypeE', '10': 'VideoType'},
{'1': 'AudioType', '3': 2, '4': 3, '5': 14, '6': '.main.AudioTypeE', '10': 'AudioType'},
],
};
/// Descriptor for `TalkExpectReq`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List talkExpectReqDescriptor = $convert.base64Decode(
'Cg1UYWxrRXhwZWN0UmVxEi4KCVZpZGVvVHlwZRgBIAMoDjIQLm1haW4uVmlkZW9UeXBlRVIJVm'
'lkZW9UeXBlEi4KCUF1ZGlvVHlwZRgCIAMoDjIQLm1haW4uQXVkaW9UeXBlRVIJQXVkaW9UeXBl');
@$core.Deprecated('Use talkExpectRespDescriptor instead')
const TalkExpectResp$json = {
'1': 'TalkExpectResp',
'2': [
{'1': 'Width', '3': 1, '4': 1, '5': 13, '10': 'Width'},
{'1': 'Height', '3': 2, '4': 1, '5': 13, '10': 'Height'},
{'1': 'Rotate', '3': 3, '4': 1, '5': 13, '10': 'Rotate'},
{'1': 'VideoType', '3': 4, '4': 1, '5': 14, '6': '.main.VideoTypeE', '10': 'VideoType'},
{'1': 'AudioType', '3': 5, '4': 1, '5': 14, '6': '.main.AudioTypeE', '10': 'AudioType'},
],
};
/// Descriptor for `TalkExpectResp`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List talkExpectRespDescriptor = $convert.base64Decode(
'Cg5UYWxrRXhwZWN0UmVzcBIUCgVXaWR0aBgBIAEoDVIFV2lkdGgSFgoGSGVpZ2h0GAIgASgNUg'
'ZIZWlnaHQSFgoGUm90YXRlGAMgASgNUgZSb3RhdGUSLgoJVmlkZW9UeXBlGAQgASgOMhAubWFp'
'bi5WaWRlb1R5cGVFUglWaWRlb1R5cGUSLgoJQXVkaW9UeXBlGAUgASgOMhAubWFpbi5BdWRpb1'
'R5cGVFUglBdWRpb1R5cGU=');

View File

@ -3,21 +3,39 @@ syntax = "proto3";
package main;
option go_package = "./spb/talk";
message TalkExpect {
//
enum VideoTypeE {
NONE_V = 0;
H264 = 1;
IMAGE = 2;
}
//
enum AudioTypeE {
NONE_A = 0;
AAC = 1;
G711 = 2;
}
// Video codecs/formats supported on the talk stream.
enum VideoTypeE {
NONE_V = 0;
H264 = 1; // H.264/AVC -- widely supported, typically hardware accelerated.
IMAGE = 2; // Individual JPEG image frames.
VP8 = 3; // VP8 (Google) -- the default WebRTC video codec, widely used in HTML5/real-time video.
}
// Audio codecs supported on the talk stream.
enum AudioTypeE {
NONE_A = 0;
AAC = 1; // AAC -- widely used lossy audio codec.
G711 = 2; // G.711 -- 8 kHz, 8-bit narrowband codec, common in VoIP.
OPUS = 3; // Opus -- default WebRTC audio codec, 6 kbps to 510 kbps, common in VoIP.
}
// Request: the media types the caller is willing/able to receive.
message TalkExpectReq {
// Accepted codecs. Semantics of including the NONE value are not defined
// here -- TODO(review): confirm with the device-side implementation.
repeated VideoTypeE VideoType = 1;
repeated AudioTypeE AudioType = 2;
}
// Response: the media parameters the responder actually selected.
message TalkExpectResp {
// Video width in pixels.
uint32 Width = 1;
// Video height in pixels.
uint32 Height = 2;
// Rotation; 0 presumably means no rotation -- confirm unit (degrees?).
uint32 Rotate = 3;
// Selected video and audio codecs for this session.
VideoTypeE VideoType = 4;
AudioTypeE AudioType = 5;
}

View File

@ -12,11 +12,13 @@ import 'package:star_lock/flavors.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/network/start_chart_api.dart';
import 'package:star_lock/talk/other/audio_player_manager.dart';
import 'package:star_lock/talk/startChart/command/message_command.dart';
import 'package:star_lock/talk/startChart/constant/ip_constant.dart';
import 'package:star_lock/talk/startChart/constant/listen_addr_type_constant.dart';
import 'package:star_lock/talk/startChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/startChart/constant/payload_type_constant.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/entity/relay_info_entity.dart';
import 'package:star_lock/talk/startChart/entity/report_information_data.dart';
import 'package:star_lock/talk/startChart/entity/scp_message.dart';
@ -26,6 +28,7 @@ import 'package:star_lock/talk/startChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/startChart/handle/scp_message_handler_factory.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_expect.pbserver.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/tools/baseGetXController.dart';
import 'package:star_lock/tools/deviceInfo_utils.dart';
@ -83,9 +86,9 @@ class StartChartManage {
final int _maxPayloadSize = 8 * 1024; //
//
TalkExpect defaultTalkExpect = TalkExpect(
videoType: [TalkExpect_VideoTypeE.IMAGE],
audioType: [TalkExpect_AudioTypeE.G711],
TalkExpectReq defaultTalkExpect = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
//
@ -345,7 +348,9 @@ class StartChartManage {
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
//
talkStatus.setWaitingAnswer();
_log(text: '发送同意接听消息');
}
//
@ -360,10 +365,15 @@ class StartChartManage {
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
//
StartChartTalkStatus.instance.setRejected();
//
AudioPlayerManager().stopRingtone();
}
//
void sendTalkExpectMessage({required TalkExpect talkExpect}) async {
void sendTalkExpectMessage({required TalkExpectReq talkExpect}) async {
final message = MessageCommand.talkExpectMessage(
ToPeerId: ToPeerId,
FromPeerId: FromPeerId,
@ -424,6 +434,11 @@ class StartChartManage {
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
//
StartChartTalkStatus.instance.setHangingUpDuring();
//
AudioPlayerManager().stopRingtone();
}
// 线
@ -785,8 +800,7 @@ class StartChartManage {
}
}
} catch (e, stackTrace) {
throw StartChartMessageException(
'❌ Udp result data error ----> $e\n,$stackTrace');
throw StartChartMessageException('$e\n,$stackTrace');
}
}
});
@ -958,7 +972,7 @@ class StartChartManage {
}
///
void changeTalkExpectDataType({required TalkExpect talkExpect}) {
void changeTalkExpectDataType({required TalkExpectReq talkExpect}) {
defaultTalkExpect = talkExpect;
}

View File

@ -1,131 +1,70 @@
//
import 'dart:async';
import 'package:star_lock/blue/io_tool/manager_event_bus.dart';
import 'package:star_lock/talk/startChart/events/talk_status_change_event.dart';
enum TalkStatus {
waitingAnswer, //
answeredSuccessfully, //
waitingData, //
duringCall, //
hangingUpDuring, //
rejected, //
uninitialized, //
initializationCompleted, //
notTalkData, //
notTalkPing, //
error, //
end, //
}
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
class StartChartTalkStatus {
//
TalkStatus _status = TalkStatus.uninitialized;
//
StartChartTalkStatus._(
{TalkStatus initialStatus = TalkStatus.uninitialized}) {
_status = initialStatus;
}
StartChartTalkStatus._();
//
static final StartChartTalkStatus _instance = StartChartTalkStatus._();
//
//
static StartChartTalkStatus get instance => _instance;
// StreamController
final StreamController<TalkStatus> _statusStreamController =
StreamController<TalkStatus>.broadcast();
// Stream
Stream<TalkStatus> get statusStream => _statusStreamController.stream;
// getter
TalkStatus get status => _status;
// set
set status(TalkStatus newStatus) {
_setStatus(newStatus);
}
//
void _setStatus(TalkStatus newStatus) {
if (_status == newStatus) return; //
print("对讲状态变化: ${_status.name} -> ${newStatus.name}");
print("对讲状态变化: ${_status} -> ${newStatus}");
//
_status = newStatus;
//
_onStatusChanged(newStatus);
//
_statusStreamController.add(_status);
}
//
void _onStatusChanged(TalkStatus newStatus) {
//
EventBusManager().eventBus!.fire(TalkStatusChangeEvent(_status, newStatus));
}
//
void setWaitingAnswer() => _setStatus(TalkStatus.waitingAnswer);
///
void setWaitingAnswer() {
_setStatus(TalkStatus.waitingAnswer);
// "waitingAnswer"
}
void setWaitingData() => _setStatus(TalkStatus.waitingData);
///
void setWaitingData() {
_setStatus(TalkStatus.waitingData);
// "waitingAnswer"
}
void setDuringCall() => _setStatus(TalkStatus.duringCall);
void setRejected() => _setStatus(TalkStatus.rejected);
///
void setDuringCall() {
_setStatus(TalkStatus.duringCall);
// "duringCall"
}
void setUninitialized() => _setStatus(TalkStatus.uninitialized);
///
void setRejected() {
_setStatus(TalkStatus.rejected);
// "rejected"
}
void setInitializationCompleted() =>
_setStatus(TalkStatus.initializationCompleted);
///
void setUninitialized() {
_setStatus(TalkStatus.uninitialized);
// "uninitialized"
}
void setNotTalkData() => _setStatus(TalkStatus.notTalkData);
///
void setInitializationCompleted() {
_setStatus(TalkStatus.initializationCompleted);
// "initializationCompleted"
}
void setNotTalkPing() => _setStatus(TalkStatus.notTalkPing);
///
void setNotTalkData() {
_setStatus(TalkStatus.notTalkData);
// "notTalkData"
}
void setError() => _setStatus(TalkStatus.error);
///
void setNotTalkPing() {
_setStatus(TalkStatus.notTalkPing);
// "notTalkPing"
}
void setHangingUpDuring() => _setStatus(TalkStatus.hangingUpDuring);
///
void setError() {
_setStatus(TalkStatus.error);
// "error"
}
void setAnsweredSuccessfully() => _setStatus(TalkStatus.answeredSuccessfully);
///
void setHangingUpDuring() {
_setStatus(TalkStatus.hangingUpDuring);
// "hangingUpDuring"
} ///
void setAnsweredSuccessfully() {
_setStatus(TalkStatus.answeredSuccessfully);
// "hangingUpDuring"
}
void setEnd() => _setStatus(TalkStatus.end);
///
void setEnd() {
_setStatus(TalkStatus.end);
// "end"
// StreamController
void dispose() {
_statusStreamController.close();
}
}

View File

@ -0,0 +1,291 @@
import 'dart:async';
import 'dart:io';
import 'dart:math';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/blue/io_tool/manager_event_bus.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pbenum.dart';
import 'package:star_lock/talk/startChart/start_chart_manage.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
import '../../../../talk/call/g711.dart';
import '../../../../talk/udp/udp_manage.dart';
import '../../../../talk/udp/udp_senderManage.dart';
import '../../../../tools/baseGetXController.dart';
import '../../../../tools/eventBusEventManage.dart';
/// Controller for the talk (video intercom) view.
///
/// Buffers incoming audio (G.711 content type) and video (JPEG image
/// frames) from the UDP talk channel, plays them back on a shared pacing
/// timer, and tears the page down when the talk reaches a terminal state.
class TalkViewLogic extends BaseGetXController {
  final TalkViewState state = TalkViewState();

  // Shared playback timer that drives both audio and video consumption.
  Timer? _syncTimer;
  // Wall-clock playback start (ms since epoch); frames are scheduled
  // relative to this via their durationMs.
  int _startTime = 0;
  // Target buffer depth. NOTE(review): never read inside this class --
  // confirm whether it is used elsewhere or is dead.
  final int bufferSize = 22; //
  // Render timestamps of recent frames, used to compute the displayed FPS.
  final List<int> frameTimestamps = [];
  int frameIntervalMs = 45; // ~22 FPS default tick
  int minFrameIntervalMs = 30; // fastest tick (~33 FPS)
  int maxFrameIntervalMs = 100; // slowest tick (10 FPS)

  /// Subscription for talk-status UI refresh events.
  /// NOTE(review): declared but never assigned or cancelled here.
  StreamSubscription? _getTalkStatusRefreshUIEvent;

  // Configures the PCM player (44.1 kHz stereo) and its feed threshold.
  void _initFlutterPcmSound() {
    const int sampleRate = 44100;
    FlutterPcmSound.setLogLevel(LogLevel.verbose);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 2);
    // Threshold at which the plugin requests more PCM data to feed.
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(-1); // Android: feed on demand
    } else {
      // Non-Android (iOS). NOTE(review): original comment said "Android"
      // on this branch too -- confirm the intended platform split.
      FlutterPcmSound.setFeedThreshold(sampleRate ~/ 32);
    }
  }

  /// Ends the call and pops this page: sends an in-call hang-up if the
  /// call is connected, otherwise a reject for the pending request.
  /// NOTE(review): `void` + `async` means callers cannot await the UDP
  /// send before the page is popped.
  void udpHangUpAction() async {
    if (state.talkStatus.value == TalkStatus.duringCall) {
      // Connected: hang up the ongoing call.
      StartChartManage().sendTalkHangupMessage();
    } else {
      // Not yet connected: reject the incoming request.
      StartChartManage().sendTalkRejectMessage();
    }
    Get.back();
  }

  /// Accepts the incoming call.
  void initiateAnswerCommand() {
    StartChartManage().sendTalkAcceptMessage();
  }

  /// Recomputes the rendered FPS from timestamps within the last second.
  void _updateFps(List<int> frameTimestamps) {
    final int now = DateTime.now().millisecondsSinceEpoch;
    // Keep only timestamps from the last 1000 ms.
    frameTimestamps.removeWhere((timestamp) => now - timestamp > 1000);
    // Frames rendered in the last second == FPS.
    final double fps = frameTimestamps.length.toDouble();
    // Publish for the UI overlay.
    state.fps.value = fps;
  }

  /// Routes incoming talk packets into the audio/video buffers, capping
  /// each buffer at 60 entries to bound memory and latency.
  void _startListenTalkData() {
    state.talkDataRepository.talkDataStream.listen((talkData) {
      final contentType = talkData.contentType;
      // Dispatch by payload content type.
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          // state.audioBuffer.add(talkData);
          if (state.audioBuffer.length < 60) {
            // Cap the audio backlog at 60 packets.
            state.audioBuffer.add(talkData);
          }
          break;
        case TalkData_ContentTypeE.Image:
          // state.videoBuffer.add(talkData);
          // Cap the video backlog at 60 frames.
          if (state.videoBuffer.length < 60) {
            state.videoBuffer.add(talkData);
          }
          break;
      }
    });
  }

  /// Mirrors talk-status changes into state and tears the view down on
  /// any terminal/failed state.
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          // Terminal state: stop playback and leave the page.
          _handleInvalidTalkStatus();
          break;
        default:
          // Non-terminal states need no action here.
          break;
      }
    });
  }

  // Feeds one packet of samples to the PCM player, starting playback
  // lazily on the first packet.
  // NOTE(review): content is fed as raw 16-bit PCM -- assumes the G.711
  // payload was already decoded upstream; confirm.
  void _playAudioData(TalkData talkData) {
    // Wrap the raw bytes as 16-bit PCM samples.
    final PcmArrayInt16 fromList = PcmArrayInt16.fromList(talkData.content);
    FlutterPcmSound.feed(fromList);
    if (!state.isPlaying.value) {
      FlutterPcmSound.play();
      state.isPlaying.value = true;
    }
  }

  // Publishes the latest video frame bytes for the UI to render.
  void _playVideoData(TalkData talkData) {
    state.listData.value = Uint8List.fromList(talkData.content);
  }

  // Starts the playback loop after an 800 ms pre-buffer delay.
  // NOTE(review): the local `frameIntervalMs` below shadows the field of
  // the same name, so field adjustments never affect this initial timer
  // (which `_adjustFrameInterval` replaces on the very first tick anyway).
  void _startPlayback() {
    int frameIntervalMs = 45; // ~22 FPS initial cadence
    Future.delayed(Duration(milliseconds: 800), () {
      _startTime = DateTime.now().millisecondsSinceEpoch;
      _syncTimer ??=
          Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
        final currentTime = DateTime.now().millisecondsSinceEpoch;
        final elapsedTime = currentTime - _startTime;
        // elapsedTime is the playback clock in ms since start.
        // AppLog.log('Elapsed Time: $elapsedTime ms');
        // Adapt pacing to buffer depth (this replaces _syncTimer).
        _adjustFrameInterval();
        // Play at most one due audio packet per tick.
        if (state.audioBuffer.isNotEmpty &&
            state.audioBuffer.first.durationMs <= elapsedTime) {
          _playAudioData(state.audioBuffer.removeAt(0));
        }
        // Drain all frames whose deadline has passed, discarding older
        // ones and rendering only the last remaining due frame.
        while (state.videoBuffer.isNotEmpty &&
            state.videoBuffer.first.durationMs <= elapsedTime) {
          // More due frames behind this one: drop it to catch up.
          if (state.videoBuffer.length > 1) {
            state.videoBuffer.removeAt(0);
          } else {
            // Record the render time for the FPS counter.
            frameTimestamps.add(DateTime.now().millisecondsSinceEpoch);
            // Refresh FPS display.
            _updateFps(frameTimestamps);
            _playVideoData(state.videoBuffer.removeAt(0));
          }
        }
      });
    });
  }

  /// Adapts the tick interval to buffer depth: slow down when the video
  /// buffer runs low (<10), speed up when it backs up (>20).
  /// NOTE(review): this cancels and recreates `_syncTimer` on EVERY call
  /// even when the interval is unchanged, duplicates the tick body from
  /// `_startPlayback`, and the replacement timer never calls this method
  /// again -- so adaptation effectively happens only once. Consider a
  /// shared tick handler that reschedules only on interval change.
  void _adjustFrameInterval() {
    if (state.videoBuffer.length < 10 && frameIntervalMs < maxFrameIntervalMs) {
      // Buffer draining: lengthen the interval.
      frameIntervalMs += 5;
    } else if (state.videoBuffer.length > 20 &&
        frameIntervalMs > minFrameIntervalMs) {
      // Buffer backing up: shorten the interval.
      frameIntervalMs -= 5;
    }
    _syncTimer?.cancel();
    _syncTimer =
        Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
      final currentTime = DateTime.now().millisecondsSinceEpoch;
      final elapsedTime = currentTime - _startTime;
      // Play at most one due audio packet per tick.
      if (state.audioBuffer.isNotEmpty &&
          state.audioBuffer.first.durationMs <= elapsedTime) {
        _playAudioData(state.audioBuffer.removeAt(0));
      }
      // Drain due video frames, rendering only the last remaining one.
      while (state.videoBuffer.isNotEmpty &&
          state.videoBuffer.first.durationMs <= elapsedTime) {
        // More due frames behind this one: drop it to catch up.
        if (state.videoBuffer.length > 1) {
          state.videoBuffer.removeAt(0);
        } else {
          // Record the render time for the FPS counter.
          frameTimestamps.add(DateTime.now().millisecondsSinceEpoch);
          // Refresh FPS display.
          _updateFps(frameTimestamps);
          _playVideoData(state.videoBuffer.removeAt(0));
        }
      }
    });
  }

  /// Pauses, stops, and clears the PCM player.
  void _stopPlayG711Data() async {
    print('停止播放');
    await FlutterPcmSound.pause();
    await FlutterPcmSound.stop();
    await FlutterPcmSound.clear();
  }

  /// Remote door-open command -- not implemented yet.
  udpOpenDoorAction(List<int> list) async {}

  /// Returns true only when the microphone permission is already granted;
  /// otherwise starts the appropriate request/settings flow and returns
  /// false (the request result is not awaited here).
  Future<bool> getPermissionStatus() async {
    final Permission permission = Permission.microphone;
    // Possible states: granted / denied / permanentlyDenied / restricted.
    final PermissionStatus status = await permission.status;
    if (status.isGranted) {
      return true;
    } else if (status.isDenied) {
      requestPermission(permission);
    } else if (status.isPermanentlyDenied) {
      openAppSettings();
    } else if (status.isRestricted) {
      requestPermission(permission);
    } else {}
    return false;
  }

  /// Requests [permission]; if it comes back permanently denied, opens
  /// the system app settings so the user can grant it manually.
  void requestPermission(Permission permission) async {
    final PermissionStatus status = await permission.request();
    if (status.isPermanentlyDenied) {
      openAppSettings();
    }
  }

  @override
  void onReady() {
    super.onReady();
  }

  @override
  void onInit() {
    super.onInit();
    // Subscribe to incoming media packets.
    _startListenTalkData();
    // Subscribe to talk-status transitions.
    _startListenTalkStatus();
    // Seed the UI with the current status so a transition that happened
    // before this page opened is not missed.
    state.talkStatus.value = state.startChartTalkStatus.status;
    _initFlutterPcmSound();
    _startPlayback();
  }

  @override
  void onClose() {
    // NOTE(review): the stream subscriptions created in onInit are never
    // cancelled and super.onClose() is not called -- confirm GetX cleanup.
    _stopPlayG711Data();
    state.listData.value = Uint8List(0);
    _syncTimer?.cancel();
  }

  /// Terminal-state cleanup: clear the last rendered frame, stop audio
  /// playback, and pop the page.
  void _handleInvalidTalkStatus() {
    state.listData.value = Uint8List(0);
    // Stop PCM output.
    _stopPlayG711Data();
    // Leave the talk page.
    Get.back();
  }
}

View File

@ -0,0 +1,373 @@
import 'dart:async';
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/main/lockDetail/realTimePicture/realTimePicture_state.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_logic.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
import '../../../../app_settings/app_colors.dart';
import '../../../../tools/showTFView.dart';
/// Entry page for the star-chart real-time talk (intercom) view.
///
/// Hosts the live video feed and call controls; all state lives in
/// [TalkViewLogic]/[TalkViewState] via GetX.
class TalkViewPage extends StatefulWidget {
  /// Creates the talk page.
  ///
  /// Uses a Dart 2.17+ super parameter in place of the legacy
  /// `{Key? key} : super(key: key)` boilerplate; the call-site interface
  /// is unchanged.
  const TalkViewPage({super.key});

  @override
  State<TalkViewPage> createState() => _TalkViewPageState();
}
/// State for [TalkViewPage].
///
/// Renders the live video frames pushed into `state.listData`, a
/// translucent control panel (mute / screenshot / record / answer /
/// hang-up / unlock), an FPS debug overlay, and a rotating "connecting"
/// indicator while no frame has arrived yet.
class _TalkViewPageState extends State<TalkViewPage>
    with TickerProviderStateMixin {
  // GetX controller/state pair backing this page.
  final TalkViewLogic logic = Get.put(TalkViewLogic());
  final TalkViewState state = Get.find<TalkViewLogic>().state;

  @override
  void initState() {
    super.initState();
    // Auto-close after 30s (currently disabled).
    // state.autoBackTimer = Timer(const Duration(seconds: 30), Get.back);
    state.animationController = AnimationController(
      vsync: this, // This widget provides the Ticker.
      duration: const Duration(seconds: 1),
    );
    state.animationController.repeat();
    // Restart the spin whenever a cycle completes or is dismissed so the
    // "connecting" indicator keeps rotating.
    // NOTE(review): with repeat() active these status callbacks look
    // redundant — confirm whether both mechanisms are needed.
    state.animationController.addStatusListener((AnimationStatus status) {
      // AppLog.log("AnimationStatus:$status");
      if (status == AnimationStatus.completed) {
        state.animationController.reset();
        state.animationController.forward();
      } else if (status == AnimationStatus.dismissed) {
        state.animationController.reset();
        state.animationController.forward();
      }
    });
  }

  @override
  Widget build(BuildContext context) {
    return SizedBox(
      width: 1.sw,
      height: 1.sh,
      child: Stack(
        alignment: Alignment.center,
        children: <Widget>[
          // Video layer: placeholder background until the first frame
          // arrives, then the latest decoded frame bytes.
          Obx(
            () => state.listData.value.isEmpty
                ? Image.asset(
                    'images/main/monitorBg.png',
                    width: ScreenUtil().screenWidth,
                    height: ScreenUtil().screenHeight,
                    fit: BoxFit.cover,
                  )
                : Image.memory(
                    state.listData.value,
                    gaplessPlayback: true,
                    width: 1.sw,
                    height: 1.sh,
                    fit: BoxFit.cover,
                    filterQuality: FilterQuality.high,
                    errorBuilder: (
                      BuildContext context,
                      Object error,
                      StackTrace? stackTrace,
                    ) {
                      // Swallow decode errors from a partial/corrupt frame.
                      return Container(color: Colors.transparent);
                    },
                  ),
          ),
          // "Establishing secure connection" hint while waiting for video.
          Obx(() => state.listData.value.isEmpty
              ? Positioned(
                  bottom: 300.h,
                  child: Text(
                    '正在创建安全连接...'.tr,
                    style: TextStyle(color: Colors.black, fontSize: 26.sp),
                  ))
              : Container()),
          // Bottom control panel: two button rows on a translucent card.
          Positioned(
            bottom: 10.w,
            child: Container(
              width: 1.sw - 30.w * 2,
              // height: 300.h,
              margin: EdgeInsets.all(30.w),
              decoration: BoxDecoration(
                  color: Colors.black.withOpacity(0.2),
                  borderRadius: BorderRadius.circular(20.h)),
              child: Column(
                children: <Widget>[
                  SizedBox(height: 20.h),
                  bottomTopBtnWidget(),
                  SizedBox(height: 20.h),
                  bottomBottomBtnWidget(),
                  SizedBox(height: 20.h),
                ],
              ),
            ),
          ),
          // Debug overlay: current frames-per-second.
          Positioned(
            top: 100.h,
            left: 10.w,
            child: Obx(
              () => Text(
                'FPS:${state.fps.value}',
                style: TextStyle(
                    fontSize: 30.sp,
                    color: Colors.orange,
                    fontWeight: FontWeight.bold),
              ),
            ),
          ),
          // Spinning "connecting" indicator until the first frame arrives.
          Obx(() => state.listData.value.isEmpty
              ? buildRotationTransition()
              : Container())
        ],
      ),
    );
  }

  /// Top row of controls: speaker toggle, screenshot, screen recording,
  /// and an aspect/fullscreen button (feature not yet open).
  Widget bottomTopBtnWidget() {
    return Row(mainAxisAlignment: MainAxisAlignment.center, children: <Widget>[
      // Speaker on/off toggle; icon follows the reactive flag.
      GestureDetector(
        onTap: () {
          state.isOpenVoice.value = !state.isOpenVoice.value;
        },
        child: Container(
          width: 50.w,
          height: 50.w,
          padding: EdgeInsets.all(5.w),
          child: Obx(() => Image(
              width: 40.w,
              height: 40.w,
              image: state.isOpenVoice.value
                  ? const AssetImage(
                      'images/main/icon_lockDetail_monitoringCloseVoice.png')
                  : const AssetImage(
                      'images/main/icon_lockDetail_monitoringOpenVoice.png'))),
        ),
      ),
      SizedBox(width: 50.w),
      // Screenshot button (tap handler not wired up yet).
      GestureDetector(
        onTap: () {
          // Get.toNamed(Routers.monitoringRealTimeScreenPage);
        },
        child: Container(
          width: 50.w,
          height: 50.w,
          padding: EdgeInsets.all(5.w),
          child: Image(
              width: 40.w,
              height: 40.w,
              image: const AssetImage(
                  'images/main/icon_lockDetail_monitoringScreenshot.png')),
        ),
      ),
      SizedBox(width: 50.w),
      // Screen recording button (tap handler not wired up yet).
      GestureDetector(
        onTap: () {
          // Get.toNamed(Routers.monitoringRealTimeScreenPage);
        },
        child: Container(
          width: 50.w,
          height: 50.w,
          padding: EdgeInsets.all(5.w),
          child: Image(
              width: 40.w,
              height: 40.w,
              fit: BoxFit.fill,
              image: const AssetImage(
                  'images/main/icon_lockDetail_monitoringScreenRecording.png')),
        ),
      ),
      SizedBox(width: 50.w),
      // Aspect/fullscreen toggle — shows "feature not yet open" toast.
      GestureDetector(
          onTap: () {
            logic.showToast('功能暂未开放'.tr);
          },
          child: Image(
              width: 28.w,
              height: 28.w,
              fit: BoxFit.fill,
              image: const AssetImage(
                  'images/main/icon_lockDetail_rectangle.png')))
    ]);
  }

  /// Bottom row of controls: answer / push-to-talk, hang up, and unlock.
  Widget bottomBottomBtnWidget() {
    return Row(
        mainAxisAlignment: MainAxisAlignment.spaceEvenly,
        children: <Widget>[
          // Answer button; icon and label track the reactive talk status.
          Obx(
            () => bottomBtnItemWidget(
              getAnswerBtnImg(),
              getAnswerBtnName(),
              Colors.white,
              // Long-press handlers reserved for push-to-talk; empty so far.
              longPress: () async {},
              longPressUp: () async {},
              onClick: () async {
                if (state.talkStatus.value == TalkStatus.waitingAnswer) {
                  // Accept the incoming call.
                  logic.initiateAnswerCommand();
                }
              },
            ),
          ),
          bottomBtnItemWidget(
              'images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red,
              onClick: () {
            // Terminate the call over UDP.
            logic.udpHangUpAction();
          }),
          bottomBtnItemWidget(
            'images/main/icon_lockDetail_monitoringUnlock.png',
            // NOTE(review): missing `.tr` unlike the sibling labels — confirm.
            '开锁',
            AppColors.mainColor,
            onClick: () {},
          )
        ]);
  }

  /// Icon asset for the answer button, derived from the call status.
  String getAnswerBtnImg() {
    switch (state.talkStatus.value) {
      case TalkStatus.waitingAnswer:
        return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
      case TalkStatus.answeredSuccessfully:
      case TalkStatus.duringCall:
        return 'images/main/icon_lockDetail_monitoringUnTalkback.png';
      default:
        return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
    }
  }

  /// Caption for the answer button, derived from the call status.
  String getAnswerBtnName() {
    switch (state.talkStatus.value) {
      case TalkStatus.waitingAnswer:
        return '接听'.tr;
      case TalkStatus.answeredSuccessfully:
      case TalkStatus.duringCall:
        return '长按说话'.tr;
      default:
        return '接听'.tr;
    }
  }

  /// Builds one round action button: [iconUrl] inside a colored circle
  /// above a [name] caption.
  ///
  /// [onClick] is required; [longPress]/[longPressUp] support the
  /// push-to-talk gesture on the answer button.
  Widget bottomBtnItemWidget(String iconUrl, String name, Color backgroundColor,
      {required Function() onClick,
      Function()? longPress,
      Function()? longPressUp}) {
    double wh = 80.w;
    return GestureDetector(
      onTap: onClick,
      onLongPress: longPress,
      onLongPressUp: longPressUp,
      child: SizedBox(
          height: 140.h,
          child: Column(
            crossAxisAlignment: CrossAxisAlignment.center,
            children: <Widget>[
              Container(
                width: wh,
                height: wh,
                decoration: BoxDecoration(
                    color: backgroundColor,
                    borderRadius: BorderRadius.circular((wh + 10.w * 2) / 2)),
                padding: EdgeInsets.all(20.w),
                child: Image.asset(iconUrl, fit: BoxFit.fitWidth),
              ),
              SizedBox(height: 20.w),
              Expanded(
                  child: Text(name,
                      style: TextStyle(fontSize: 20.sp, color: Colors.white),
                      textAlign: TextAlign.center))
            ],
          )),
    );
  }

  /// Shows a modal dialog asking for the six-digit unlock password.
  ///
  /// The confirm handler is still stubbed out (original draft kept below).
  // NOTE(review): method name typo ("Delet") kept to avoid breaking callers.
  void showDeletPasswordAlertDialog(BuildContext context) {
    showDialog(
      barrierDismissible: false,
      context: context,
      builder: (BuildContext context) {
        return ShowTFView(
          title: '请输入六位数字开锁密码'.tr,
          tipTitle: '',
          controller: state.passwordTF,
          inputFormatters: <TextInputFormatter>[
            LengthLimitingTextInputFormatter(6), // At most 6 characters.
            FilteringTextInputFormatter.allow(RegExp('[0-9]')),
          ],
          sureClick: () async {
            // //
            // if (state.passwordTF.text.isEmpty) {
            //   logic.showToast('请输入开锁密码'.tr);
            //   return;
            // }
            //
            // // List<int> numbers = state.passwordTF.text.split('').map((char) => int.parse(char)).toList();
            // //
            // // lockID
            // List<int> numbers = <int>[];
            // List<int> lockIDData = utf8.encode(state.passwordTF.text);
            // numbers.addAll(lockIDData);
            // // topBytes = getFixedLengthList(lockIDData, 20 - lockIDData.length);
            // for (int i = 0; i < 6 - lockIDData.length; i++) {
            //   numbers.add(0);
            // }
          },
          cancelClick: () {
            Get.back();
          },
        );
      },
    );
  }

  /// The rotating "connecting" spinner, positioned slightly above the
  /// screen centre, shown while waiting for the first video frame.
  Widget buildRotationTransition() {
    return Positioned(
      left: ScreenUtil().screenWidth / 2 - 220.w / 2,
      top: ScreenUtil().screenHeight / 2 - 220.w / 2 - 150.h,
      child: GestureDetector(
        child: RotationTransition(
          // Rotate around the image centre.
          alignment: Alignment.center,
          // Driven by the controller started in initState.
          turns: state.animationController,
          // Render the spinner at half opacity.
          child: AnimatedOpacity(
            opacity: 0.5,
            duration: const Duration(seconds: 2),
            child: Image.asset(
              'images/main/realTime_connecting.png',
              width: 220.w,
              height: 220.w,
            ),
          ),
        ),
        onTap: () {
          state.animationController.forward();
        },
      ),
    );
  }

  @override
  void dispose() {
    state.animationController.dispose();
    // Cancel the page-level timers.
    // NOTE(review): other timers on the state (answerTimer, hangUpTimer,
    // openDoorTimer, oneMinuteTimeTimer) are not cancelled here —
    // presumably handled by the logic class; confirm.
    state.realTimePicTimer.cancel();
    state.autoBackTimer.cancel();
    // Tear down the native audio/video session.
    CallTalk().finishAVData();
    super.dispose();
  }
}

View File

@ -0,0 +1,68 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:flutter/material.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:get/get_rx/get_rx.dart';
import 'package:network_info_plus/network_info_plus.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import '../../../../tools/storage.dart';
/// Reactive state container for the talk (intercom) view, shared between
/// `TalkViewLogic` and the widgets in `TalkViewPage`.
class TalkViewState {
  // Whether the speaker/voice output is currently enabled.
  RxBool isOpenVoice = false.obs;
  // Counter for outgoing UDP data frames.
  int udpSendDataFrameNumber = 0;
  // var isSenderAudioData = false.obs;//
  // Local Wi-Fi IP of this device, resolved lazily.
  Future<String?> userMobileIP = NetworkInfo().getWifiIP();
  // Cached user id from local storage.
  Future<String?> userUid = Storage.getUid();
  // UDP session phase; original note listed codes 0 1 2 3 4 5 6 8 9 —
  // exact meaning of each code is not visible here, TODO confirm mapping.
  RxInt udpStatus = 0.obs;
  // Input controller for the six-digit unlock password dialog.
  TextEditingController passwordTF = TextEditingController();
  Rx<Uint8List> listData = Uint8List(0).obs; // Latest decoded video frame.
  RxList<int> listAudioData = <int>[].obs; // Raw incoming audio samples.
  // NOTE(review): declared `late final` but never initialised in this file —
  // confirm it is assigned before first use.
  late final VoiceProcessor? voiceProcessor;
  // One-minute call timer; initialised with a throwaway 1s timer so
  // cancel() is always safe.
  late Timer oneMinuteTimeTimer =
      Timer(const Duration(seconds: 1), () {});
  RxInt oneMinuteTime = 0.obs; // Tick count for that timer.
  // Timeout timers (original note: ~10s); assigned elsewhere before use.
  late Timer answerTimer;
  late Timer hangUpTimer;
  late Timer openDoorTimer;
  // Drives the rotating "connecting" indicator; owned by the page state.
  late AnimationController animationController;
  RxDouble fps = 0.0.obs; // Rendered frames-per-second (debug overlay).
  late Timer autoBackTimer =
      Timer(const Duration(seconds: 1), () {}); // 30s auto-close timer.
  late Timer realTimePicTimer =
      Timer(const Duration(seconds: 1), () {}); // Snapshot refresh timer.
  RxInt elapsedSeconds = 0.obs;
  // A/V jitter buffers consumed by the playback sync loop.
  List<TalkData> audioBuffer = <TalkData>[].obs;
  List<TalkData> videoBuffer = <TalkData>[].obs;
  RxBool isPlaying = false.obs; // Whether the playback loop is running.
  Rx<TalkStatus> talkStatus = TalkStatus.none.obs; // Current call state.
  // Shared singleton holding the startChart call status.
  final StartChartTalkStatus startChartTalkStatus =
      StartChartTalkStatus.instance;
  // Repository delivering incoming talk data packets.
  final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
}