fix: add call page for the fully automatic image-transmission lock

This commit is contained in:
liyi 2025-05-13 09:45:57 +08:00
parent 4a725be23f
commit 905368ec8d
4 changed files with 1009 additions and 1 deletion

View File

@@ -203,6 +203,7 @@ import 'mine/valueAddedServices/valueAddedServicesRealName/value_added_services_
import 'mine/valueAddedServices/valueAddedServicesSMSTemplate/valueAddedServicesAddSMSTemplate/newSMSTemplate_page.dart';
import 'mine/valueAddedServices/valueAddedServicesSMSTemplate/valueAddedServicesListSMSTemplate/customSMSTemplateList_page.dart';
import 'starLockApplication/starLockApplication.dart';
import 'talk/starChart/views/imageTransmission/image_transmission_page.dart';
import 'tools/seletKeyCyclicDate/seletKeyCyclicDate_page.dart';
abstract class Routers {
@@ -515,6 +516,8 @@ abstract class Routers {
static const String starChartPage = '/starChartPage';
static const String starChartTalkView = '/starChartTalkView';
static const String h264WebView = '/h264WebView';
static const String imageTransmissionView =
'/imageTransmissionView'; // fully automatic image-transmission call page
}
abstract class AppRouters {
@@ -1185,7 +1188,13 @@ abstract class AppRouters {
page: () => const DoubleLockLinkPage()),
GetPage<dynamic>(
name: Routers.starChartTalkView, page: () => const TalkViewPage()),
GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()),
GetPage<dynamic>(
name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()),
// Fully automatic image-transmission call page.
GetPage<dynamic>(
name: Routers.imageTransmissionView,
page: () => ImageTransmissionPage()),
// GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()), // WebView playback page
];
}
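
For context, a minimal sketch of how the new route could be opened elsewhere in the app, assuming the standard GetX navigation call already used by this router; the call site itself is not part of this commit:

// Sketch only: navigate to the fully automatic image-transmission call page.
Get.toNamed<dynamic>(Routers.imageTransmissionView);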

View File: lib/talk/starChart/views/imageTransmission/image_transmission_logic.dart

@@ -0,0 +1,667 @@
import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
import 'dart:ui' show decodeImageFromList;
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_state.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import '../../../../tools/baseGetXController.dart';
class ImageTransmissionLogic extends BaseGetXController {
ImageTransmissionState state = ImageTransmissionState();
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
int bufferSize = 8; // video frame buffer size (frames)
int audioBufferSize = 2; // audio buffer size: 2 frames
bool _isFirstAudioFrame = true; // whether the next audio frame is the first one received
int _startAudioTime = 0; // arrival time of the first audio frame (playback reference)
// G711-encoded microphone data waiting to be sent.
final List<int> _bufferedAudioFrames = <int>[];
// Whether the talk data stream is already being listened to.
bool _isListening = false;
StreamSubscription? _streamSubscription;
Timer? videoRenderTimer; // drives video rendering at ~10 fps
int _renderedFrameCount = 0;
int _lastFpsPrintTime = DateTime.now().millisecondsSinceEpoch;
/// Initialise the flutter_pcm_sound player for 8 kHz mono playback.
void _initFlutterPcmSound() {
const int sampleRate = 8000;
FlutterPcmSound.setLogLevel(LogLevel.none);
FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
// Set the feed threshold (samples buffered before playback starts).
if (Platform.isAndroid) {
FlutterPcmSound.setFeedThreshold(1024); // Android
} else {
FlutterPcmSound.setFeedThreshold(2000); // iOS
}
}
/// Hang up or reject the call, then leave the page.
void udpHangUpAction() async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
// Call was answered: send the hang-up message.
StartChartManage().startTalkHangupMessageTimer();
} else {
// Not answered yet: send the reject message.
StartChartManage().startTalkRejectMessageTimer();
}
Get.back();
}
// Send the answer (accept-call) command to the lock.
void initiateAnswerCommand() {
StartChartManage().startTalkAcceptTimer();
}
// Start listening to the incoming talk data stream (audio and video).
void _startListenTalkData() {
// Guard against duplicate subscriptions.
if (_isListening) {
AppLog.log("A talk data stream listener already exists; skipping duplicate subscription");
return;
}
AppLog.log("==== starting a new talk data stream listener ====");
_isListening = true;
_streamSubscription = state.talkDataRepository.talkDataStream
.listen((TalkDataModel talkDataModel) async {
final talkData = talkDataModel.talkData;
final contentType = talkData!.contentType;
final currentTime = DateTime.now().millisecondsSinceEpoch;
// Dispatch by content type.
switch (contentType) {
case TalkData_ContentTypeE.G711:
// Record the arrival time of the first audio frame; it anchors playback timing.
if (_isFirstAudioFrame) {
_startAudioTime = currentTime;
_isFirstAudioFrame = false;
}
// Expected arrival time based on the frame's duration offset.
final expectedTime = _startAudioTime + talkData.durationMs;
final audioDelay = currentTime - expectedTime;
// If audio lags by more than 500 ms, drop the buffer to catch up.
if (audioDelay > 500) {
state.audioBuffer.clear();
if (state.isOpenVoice.value) {
_playAudioFrames();
}
return;
}
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); // drop the oldest audio frame
}
state.audioBuffer.add(talkData); // enqueue the new frame
// Try to play buffered audio.
_playAudioFrames();
break;
case TalkData_ContentTypeE.Image:
// Keep at most bufferSize video frames in the buffer.
state.videoBuffer.add(talkData);
if (state.videoBuffer.length > bufferSize) {
state.videoBuffer.removeAt(0); // drop the oldest video frame
}
break;
}
});
}
// Play the oldest buffered audio frame once enough frames are queued.
void _playAudioFrames() {
// Wait until the buffer holds at least audioBufferSize frames.
if (state.audioBuffer.isEmpty ||
state.audioBuffer.length < audioBufferSize) {
return;
}
// Find the frame with the smallest duration offset (the oldest one).
TalkData? oldestFrame;
int oldestIndex = -1;
for (int i = 0; i < state.audioBuffer.length; i++) {
if (oldestFrame == null ||
state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
oldestFrame = state.audioBuffer[i];
oldestIndex = i;
}
}
// Play the frame (if the speaker is on) and remove it from the buffer.
if (oldestFrame != null && oldestIndex != -1) {
if (state.isOpenVoice.value) {
_playAudioData(oldestFrame);
}
state.audioBuffer.removeAt(oldestIndex);
}
}
/// Listen for call status changes.
void _startListenTalkStatus() {
state.startChartTalkStatus.statusStream.listen((talkStatus) {
state.talkStatus.value = talkStatus;
switch (talkStatus) {
case TalkStatus.rejected:
case TalkStatus.hangingUpDuring:
case TalkStatus.notTalkData:
case TalkStatus.notTalkPing:
case TalkStatus.end:
_handleInvalidTalkStatus();
break;
case TalkStatus.answeredSuccessfully:
// Restart the call-duration timer.
state.oneMinuteTimeTimer?.cancel();
state.oneMinuteTimeTimer =
Timer.periodic(const Duration(seconds: 1), (Timer t) {
if (state.listData.value.isNotEmpty) {
state.oneMinuteTime.value++;
// Optional 60-second auto hang-up (disabled):
// if (state.oneMinuteTime.value >= 60) {
//   t.cancel();
//   state.oneMinuteTime.value = 0;
//   // udpHangUpAction();
// }
}
});
break;
default:
// Other statuses need no handling here.
break;
}
});
}
/// Decode a G711 frame and feed it to the PCM player.
void _playAudioData(TalkData talkData) async {
if (state.isOpenVoice.value) {
final list =
G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
// Wrap the decoded PCM samples as PcmArrayInt16 for flutter_pcm_sound.
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
FlutterPcmSound.feed(fromList);
if (!state.isPlaying.value) {
FlutterPcmSound.play();
state.isPlaying.value = true;
}
}
}
/// Stop and reset PCM playback.
void _stopPlayG711Data() async {
await FlutterPcmSound.pause();
await FlutterPcmSound.stop();
await FlutterPcmSound.clear();
}
/// Open-door command sent over the talk channel (currently disabled).
// udpOpenDoorAction() async {
// final List<String>? privateKey =
// await Storage.getStringList(saveBluePrivateKey);
// final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
//
// final List<String>? signKey = await Storage.getStringList(saveBlueSignKey);
// final List<int> signKeyDataList = changeStringListToIntList(signKey!);
//
// final List<String>? token = await Storage.getStringList(saveBlueToken);
// final List<int> getTokenList = changeStringListToIntList(token!);
//
// await _getLockNetToken();
//
// final OpenLockCommand openLockCommand = OpenLockCommand(
// lockID: BlueManage().connectDeviceName,
// userID: await Storage.getUid(),
// openMode: lockDetailState.openDoorModel,
// openTime: _getUTCNetTime(),
// onlineToken: lockDetailState.lockNetToken,
// token: getTokenList,
// needAuthor: 1,
// signKey: signKeyDataList,
// privateKey: getPrivateKeyList,
// );
// final messageDetail = openLockCommand.packageData();
// // List<int>
// String hexString = messageDetail
// .map((byte) => byte.toRadixString(16).padLeft(2, '0'))
// .join(' ');
//
// AppLog.log('open lock hexString: $hexString');
// //
// StartChartManage().sendRemoteUnLockMessage(
// bluetoothDeviceName: BlueManage().connectDeviceName,
// openLockCommand: messageDetail,
// );
// showToast('正在开锁中...'.tr);
// }
int _getUTCNetTime() {
if (lockDetailState.isHaveNetwork) {
return DateTime.now().millisecondsSinceEpoch ~/ 1000 +
lockDetailState.differentialTime;
} else {
return 0;
}
}
/// Check the microphone permission; request it or open the app settings when needed.
Future<bool> getPermissionStatus() async {
final Permission permission = Permission.microphone;
// Possible states: granted / denied / permanentlyDenied / restricted.
final PermissionStatus status = await permission.status;
if (status.isGranted) {
return true;
} else if (status.isDenied) {
requestPermission(permission);
} else if (status.isPermanentlyDenied) {
openAppSettings();
} else if (status.isRestricted) {
requestPermission(permission);
}
return false;
}
/// Request a permission; open the app settings if it is permanently denied.
void requestPermission(Permission permission) async {
final PermissionStatus status = await permission.request();
if (status.isPermanentlyDenied) {
openAppSettings();
}
}
Future<void> requestPermissions() async {
// Storage permission (needed to save screenshots).
var storageStatus = await Permission.storage.request();
// Microphone permission (needed for two-way talk).
var microphoneStatus = await Permission.microphone.request();
if (storageStatus.isGranted && microphoneStatus.isGranted) {
print("Permissions granted");
} else {
print("Permissions denied");
// Guide the user to the app settings if storage is permanently denied.
if (await Permission.storage.isPermanentlyDenied) {
openAppSettings();
}
}
}
Future<void> startRecording() async {
// requestPermissions();
// if (state.isRecordingScreen.value) {
// showToast('录屏已开始,请勿重复点击');
// }
// bool start = await FlutterScreenRecording.startRecordScreen(
// "Screen Recording", //
// titleNotification: "Recording in progress", //
// messageNotification: "Tap to stop recording", //
// );
//
// if (start) {
// state.isRecordingScreen.value = true;
// }
}
Future<void> stopRecording() async {
// String path = await FlutterScreenRecording.stopRecordScreen;
// print("Recording saved to: $path");
//
// //
// bool? success = await GallerySaver.saveVideo(path);
// if (success == true) {
// print("Video saved to gallery");
// } else {
// print("Failed to save video to gallery");
// }
//
// showToast('录屏结束,已保存到系统相册');
// state.isRecordingScreen.value = false;
}
@override
void onReady() {
super.onReady();
}
@override
void onInit() {
super.onInit();
// Start listening to incoming talk data (audio and video).
_startListenTalkData();
// Start listening to call status changes.
_startListenTalkStatus();
// Sync the current call status immediately; it may already be set before this page opens.
state.talkStatus.value = state.startChartTalkStatus.status;
// Initialise PCM audio playback.
_initFlutterPcmSound();
// _startPlayback();
// Initialise the microphone voice processor and request permissions.
_initAudioRecorder();
requestPermissions();
// Render buffered video frames at ~10 fps (every 100 ms).
videoRenderTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
final int now = DateTime.now().millisecondsSinceEpoch;
if (state.videoBuffer.isNotEmpty) {
final TalkData oldestFrame = state.videoBuffer.removeAt(0);
if (oldestFrame.content.isNotEmpty) {
state.listData.value =
Uint8List.fromList(oldestFrame.content); // keep the raw frame bytes
decodeImageFromList(Uint8List.fromList(oldestFrame.content))
.then((ui.Image img) {
state.currentImage.value = img;
_renderedFrameCount++;
// Track rendered frames per second (debug only).
if (now - _lastFpsPrintTime >= 1000) {
// print('actual render fps: $_renderedFrameCount');
_renderedFrameCount = 0;
_lastFpsPrintTime = now;
}
}).catchError((e) {
print('Image decode failed: $e');
});
}
}
});
}
@override
void onClose() {
_stopPlayG711Data(); // stop PCM playback
state.listData.value = Uint8List(0); // clear the last video frame
state.audioBuffer.clear(); // clear buffered audio
state.videoBuffer.clear(); // clear buffered video
// Stop the call-duration timer and reset the counter.
state.oneMinuteTimeTimer?.cancel();
state.oneMinuteTimeTimer = null;
state.oneMinuteTime.value = 0;
stopProcessingAudio();
// _imageCache.clear();
// Cancel the talk data stream subscription.
_streamSubscription?.cancel();
_isListening = false;
// Stop the video render timer.
videoRenderTimer?.cancel();
videoRenderTimer = null;
super.onClose();
}
@override
void dispose() {
stopProcessingAudio();
// Reset the expected talk data types to their defaults.
StartChartManage().reSetDefaultTalkExpect();
// Stop the video render timer.
videoRenderTimer?.cancel();
videoRenderTimer = null;
super.dispose();
}
/// Clean up when the call is rejected, hung up, or otherwise invalid.
void _handleInvalidTalkStatus() {
state.listData.value = Uint8List(0);
// Stop audio playback and microphone capture.
_stopPlayG711Data();
stopProcessingAudio();
}
/// Toggle the speaker (mute / unmute) and update the media types expected from the lock.
void updateTalkExpect() {
TalkExpectReq talkExpectReq = TalkExpectReq();
state.isOpenVoice.value = !state.isOpenVoice.value;
if (!state.isOpenVoice.value) {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [],
);
showToast('已静音'.tr);
} else {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
}
// Push the new expectation to the lock and restart the talk-expect timer.
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
talkExpect: talkExpectReq);
}
/// Capture the current video frame (via RepaintBoundary) and save it to the gallery.
Future<void> captureAndSavePng() async {
try {
if (state.globalKey.currentContext == null) {
AppLog.log('Screenshot failed: no current context');
return;
}
final RenderRepaintBoundary boundary = state.globalKey.currentContext!
.findRenderObject()! as RenderRepaintBoundary;
final ui.Image image = await boundary.toImage();
final ByteData? byteData =
await image.toByteData(format: ui.ImageByteFormat.png);
if (byteData == null) {
AppLog.log('Screenshot failed: image data is empty');
return;
}
final Uint8List pngBytes = byteData.buffer.asUint8List();
// Write the PNG to the app documents directory first.
final Directory directory = await getApplicationDocumentsDirectory();
final String imagePath = '${directory.path}/screenshot.png';
final File imgFile = File(imagePath);
await imgFile.writeAsBytes(pngBytes);
// Then copy it into the system photo gallery.
await ImageGallerySaver.saveFile(imagePath);
AppLog.log('Screenshot saved at: $imagePath');
showToast('截图已保存到相册'.tr);
} catch (e) {
AppLog.log('Screenshot failed: $e');
}
}
// Remote unlock through the server API.
Future<void> remoteOpenLock() async {
final lockPeerId = StartChartManage().lockPeerId;
final lockListPeerId = StartChartManage().lockListPeerId;
int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
// The incoming call only carries a peerId; match it against the lock list
// to resolve the actual lockId.
lockListPeerId.forEach((element) {
if (element.network?.peerId == lockPeerId) {
lockId = element.lockId ?? 0;
}
});
final LockSetInfoEntity lockSetInfoEntity =
await ApiRepository.to.getLockSettingInfoData(
lockId: lockId.toString(),
);
if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
final LoginEntity entity = await ApiRepository.to
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
if (entity.errorCode!.codeIsSuccessful) {
showToast('已开锁'.tr);
StartChartManage().lockListPeerId = [];
}
} else {
showToast('该锁的远程开锁功能未启用'.tr);
}
}
}
/// Initialise the voice processor used for microphone capture.
void _initAudioRecorder() {
state.voiceProcessor = VoiceProcessor.instance;
}
// Start capturing microphone audio and streaming it to the lock.
Future<void> startProcessingAudio() async {
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.startRecordingAudioTime.value = DateTime.now();
// Register the frame and error listeners.
state.voiceProcessor
?.addFrameListeners(<VoiceProcessorFrameListener>[_onFrame]);
state.voiceProcessor?.addErrorListener(_onError);
} else {
// state.errorMessage.value = 'Recording permission not granted';
}
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to start recorder: $ex';
}
// Mute the speaker while the microphone is open (half-duplex talk).
state.isOpenVoice.value = false;
}
/// Stop microphone capture and record how long it ran.
Future<void> stopProcessingAudio() async {
try {
await state.voiceProcessor?.stop();
state.voiceProcessor?.removeFrameListener(_onFrame);
state.udpSendDataFrameNumber = 0;
// Record the stop time and compute the capture duration in seconds.
state.endRecordingAudioTime.value = DateTime.now();
final Duration duration = state.endRecordingAudioTime.value
.difference(state.startRecordingAudioTime.value);
state.recordingAudioTime.value = duration.inSeconds;
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to stop recorder: $ex';
} finally {
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.isOpenVoice.value = true;
}
}
// Called for every captured PCM frame: apply gain, G711-encode, and send to the lock.
Future<void> _onFrame(List<int> frame) async {
// Drop the backlog if the send buffer grows beyond three frames' worth of data.
if (_bufferedAudioFrames.length > state.frameLength * 3) {
_bufferedAudioFrames.clear(); // discard stale audio
return;
}
// Apply a software gain before encoding.
List<int> amplifiedFrame = _applyGain(frame, 1.6);
// Encode the PCM samples to G711.
List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0 = A-law
_bufferedAudioFrames.addAll(encodedData);
// Use a wrapped millisecond timestamp as the frame's relative time.
final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
int getFrameLength = state.frameLength;
if (Platform.isIOS) {
getFrameLength = state.frameLength * 2; // on iOS, accumulate two frames before sending
}
// Send once enough data has been accumulated.
if (_bufferedAudioFrames.length >= getFrameLength) {
try {
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: _bufferedAudioFrames,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
} finally {
_bufferedAudioFrames.clear(); // reset the send buffer
}
}
}
// Voice processor error callback.
void _onError(VoiceProcessorException error) {
AppLog.log(error.message!);
}
// Apply a linear gain to 16-bit PCM samples, clipping to the valid range.
List<int> _applyGain(List<int> pcmData, double gainFactor) {
List<int> result = List<int>.filled(pcmData.length, 0);
for (int i = 0; i < pcmData.length; i++) {
// PCM samples are signed 16-bit integers.
int sample = pcmData[i];
// Amplify the sample.
double amplified = sample * gainFactor;
// Clip to the 16-bit signed range.
if (amplified > 32767) {
amplified = 32767;
} else if (amplified < -32768) {
amplified = -32768;
}
result[i] = amplified.toInt();
}
return result;
}
}
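
As a side note, the gain-and-clip loop in _applyGain can be expressed more compactly with Dart's num.clamp. This is only an equivalent sketch for illustration (the helper name applyGainClamped is hypothetical and not part of the commit):

// Sketch only: same amplify-then-clip behaviour as _applyGain, using clamp.
List<int> applyGainClamped(List<int> pcmData, double gainFactor) => pcmData
    .map((int s) => (s * gainFactor).clamp(-32768.0, 32767.0).toInt())
    .toList();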

View File: lib/talk/starChart/views/imageTransmission/image_transmission_page.dart

@@ -0,0 +1,238 @@
import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_logic.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_state.dart';
import 'package:star_lock/tools/titleAppBar.dart';
import 'package:slide_to_act/slide_to_act.dart';
// Alternative slide-button package (unused):
// import 'package:flutter_slider_button/flutter_slider_button.dart';
class ImageTransmissionPage extends StatefulWidget {
const ImageTransmissionPage({Key? key}) : super(key: key);
@override
State<ImageTransmissionPage> createState() => _ImageTransmissionPageState();
}
class _ImageTransmissionPageState extends State<ImageTransmissionPage>
with TickerProviderStateMixin {
final ImageTransmissionLogic logic = Get.put(ImageTransmissionLogic());
final ImageTransmissionState state = Get.find<ImageTransmissionLogic>().state;
final startChartManage = StartChartManage();
@override
void initState() {
super.initState();
state.animationController = AnimationController(
vsync: this, // this State is the TickerProvider
duration: const Duration(seconds: 1),
);
state.animationController.repeat();
state.animationController.addStatusListener((AnimationStatus status) {
if (status == AnimationStatus.completed) {
state.animationController.reset();
state.animationController.forward();
} else if (status == AnimationStatus.dismissed) {
state.animationController.reset();
state.animationController.forward();
}
});
}
@override
void dispose() {
state.animationController.dispose();
CallTalk().finishAVData();
super.dispose();
}
@override
Widget build(BuildContext context) {
return Scaffold(
backgroundColor: AppColors.mainBackgroundColor,
resizeToAvoidBottomInset: false,
appBar: TitleAppBar(
barTitle: '图传全自动'.tr,
haveBack: true,
backgroundColor: AppColors.mainColor,
backAction: () {
logic.udpHangUpAction();
},
),
body: Obx(() => Column(
children: [
SizedBox(height: 24.h),
SizedBox(
height: 0.6.sh,
child: state.listData.value.isEmpty
? _buildWaitingView()
: _buildVideoView(),
),
SizedBox(height: 30.h),
_buildBottomToolBar(),
SizedBox(height: 30.h),
],
)),
);
}
Widget _buildWaitingView() {
double barWidth = MediaQuery.of(context).size.width - 60.w;
return Center(
child: ClipRRect(
borderRadius: BorderRadius.circular(30.h),
child: Stack(
alignment: Alignment.center,
children: [
Container(
width: barWidth,
height: double.infinity,
child: Image.asset(
'images/main/monitorBg.png',
fit: BoxFit.cover,
),
),
RotationTransition(
turns: state.animationController,
child: Image.asset(
'images/main/realTime_connecting.png',
width: 300.w,
height: 300.w,
fit: BoxFit.contain,
),
),
],
),
),
);
}
Widget _buildVideoView() {
double barWidth = MediaQuery.of(context).size.width - 60.w;
return PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: Center(
child: ClipRRect(
borderRadius: BorderRadius.circular(30.h),
child: Container(
width: barWidth,
height: double.infinity,
child: RotatedBox(
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: RawImage(
image: state.currentImage.value,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
),
),
),
),
),
),
);
}
Widget _buildBottomToolBar() {
return Container(
margin: EdgeInsets.symmetric(horizontal: 30.w),
padding: EdgeInsets.symmetric(vertical: 28.h, horizontal: 20.w),
decoration: BoxDecoration(
color: Colors.white,
borderRadius: BorderRadius.circular(30.h),
boxShadow: [
BoxShadow(
color: Colors.black12,
blurRadius: 12,
offset: Offset(0, 4),
),
],
),
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: [
_circleButton(
icon: Icons.call,
color: Colors.green,
onTap: () {
if (state.talkStatus.value ==
TalkStatus.passiveCallWaitingAnswer) {
// Only answer while the call is waiting to be picked up.
logic.initiateAnswerCommand();
}
},
),
_circleButton(
icon: Icons.call_end,
color: Colors.red,
onTap: () {
logic.udpHangUpAction();
},
),
_circleButton(
icon: Icons.camera_alt,
color: Colors.blue,
onTap: () async {
if (state.talkStatus.value ==
TalkStatus.answeredSuccessfully) {
await logic.captureAndSavePng();
}
},
),
],
),
SizedBox(height: 36.h),
SlideAction(
height: 64.h,
borderRadius: 24.h,
elevation: 0,
innerColor: Colors.amber,
outerColor: Colors.amber.withOpacity(0.15),
sliderButtonIcon: Icon(Icons.lock, color: Colors.white, size: 40.w),
text: '滑动解锁',
textStyle: TextStyle(fontSize: 26.sp, color: Colors.black54, fontWeight: FontWeight.bold),
onSubmit: () {
// TODO:
logic.remoteOpenLock();
},
),
],
),
);
}
Widget _circleButton(
{required IconData icon,
required Color color,
required VoidCallback onTap}) {
return GestureDetector(
onTap: onTap,
child: Container(
width: 90.w,
height: 90.w,
decoration: BoxDecoration(
color: color,
shape: BoxShape.circle,
boxShadow: [
BoxShadow(
color: color.withOpacity(0.3),
blurRadius: 10,
offset: Offset(0, 4),
),
],
),
child: Icon(icon, color: Colors.white, size: 48.w),
),
);
}
}

View File: lib/talk/starChart/views/imageTransmission/image_transmission_state.dart

@@ -0,0 +1,94 @@
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:get/get_rx/get_rx.dart';
import 'package:get/get_rx/src/rx_types/rx_types.dart';
import 'package:get/state_manager.dart';
import 'package:network_info_plus/network_info_plus.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';
import '../../../../tools/storage.dart';
enum NetworkStatus {
normal, // 0: normal
lagging, // 1: stuttering
delayed, // 2: high delay
packetLoss // 3: packet loss
}
class ImageTransmissionState {
int udpSendDataFrameNumber = 0; // count of UDP data frames sent
// var isSenderAudioData = false.obs; // whether audio is currently being sent
Future<String?> userMobileIP = NetworkInfo().getWifiIP();
Future<String?> userUid = Storage.getUid();
RxInt udpStatus = 0.obs; // UDP connection status code (0 1 2 3 4 5 6 8 9)
TextEditingController passwordTF = TextEditingController();
Rx<Uint8List> listData = Uint8List(0).obs; // latest received video frame (raw bytes)
RxList<int> listAudioData = <int>[].obs; // received raw audio bytes
GlobalKey globalKey = GlobalKey();
Timer? oneMinuteTimeTimer; // call-duration timer (1-second tick)
RxInt oneMinuteTime = 0.obs; // elapsed call time in seconds
// Timers for answer / hang-up / open-door timeouts.
late Timer answerTimer;
late Timer hangUpTimer;
late Timer openDoorTimer;
Timer? fpsTimer;
late AnimationController animationController;
late Timer autoBackTimer =
Timer(const Duration(seconds: 1), () {}); // auto-exit timer (30 s)
late Timer realTimePicTimer =
Timer(const Duration(seconds: 1), () {}); // real-time picture refresh timer
RxInt elapsedSeconds = 0.obs;
// Receive buffers for audio and video frames.
List<TalkData> audioBuffer = <TalkData>[].obs;
List<TalkData> activeAudioBuffer = <TalkData>[].obs;
List<TalkData> activeVideoBuffer = <TalkData>[].obs;
List<TalkData> videoBuffer = <TalkData>[].obs;
List<TalkData> videoBuffer2 = <TalkData>[].obs;
RxBool isPlaying = false.obs; // whether PCM playback has started
Rx<TalkStatus> talkStatus = TalkStatus.none.obs; // current call status
// Shared call status singleton.
final StartChartTalkStatus startChartTalkStatus =
StartChartTalkStatus.instance;
// Shared talk data stream repository.
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
RxInt lastFrameTimestamp = 0.obs; // timestamp of the last rendered frame
Rx<NetworkStatus> networkStatus =
NetworkStatus.normal.obs; // 0 normal, 1 stuttering, 2 delayed, 3 packet loss
RxInt alertCount = 0.obs; // number of network alerts shown
RxInt maxAlertNumber = 3.obs; // maximum number of network alerts
RxBool isOpenVoice = true.obs; // speaker on/off
RxBool isRecordingScreen = false.obs; // screen recording in progress
RxBool isRecordingAudio = false.obs; // microphone capture in progress
Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // mic capture start time
Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; // mic capture end time
RxInt recordingAudioTime = 0.obs; // mic capture duration in seconds
RxInt fps = 0.obs; // rendered frames per second
late VoiceProcessor? voiceProcessor; // microphone capture (flutter_voice_processor)
final int frameLength = 320; // samples per capture frame (320 samples = 640 bytes)
final int sampleRate = 8000; // capture sample rate in Hz
List<int> recordingAudioAllFrames = <int>[]; // all captured microphone frames
List<int> lockRecordingAudioAllFrames = <int>[]; // all audio frames received from the lock
RxInt rotateAngle = 0.obs; // video rotation angle in degrees
RxBool isLongPressing = false.obs; // push-to-talk button held down
RxBool hasAudioData = false.obs; // whether any audio has been received
RxInt lastAudioTimestamp = 0.obs; // timestamp of the last audio frame
Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
}