Merge branch 'talk_flutter' of gitee.com:starlock-cn/app-starlock into talk_flutter

# Conflicts:
#	star_lock/pubspec.yaml
This commit is contained in:
Daisy 2023-12-29 14:32:49 +08:00
commit b8a2c78836
11 changed files with 324 additions and 437 deletions

View File

@ -38,13 +38,6 @@ PODS:
- device_info_plus (0.0.1):
- Flutter
- EMASRest (11.1.1.2)
- ffmpeg-kit-ios-https (5.1.LTS)
- ffmpeg_kit_flutter (5.1.0.LTS):
- ffmpeg_kit_flutter/https-lts (= 5.1.0.LTS)
- Flutter
- ffmpeg_kit_flutter/https-lts (5.1.0.LTS):
- ffmpeg-kit-ios-https (= 5.1.LTS)
- Flutter
- Flutter (1.0.0)
- flutter_native_contact_picker (0.0.1):
- Flutter
@ -64,11 +57,11 @@ PODS:
- Flutter
- google_maps_flutter_ios (0.0.1):
- Flutter
- GoogleMaps (< 8.0)
- GoogleMaps (5.2.0):
- GoogleMaps/Maps (= 5.2.0)
- GoogleMaps/Base (5.2.0)
- GoogleMaps/Maps (5.2.0):
- GoogleMaps (< 9.0)
- GoogleMaps (7.4.0):
- GoogleMaps/Maps (= 7.4.0)
- GoogleMaps/Base (7.4.0)
- GoogleMaps/Maps (7.4.0):
- GoogleMaps/Base
- image_gallery_saver (2.0.2):
- Flutter
@ -85,7 +78,7 @@ PODS:
- FlutterMacOS
- permission_handler_apple (9.1.1):
- Flutter
- Protobuf (3.25.0)
- Protobuf (3.25.1)
- reactive_ble_mobile (0.0.1):
- Flutter
- Protobuf (~> 3.5)
@ -96,7 +89,7 @@ PODS:
- sqflite (0.0.3):
- Flutter
- FMDB (>= 2.7.5)
- SwiftProtobuf (1.25.1)
- SwiftProtobuf (1.25.2)
- Toast (4.0.0)
- url_launcher_ios (0.0.1):
- Flutter
@ -116,7 +109,6 @@ DEPENDENCIES:
- auto_orientation (from `.symlinks/plugins/auto_orientation/ios`)
- camera_avfoundation (from `.symlinks/plugins/camera_avfoundation/ios`)
- device_info_plus (from `.symlinks/plugins/device_info_plus/ios`)
- ffmpeg_kit_flutter (from `.symlinks/plugins/ffmpeg_kit_flutter/ios`)
- Flutter (from `Flutter`)
- flutter_native_contact_picker (from `.symlinks/plugins/flutter_native_contact_picker/ios`)
- flutter_pcm_sound (from `.symlinks/plugins/flutter_pcm_sound/ios`)
@ -150,7 +142,6 @@ SPEC REPOS:
- AMap3DMap
- AMapFoundation
- AMapLocation
- ffmpeg-kit-ios-https
- flutter_sound_core
- FMDB
- GoogleMaps
@ -177,8 +168,6 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/camera_avfoundation/ios"
device_info_plus:
:path: ".symlinks/plugins/device_info_plus/ios"
ffmpeg_kit_flutter:
:path: ".symlinks/plugins/ffmpeg_kit_flutter/ios"
Flutter:
:path: Flutter
flutter_native_contact_picker:
@ -239,18 +228,16 @@ SPEC CHECKSUMS:
camera_avfoundation: 3125e8cd1a4387f6f31c6c63abb8a55892a9eeeb
device_info_plus: e5c5da33f982a436e103237c0c85f9031142abed
EMASRest: 8df6f87836767a9415ad5cc4af739bc9d215b475
ffmpeg-kit-ios-https: 9e50ffa7eaa6272a0021829e054ef241f2ecffb2
ffmpeg_kit_flutter: fb5bee3a6038231463ee99e30f97a5763e0ae40f
Flutter: f04841e97a9d0b0a8025694d0796dd46242b2854
flutter_native_contact_picker: bd430ba0fbf82768bb50c2c52a69a65759a8f907
flutter_pcm_sound: de0572ca4f99091cc2abfcc31601b8a4ddd33c0e
flutter_sound: c60effa2a350fb977885f0db2fbc4c1ad5160900
flutter_sound_core: 26c10e5832e76aaacfae252d8925232281c486ae
fluttertoast: fafc4fa4d01a6a9e4f772ecd190ffa525e9e2d9c
fluttertoast: 31b00dabfa7fb7bacd9e7dbee580d7a2ff4bf265
FMDB: 2ce00b547f966261cd18927a3ddb07cb6f3db82a
g711_flutter: 8f2769052d2cf3549f83d11e1c42d81d94441123
google_maps_flutter_ios: abdac20d6ce8931f6ebc5f46616df241bfaa2cfd
GoogleMaps: 025272d5876d3b32604e5c080dc25eaf68764693
google_maps_flutter_ios: 590249c67f34f422122c232f2a626192adbc78ee
GoogleMaps: 032f676450ba0779bd8ce16840690915f84e57ac
image_gallery_saver: cb43cc43141711190510e92c460eb1655cd343cb
image_picker_ios: 4a8aadfbb6dc30ad5141a2ce3832af9214a705b5
just_audio: baa7252489dbcf47a4c7cc9ca663e9661c99aafa
@ -258,16 +245,16 @@ SPEC CHECKSUMS:
package_info_plus: 6c92f08e1f853dc01228d6f553146438dafcd14e
path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943
permission_handler_apple: e76247795d700c14ea09e3a2d8855d41ee80a2e6
Protobuf: 6a4183ec1d51649eb2be7b86ccc286e5c539219c
Protobuf: d94761c33f1239c0a43a0817ca1a5f7f7c900241
reactive_ble_mobile: 9ce6723d37ccf701dbffd202d487f23f5de03b4c
shared_preferences_foundation: 5b919d13b803cadd15ed2dc053125c68730e5126
sqflite: 31f7eba61e3074736dff8807a9b41581e4f7f15a
SwiftProtobuf: 69f02cd54fb03201c5e6bf8b76f687c5ef7541a3
SwiftProtobuf: 407a385e97fd206c4fbe880cc84123989167e0d1
Toast: 91b396c56ee72a5790816f40d3a94dd357abc196
url_launcher_ios: bf5ce03e0e2088bad9cc378ea97fa0ed5b49673b
video_player_avfoundation: 81e49bb3d9fb63dccf9fa0f6d877dc3ddbeac126
webview_flutter_wkwebview: 2e2d318f21a5e036e2c3f26171342e95908bd60a
PODFILE CHECKSUM: 42aa7ffc6134b996f93caa6a9b6a2b5b580ff28a
PODFILE CHECKSUM: 85f69c27139c5112fdd69e85b95c1cc6de403d3e
COCOAPODS: 1.12.1
COCOAPODS: 1.14.3

View File

@ -649,7 +649,6 @@
97C146EC1CF9000F007C117D /* Resources */,
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
8AC988DFB36B18C3A52624A8 /* [CP] Embed Pods Frameworks */,
C87CD71185302EE14BA1323E /* [CP] Copy Pods Resources */,
);
buildRules = (
@ -667,7 +666,7 @@
97C146E61CF9000F007C117D /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1430;
LastUpgradeCheck = 1300;
ORGANIZATIONNAME = "";
TargetAttributes = {
97C146ED1CF9000F007C117D = {
@ -822,23 +821,6 @@
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
8AC988DFB36B18C3A52624A8 /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
alwaysOutOfDate = 1;
@ -1007,6 +989,7 @@
ENABLE_BITCODE = NO;
GCC_NO_COMMON_BLOCKS = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
@ -1257,6 +1240,7 @@
ENABLE_BITCODE = NO;
GCC_NO_COMMON_BLOCKS = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
@ -1401,6 +1385,7 @@
ENABLE_BITCODE = NO;
GCC_NO_COMMON_BLOCKS = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1430"
LastUpgradeVersion = "1300"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"

View File

@ -1,9 +1,16 @@
import 'dart:async';
import 'dart:convert';
import 'dart:math';
import 'package:flutter/services.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:permission_handler/permission_handler.dart';
import '../../../../talk/call/g711.dart';
import '../../../../talk/udp/udp_manage.dart';
import '../../../../talk/udp/udp_senderManage.dart';
import '../../../../tools/baseGetXController.dart';
import '../../../../tools/eventBusEventManage.dart';
import 'lockMonitoring_state.dart';
@ -11,6 +18,11 @@ import 'lockMonitoring_state.dart';
class LockMonitoringLogic extends BaseGetXController {
final LockMonitoringState state = LockMonitoringState();
///
/// Caches the shared [VoiceProcessor] instance on the page state so the
/// monitoring screen can start/stop audio capture later.
initRecorder() {
state.voiceProcessor = VoiceProcessor.instance;
}
///
StreamSubscription? _getTVDataRefreshUIEvent;
void _getTVDataRefreshUIAction() {
@ -29,12 +41,247 @@ class LockMonitoringLogic extends BaseGetXController {
///
StreamSubscription? _getUDPStatusRefreshUIEvent;
void _getUDPStatusRefreshUIAction() {
// eventBus
_getUDPStatusRefreshUIEvent = eventBus.on<GetUDPStatusRefreshUI>().listen((event) {
state.udpStatus.value = event.udpStatus;
});
}
/// Streams the bundled G.711 test clip to the lock in 320-byte frames.
///
/// Loads `assets/s10-g711.bin` through the project [G711] helper and
/// forwards each chunk via [sendRecordData]; used for loopback testing
/// of the UDP voice path.
Future<void> _readG711Data() async {
  const String assetPath = 'assets/s10-g711.bin';
  final List<int> audioData = await G711().readAssetFile(assetPath);
  if (audioData.isEmpty) {
    print('Failed to read audio data.');
    return;
  }
  const int frameSize = 320; // bytes per UDP audio frame
  for (int offset = 0; offset < audioData.length; offset += frameSize) {
    final int end = (offset + frameSize > audioData.length)
        ? audioData.length
        : offset + frameSize;
    final List<int> chunk = audioData.sublist(offset, end);
    sendRecordData({
      "bytes": chunk,
      "lockID": UDPManage().lockId,
      "lockIP": UDPManage().host,
      "userMobile": await state.userMobile,
      "userMobileIP": await state.userMobileIP,
    });
    print(chunk);
  }
  print('G711数据发送完成');
}
/// Registers voice-processor listeners and starts capturing audio in
/// 320-sample frames at 8000 Hz.
///
/// Each captured PCM frame is converted to u-law in place via
/// [linearToULaw] and sent to the lock through [sendRecordData].
/// Recorder errors from the native side are printed.
///
/// NOTE(review): every call registers a fresh frame/error listener, so
/// invoking this twice without removing listeners would double-send
/// frames — confirm callers only start once per monitoring session.
Future<void> startProcessing() async {
  frameListener(List<int> frame) async {
    Get.log('Get data.length:${frame.length} Received data:$frame');
    // Convert linear PCM to u-law in place before transmission.
    for (int i = 0; i < frame.length; i++) {
      frame[i] = linearToULaw(frame[i]);
    }
    Get.log('change Get data.length:${frame.length} change Received data:$frame');
    await Future.delayed(const Duration(milliseconds: 50));
    sendRecordData({
      "bytes": frame,
      "lockID": UDPManage().lockId,
      "lockIP": UDPManage().host,
      "userMobile": await state.userMobile,
      "userMobileIP": await state.userMobileIP,
    });
  }

  errorListener(VoiceProcessorException error) {
    print("VoiceProcessorException: $error");
  }

  state.voiceProcessor?.addFrameListener(frameListener);
  state.voiceProcessor?.addErrorListener(errorListener);
  try {
    if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
      // 320-sample frames at 8000 Hz — matches the UDP payload size.
      await state.voiceProcessor?.start(320, 8000);
    } else {
      // Previously swallowed silently; surface it for debugging.
      Get.log('startProcessing: record-audio permission not granted');
    }
  } on PlatformException catch (ex) {
    Get.log("PlatformException: $ex");
  }
}
/// Stops the native voice processor.
///
/// Platform failures are logged rather than propagated so teardown
/// (e.g. page dispose) never throws.
Future<void> stopProcessing() async {
  try {
    await state.voiceProcessor?.stop();
  } on PlatformException catch (ex) {
    Get.log("PlatformException: $ex");
  }
}
/// Generic error sink for capture callbacks; logs and swallows the error.
void onError(Object e) {
  print(e);
}
/// Packages one u-law audio frame and sends it to the lock over UDP.
///
/// Expected [args] keys:
/// - "bytes": u-law audio payload for this frame
/// - "lockID" / "lockIP": target lock identity and address
/// - "userMobile" / "userMobileIP": sender identity and address
///
/// A fixed 20-byte media header is prepended to the payload; bytes 6-7
/// carry a little-endian frame sequence number that wraps from 65535
/// back to 1 (0 is only used before the first frame).
sendRecordData(Map<String, dynamic> args) async {
  List<int> bytes = args["bytes"];
  String? lockID = args["lockID"];
  String? lockIP = args["lockIP"];
  String? userMobile = args["userMobile"];
  String? userMobileIP = args["userMobileIP"];

  // Advance the per-session frame counter with wrap-around.
  state.udpSendDataFrameNumber++;
  if (state.udpSendDataFrameNumber >= 65536) state.udpSendDataFrameNumber = 1;

  // Fixed media header; field meanings mirror the lock's expected frame
  // layout — TODO confirm against the UDP protocol spec.
  List<int> topBytes = [
    1, 1, 1, 1,
    1, 0,
    1, 0, // bytes 6-7: frame sequence number, patched below
    64, 0, 0, 0,
    1, 0,
    1, 0,
    64, 1,
    176, 4,
  ];
  // Little-endian sequence number into header bytes 6 and 7.
  topBytes[6] = (state.udpSendDataFrameNumber & 0x000000FF);
  topBytes[7] = ((state.udpSendDataFrameNumber & 0x0000FF00) >> 8);
  // Log message previously claimed indices 63/64; the code writes 6/7.
  print("udpSendDataFrameNumber:${state.udpSendDataFrameNumber} topBytes[6]:${topBytes[6]} topBytes[7]:${topBytes[7]}");

  topBytes.addAll(bytes);
  Get.log("setVoiceBytes:$topBytes");

  // command 150 / subCommand 8: voice data while a call is in progress.
  UDPSenderManage.sendMainProtocol(
    command: 150,
    commandTypeIsCalling: 1,
    subCommand: 8,
    lockID: lockID,
    lockIP: lockIP,
    userMobile: userMobile,
    userMobileIP: userMobileIP,
    endData: topBytes
  );
}
// pcm
/// Encodes a 16-bit linear PCM sample as a G.711 u-law byte.
///
/// Follows the classic Sun g711.c `linear2ulaw` shape: add the u-law
/// bias (0x84), locate the segment via [search], pack segment and
/// 4-bit mantissa, then invert bits per the u-law convention. Values
/// beyond the table range clamp to the maximum codeword.
int linearToULaw(int pcmVal) {
  final int mask;
  int biased;
  if (pcmVal < 0) {
    biased = 0x84 - pcmVal; // biased magnitude of a negative sample
    mask = 0x7F;
  } else {
    biased = pcmVal + 0x84;
    mask = 0xFF;
  }
  final int seg = search(biased);
  if (seg >= 8) {
    // Out of range: clamp to the maximum u-law codeword.
    return 0x7F ^ mask;
  }
  final int uval = (seg << 4) | ((biased >> (seg + 3)) & 0xF);
  return uval ^ mask;
}
/// Returns the u-law segment index (0-7) for a biased sample value —
/// the first segment-end table entry that [val] does not exceed — or 8
/// when the value is beyond the 16-bit table range.
int search(int val) {
  const List<int> segmentEnds = [
    0xFF, 0x1FF, 0x3FF, 0x7FF, 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF,
  ];
  for (int i = 0; i < segmentEnds.length; i++) {
    if (val <= segmentEnds[i]) return i;
  }
  return segmentEnds.length;
}
/// Maps a PCM frame to a normalized volume level in [0.0, 1.0].
///
/// Computes the frame RMS, converts it to dBFS against 16-bit full
/// scale (32767), then linearly maps the -50..0 dBFS range onto 0..1.
/// Returns 0.0 for an empty frame (previously 0/0 produced NaN).
double _calculateVolumeLevel(List<int> frame) {
  if (frame.isEmpty) return 0.0;
  double sumSquares = 0.0;
  for (int sample in frame) {
    sumSquares += pow(sample, 2);
  }
  double rms = sqrt(sumSquares / frame.length);
  // A silent frame gives log(0) = -infinity, which clamps to 0.0 below.
  double dbfs = 20 * log(rms / 32767.0) / log(10);
  double normalizedValue = (dbfs + 50) / 50;
  return normalizedValue.clamp(0.0, 1.0);
}
Future<bool> getPermissionStatus() async {
Permission permission = Permission.microphone;
//granted denied permanentlyDenied
@ -67,6 +314,8 @@ class LockMonitoringLogic extends BaseGetXController {
_getTVDataRefreshUIAction();
_getUDPStatusRefreshUIAction();
initRecorder();
}
@override
@ -81,5 +330,7 @@ class LockMonitoringLogic extends BaseGetXController {
print("锁详情界面销毁了");
_getTVDataRefreshUIEvent!.cancel();
_getUDPStatusRefreshUIEvent!.cancel();
stopProcessing();
}
}

View File

@ -1,26 +1,13 @@
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:isolate';
import 'dart:typed_data';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:get/get.dart';
import 'package:network_info_plus/network_info_plus.dart';
import 'package:path_provider/path_provider.dart';
import 'package:flutter_audio_capture/flutter_audio_capture.dart';
import '../../../../app_settings/app_colors.dart';
import '../../../../blue/io_tool/io_tool.dart';
import '../../../../talk/udp/udp_manage.dart';
import '../../../../talk/udp/udp_senderManage.dart';
import '../../../../talk/udp/udp_talkClass.dart';
import '../../../../tools/showTFView.dart';
import '../../../../tools/storage.dart';
import '../../../../tools/toast.dart';
import 'lockMonitoring_logic.dart';
@ -35,18 +22,6 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
final logic = Get.put(LockMonitoringLogic());
final state = Get.find<LockMonitoringLogic>().state;
late FlutterAudioCapture audioCapture;
// late FlutterSoundRecorder recorder;
// late FlutterSoundPlayer player;
// late String filePath;
@override
void initState() {
super.initState();
_initRecorder();
}
@override
Widget build(BuildContext context) {
return SizedBox(
@ -177,9 +152,9 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
if (!value) {
return;
}
var userMobileIP = await NetworkInfo().getWifiIP();
var userMobile = await Storage.getMobile();
// state.isSenderAudioData.value = false;
print("发送接听了");
//
UDPSenderManage.sendMainProtocol(
command: 150,
@ -187,8 +162,8 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
subCommand: 6,
lockID: UDPManage().lockId,
lockIP: UDPManage().host,
userMobile: userMobile,
userMobileIP: userMobileIP,
userMobile: await state.userMobile,
userMobileIP: await state.userMobileIP,
endData: []
);
});
@ -196,27 +171,24 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
longPress: (){
//
print("onLongPress");
state.listAudioData.value = <int>[];
if (state.udpStatus.value == 8) {
state.udpStatus.value = 9;
}
_startRecording();
// logic.readG711Data();
logic.startProcessing();
},
longPressUp: () async {
//
print("onLongPressUp");
// _playRecording();
// _stopCapture();
if (state.udpStatus.value == 9) {
state.udpStatus.value = 8;
}
}
)
),
bottomBtnItemWidget(
"images/main/icon_lockDetail_hangUp.png", "挂断", Colors.red, () async {
var userMobileIP = await NetworkInfo().getWifiIP();
var userMobile = await Storage.getMobile();
bottomBtnItemWidget("images/main/icon_lockDetail_hangUp.png", "挂断", Colors.red, () async {
logic.stopProcessing();
//
UDPSenderManage.sendMainProtocol(
@ -225,15 +197,12 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
subCommand: 30,
lockID: UDPManage().lockId,
lockIP: UDPManage().host,
userMobile: userMobile,
userMobileIP: userMobileIP,
userMobile: await state.userMobile,
userMobileIP: await state.userMobileIP,
endData: []);
}),
bottomBtnItemWidget("images/main/icon_lockDetail_monitoringUnlock.png",
"开锁", AppColors.mainColor, () {
// _playRecording();
showDeletPasswordAlertDialog(context);
bottomBtnItemWidget("images/main/icon_lockDetail_monitoringUnlock.png", "开锁", AppColors.mainColor, () {
showDeletPasswordAlertDialog(context);
})
]);
}
@ -296,7 +265,7 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
context: context,
builder: (BuildContext context) {
return ShowTFView(
title: "请输入开锁密码",
title: "请输入六位数字开锁密码",
tipTitle: "",
controller: state.passwordTF,
inputFormatters: [
@ -310,9 +279,6 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
return;
}
var userMobileIP = await NetworkInfo().getWifiIP();
var userMobile = await Storage.getMobile();
//
UDPSenderManage.sendMainProtocol(
command: 150,
@ -320,8 +286,8 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
subCommand: 10,
lockID: UDPManage().lockId,
lockIP: UDPManage().host,
userMobile: userMobile,
userMobileIP: userMobileIP,
userMobile: await state.userMobile,
userMobileIP: await state.userMobileIP,
endData: []);
Get.back();
},
@ -337,328 +303,5 @@ class _LockMonitoringPageState extends State<LockMonitoringPage> {
void dispose() {
super.dispose();
// UDPTalkClass().isBeCall = false;
}
//
_initRecorder() {
// recorder = FlutterSoundRecorder();
audioCapture = FlutterAudioCapture();
}
// Future<void> _startRecording(List<int> dataList) async {
// try {
// await recorder.openAudioSession();
// ///
// recorder.onProgress!.listen((e) {
// print("onProgress:$e");
// });
//
// await recorder.startRecorder(
// onProgress: (StreamController<FooderData> progress) {
// progress.stream.listen((FooderData fooData) {
// // PCM fooData.buffer
// print('Received PCM data: ${fooData.buffer.length} bytes');
// });
// },
// );
// setState(() {
// _isRecording = true;
// });
// } catch (e) {
// print('Error starting recording: $e');
// }
// }
// Future<void> _stopRecording() async {
// try {
// await recorder.stopRecorder();
// await _recorder.closeAudioSession();
// setState(() {
// _isRecording = false;
// });
// } catch (e) {
// print('Error stopping recording: $e');
// }
// }
//
_startRecording() async {
try {
// _getFilePath().then((value) {
// filePath = value;
// });
// await recorder.openRecorder();
// //
// recorder.onProgress!.listen((e) {
// print("onProgress:$e");
// });
// await recorder.startRecorder(
// toFile: filePath,
// codec: Codec.pcm16WAV,
// bitRate: 8000,
// numChannels: 1,
// sampleRate: 8000,
// );
_startCapture();
} catch (e) {
print('Error starting recording: $e');
}
// getFilePath().then((value) {
// filePath = value;
// });
// await recorder.openRecorder();
// await recorder.startRecorder(
// toFile: filePath,
// codec: Codec.pcm16WAV,
// bitRate: 8000,
// numChannels: 1,
// sampleRate: 8000,
// );
}
Future<void> _startCapture() async {
await audioCapture.start(
_listener,
onError,
sampleRate: 8000,
bufferSize: 3000
);
}
Future<void> _stopCapture() async {
await audioCapture.stop();
}
Future<void> _listener(dynamic obj) async {
print('data.length:${obj.length} Received data:$obj');
var buffer = Float64List.fromList(obj.cast<double>());
Int16List list = float64ListToInt16List(buffer);
print('Get data.length:${list.length} Received data:$list');
for (int i = 0; i < list.length; i++) {
list[i] = linearToULaw(list[i]);
}
print('change Get data.length:${list.length} change Received data:$list');
List<int> sendList = list.toList();
Isolate isolate = await Isolate.spawn(_sendRecordData, {
"bytes": sendList,
"udpSendDataFrameNumber": 0,
"lockID": UDPManage().lockId,
"lockIP": UDPManage().host,
"userMobile": await Storage.getMobile(),
"userMobileIP": await NetworkInfo().getWifiIP(),
});
// 线线
// receivePort.listen((data) {
// print('Received data: $data');
// });
}
void onError(Object e) {
print(e);
}
int linearToULaw(int pcmVal) {
int mask;
int seg;
int uval;
if (pcmVal < 0) {
pcmVal = 0x84 - pcmVal;
mask = 0x7F;
} else {
pcmVal += 0x84;
mask = 0xFF;
}
seg = search(pcmVal);
if (seg >= 8) {
return 0x7F ^ mask;
} else {
uval = (seg << 4);
uval |= ((pcmVal >> (seg + 3)) & 0xF);
return uval ^ mask;
}
}
int search(int val) {
List<int> table = [0xFF, 0x1FF, 0x3FF, 0x7FF, 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF];
int size = 8;
for (int i = 0; i < size; i++) {
if (val <= table[i]) {
return i;
}
}
return size;
}
Int16List float64ListToInt16List(Float64List float64List) {
Int16List int16List = Int16List(float64List.length);
for (int i = 0; i < float64List.length; i++) {
double sample = float64List[i];
// Make sure the sample value is within the int16 range
if (sample > 1.0) {
sample = 1.0;
} else if (sample < -1.0) {
sample = -1.0;
}
// Convert the sample to int16 by scaling it to the full int16 range
int16List[i] = (sample * 32767).toInt();
}
return int16List;
}
//
// _stopRecording() async {
// try {
// await recorder.stopRecorder();
// } catch (e) {
// print('Error stopping recording: $e');
// }
// await recorder.stopRecorder();
//
// // final file = File(filePath);
// File file = File(filePath); // 使 create
//
// print('filePathfilePath:$filePath file:$file await file.exists():${await file.exists()}');
// if (await file.exists()) {
// final List<int> bytes = await file.readAsBytes();
// print('Recorded audio bytes.length:${bytes.length} bytes: $bytes');
//
// _sendRecordData(bytes);
// }
// }
// Future<String> _getFilePath() async {
// final directory = await getApplicationDocumentsDirectory();
// final filePath = '${directory.path}/recording.wav';
//
// //
// File file = File(filePath);
// await file.create(); // 使 create
//
// return filePath;
// }
// Future<void> _getRecordedAudioBytes() async {
// final file = File(filePath);
// if (await file.exists()) {
// final List<int> bytes = await file.readAsBytes();
// print('Recorded audio bytes: $bytes');
// }
// }
//
// _playRecording() async {
// player = FlutterSoundPlayer();
// player.openPlayer();
//
// await player.startPlayer(
// fromURI: filePath,
// codec: Codec.pcm16WAV,
// );
// Toast.show(msg: "储存录音播放了");
// print('_playRecording() 储存录音播放了');
// }
//
// _stopPlaying() async {
// await player.stopPlayer();
// }
// var udpSendDataFrameNumber = 0;
_sendRecordData(Map<String, dynamic> args) async {
List<int> bytes = args["bytes"];
int udpSendDataFrameNumber = args["udpSendDataFrameNumber"];
String? lockID = args["lockID"];
String? lockIP = args["lockIP"];
String? userMobile = args["userMobile"];
String? userMobileIP = args["userMobileIP"];
int length = 320; // List的长度
for (int i = 0; i < bytes.length; i += length) {
int end = (i + length < bytes.length) ? i + length : bytes.length;
bytes.sublist(i, end);
// _sendRecordData(bytes.sublist(i, end));
// //
// var userMobileIP = await NetworkInfo().getWifiIP();
// var userMobile = await Storage.getMobile();
while(true) {
udpSendDataFrameNumber++;
if (udpSendDataFrameNumber >= 65536) udpSendDataFrameNumber=1;
// 57
List<int> topBytes = [
1, 1, 1, 1, //
1, 0, //
1, 0, //
64, 0, 0, 0, //
1, 0, //
1, 0, //
64, 1, //
176, 4, //
];
ByteData byteData = ByteData(2);
byteData.setUint16(0, udpSendDataFrameNumber, Endian.little);
topBytes[6] = byteData.getUint8(0);
topBytes[7] = byteData.getUint8(1);
topBytes.addAll(bytes);
// print("topBytes:$topBytes");
// UDPSenderManage.sendMainProtocol(
// command: 150,
// commandTypeIsCalling: 1,
// subCommand: 8,
// lockID: lockID,
// lockIP: lockIP,
// userMobile: userMobile,
// userMobileIP: userMobileIP,
// endData: topBytes
// );
//
topBytes.add(150);
//
topBytes.add(1);
//
topBytes.add(8);
// lockID
List<int> lockIDData = utf8.encode(lockID!);
topBytes.addAll(lockIDData);
topBytes = getFixedLengthList(lockIDData, 20 - lockIDData.length);
// lockIP
var lockIPList = lockIP!.split(".");
lockIPList.forEach((element) {
topBytes.add(int.parse(element));
});
// userMobile
List<int> userMobileData = utf8.encode(userMobile!);
topBytes.addAll(userMobileData);
topBytes = getFixedLengthList(topBytes, 20 - userMobileData.length);
// userMobileIP
var userMobileIPList = lockIP!.split(".");
userMobileIPList.forEach((element) {
topBytes.add(int.parse(element));
});
UDPManage().sendData(topBytes);
}
}
}
}

View File

@ -1,14 +1,33 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:flutter/material.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:network_info_plus/network_info_plus.dart';
import '../../../../tools/storage.dart';
class LockMonitoringState {
var isOpenVoice = false.obs;
var udpSendDataFrameNumber = 0;//
var udpStatus =
0.obs; //0 1 2 3 4 5 6 8 9
// var isSenderAudioData = false.obs;//
var userMobileIP = NetworkInfo().getWifiIP();
var userMobile = Storage.getMobile();
var udpStatus = 0.obs; //0 1 2 3 4 5 6 8 9
var passwordTF = TextEditingController();
var listData = Uint8List(0).obs; //
var listAudioData = <int>[].obs; //
late final VoiceProcessor? voiceProcessor;
// 60
late Timer oneMinuteTimeTimer;
// 10
late Timer answerTimer;
late Timer hangUpTimer;
late Timer openDoorTimer;
}

View File

@ -43,8 +43,12 @@ class UDPMainProtocolCommand extends UDPSenderProtocol {
data.add(subCommand!);
// lockID
data.addAll(utf8.encode(lockID!));
data = getFixedLengthList(data, 20 - utf8.encode(lockID!).length);
List<int> lockIDData = utf8.encode(lockID!);
data.addAll(lockIDData);
// topBytes = getFixedLengthList(lockIDData, 20 - lockIDData.length);
for (int i = 0; i < 20 - lockIDData.length; i++) {
data.add(0);
}
// lockIP
var lockIPList = lockIP!.split(".");
@ -53,8 +57,12 @@ class UDPMainProtocolCommand extends UDPSenderProtocol {
});
// userMobile
data.addAll(utf8.encode(userMobile!));
data = getFixedLengthList(data, 20 - utf8.encode(userMobile!).length);
List<int> userMobileData = utf8.encode(userMobile!);
data.addAll(userMobileData);
// topBytes = getFixedLengthList(topBytes, 20 - userMobileData.length);
for (int i = 0; i < 20 - userMobileData.length; i++) {
data.add(0);
}
// userMobileIP
var userMobileIPList = lockIP!.split(".");

View File

@ -9,7 +9,6 @@
#include <aj_captcha_flutter/aj_captcha_flutter_plugin.h>
#include <audioplayers_linux/audioplayers_linux_plugin.h>
#include <file_selector_linux/file_selector_plugin.h>
#include <flutter_audio_capture/flutter_audio_capture_plugin.h>
#include <url_launcher_linux/url_launcher_plugin.h>
void fl_register_plugins(FlPluginRegistry* registry) {
@ -22,9 +21,6 @@ void fl_register_plugins(FlPluginRegistry* registry) {
g_autoptr(FlPluginRegistrar) file_selector_linux_registrar =
fl_plugin_registry_get_registrar_for_plugin(registry, "FileSelectorPlugin");
file_selector_plugin_register_with_registrar(file_selector_linux_registrar);
g_autoptr(FlPluginRegistrar) flutter_audio_capture_registrar =
fl_plugin_registry_get_registrar_for_plugin(registry, "FlutterAudioCapturePlugin");
flutter_audio_capture_plugin_register_with_registrar(flutter_audio_capture_registrar);
g_autoptr(FlPluginRegistrar) url_launcher_linux_registrar =
fl_plugin_registry_get_registrar_for_plugin(registry, "UrlLauncherPlugin");
url_launcher_plugin_register_with_registrar(url_launcher_linux_registrar);

View File

@ -6,7 +6,6 @@ list(APPEND FLUTTER_PLUGIN_LIST
aj_captcha_flutter
audioplayers_linux
file_selector_linux
flutter_audio_capture
url_launcher_linux
)

View File

@ -9,7 +9,6 @@ import aj_captcha_flutter
import audio_session
import audioplayers_darwin
import device_info_plus
import ffmpeg_kit_flutter
import file_selector_macos
import flutter_pcm_sound
import just_audio
@ -25,7 +24,6 @@ func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {
AudioSessionPlugin.register(with: registry.registrar(forPlugin: "AudioSessionPlugin"))
AudioplayersDarwinPlugin.register(with: registry.registrar(forPlugin: "AudioplayersDarwinPlugin"))
DeviceInfoPlusMacosPlugin.register(with: registry.registrar(forPlugin: "DeviceInfoPlusMacosPlugin"))
FFmpegKitFlutterPlugin.register(with: registry.registrar(forPlugin: "FFmpegKitFlutterPlugin"))
FileSelectorPlugin.register(with: registry.registrar(forPlugin: "FileSelectorPlugin"))
FlutterPcmSoundPlugin.register(with: registry.registrar(forPlugin: "FlutterPcmSoundPlugin"))
JustAudioPlugin.register(with: registry.registrar(forPlugin: "JustAudioPlugin"))

View File

@ -125,17 +125,18 @@ dependencies:
convert: ^3.1.1
just_audio: ^0.9.36
flutter_sound: ^9.2.13
ffmpeg_kit_flutter: 5.1.0-LTS
# ffmpeg_kit_flutter: 5.1.0-LTS
fast_gbk: ^1.0.0
flutter_audio_capture: ^1.1.6
flutter_pcm_sound: ^1.1.0
# flutter_audio_capture: <1.1.5
flutter_voice_processor: ^1.1.0
dev_dependencies:
flutter_test:
sdk: flutter
# The "flutter_lints" package below contains a set of recommended lints to
# encourage good coding practices. The lint set provided by the package is
# encourage good coding practices. The lint set provided by the package is
# activated in the `analysis_options.yaml` file located at the root of your
# package. See that file for information about deactivating specific lint
# rules and activating additional ones.