import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'dart:ui' as ui;

import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';

import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:g711_flutter/g711_flutter.dart';
import 'package:gallery_saver/gallery_saver.dart';

import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/blue/blue_manage.dart';
import 'package:star_lock/blue/io_protocol/io_openLock.dart';
import 'package:star_lock/blue/io_tool/io_tool.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';

import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pbenum.dart';
import 'package:star_lock/talk/startChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/startChart/start_chart_manage.dart';

import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/talk/udp/udp_manage.dart';
import 'package:star_lock/talk/udp/udp_senderManage.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/storage.dart';

import '../../../../tools/baseGetXController.dart';

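/// Controller for the intercom (talk) view.
///
/// It buffers incoming G711 audio and image frames, plays them on an
/// adaptive timer, toggles the expected media types, handles remote unlock,
/// and manages microphone capture, screenshots, and screen recording.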
class TalkViewLogic extends BaseGetXController {
  final TalkViewState state = TalkViewState();
  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
  Timer? _syncTimer; // Timer driving the audio/video playback refresh
  int _startTime = 0; // Playback start timestamp, used to position frames by their timestamps
  int bufferSize = 20; // Buffer size (in frames)
  final List<int> frameTimestamps = []; // Frame timestamps, used to compute FPS
  int frameIntervalMs = 45; // Initial frame interval of 45 ms (~22 FPS)
  int minFrameIntervalMs = 30; // Minimum frame interval (~33 FPS)
  int maxFrameIntervalMs = 100; // Maximum frame interval (~10 FPS)

  /// Initialize the PCM audio player
  void _initFlutterPcmSound() {
    const int sampleRate = 8000;
    FlutterPcmSound.setLogLevel(LogLevel.none);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
    // Configure the feed threshold
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(-1); // Android-specific handling
    } else {
      FlutterPcmSound.setFeedThreshold(sampleRate ~/ 32); // Non-Android platforms
    }
  }

  /// Hang up
  void udpHangUpAction() async {
    if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
      // Hang up if the call is already in progress
      StartChartManage().sendTalkHangupMessage();
    } else {
      // Otherwise reject the incoming call
      StartChartManage().sendTalkRejectMessage();
    }
    Get.back();
  }

  // Send the answer command
  void initiateAnswerCommand() {
    StartChartManage().startTalkAcceptTimer();
  }

  // Listen to the audio/video data stream
  void _startListenTalkData() {
    state.talkDataRepository.talkDataStream.listen((TalkData talkData) async {
      final contentType = talkData.contentType;
      int currentTimestamp = DateTime.now().millisecondsSinceEpoch;
      // Dispatch by content type
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          if (state.audioBuffer.length >= bufferSize) {
            state.audioBuffer.removeAt(0); // Drop the oldest frame
          }
          state.audioBuffer.add(talkData); // Append the new frame
          break;
        case TalkData_ContentTypeE.Image:
          if (state.videoBuffer.length >= bufferSize) {
            state.videoBuffer.removeAt(0); // Drop the oldest frame
          }
          state.videoBuffer.add(talkData); // Append the new frame
          // Update the network status
          // updateNetworkStatus(currentTimestamp);
          break;
      }
    });
  }

  /// Listen to the talk status
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          _handleInvalidTalkStatus();
          break;
        default:
          // Other statuses need no handling here
          break;
      }
    });
  }

  /// Play audio data
  void _playAudioData(TalkData talkData) async {
    // final list = G711().convertList(talkData.content);
    final list = G711().decodeAndDenoise(talkData.content, true, 8000, 300, 50);

    // Convert the PCM data to a PcmArrayInt16
    final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
    FlutterPcmSound.feed(fromList);
    if (!state.isPlaying.value) {
      FlutterPcmSound.play();
      state.isPlaying.value = true;
    }
  }

  /// Play video data
  void _playVideoData(TalkData talkData) async {
    state.listData.value = Uint8List.fromList(talkData.content);
  }

  /// Start playback
  void _startPlayback() {
    Future.delayed(Duration(milliseconds: 800), () {
      _startTime = DateTime.now().millisecondsSinceEpoch;
      _syncTimer ??=
          Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
        final currentTime = DateTime.now().millisecondsSinceEpoch;
        final elapsedTime = currentTime - _startTime;

        // Dynamically adjust the frame interval
        _adjustFrameInterval();

        // Play the audio frame that is due
        if (state.audioBuffer.isNotEmpty &&
            state.audioBuffer.first.durationMs <= elapsedTime) {
          // Check whether audio playback is enabled
          if (state.isOpenVoice.value) {
            AppLog.log('播放音频:${state.audioBuffer[0]}');
            _playAudioData(state.audioBuffer.removeAt(0));
          } else {
            // When audio is muted, still consume the frame so the buffer
            // stays current and does not grow without bound
            state.audioBuffer.removeAt(0);
          }
        }

        // Play the video frame that is due
        // Frame-skipping strategy: if several buffered frames are already
        // older than the current time, keep only the newest one
        while (state.videoBuffer.isNotEmpty &&
            state.videoBuffer.first.durationMs <= elapsedTime) {
          // If more than one frame is due, drop the older ones
          if (state.videoBuffer.length > 1) {
            state.videoBuffer.removeAt(0);
          } else {
            _playVideoData(state.videoBuffer.removeAt(0));
          }
        }
      });
    });
  }

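  // Adaptation policy used below: a lagging network enlarges the frame
  // buffer; when the video buffer runs low the frame interval grows (slower
  // playback, capped at maxFrameIntervalMs), and when it grows past 20 frames
  // the interval shrinks (faster playback, floored at minFrameIntervalMs).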
  /// Dynamically adjust the frame interval
  void _adjustFrameInterval() {
    int newFrameIntervalMs = frameIntervalMs;
    if (state.networkStatus.value == NetworkStatus.lagging) {
      bufferSize = 30; // Enlarge the buffer while the network is lagging
    } else {
      bufferSize = 20; // Restore the default buffer size
    }

    if (state.videoBuffer.length < 10 &&
        newFrameIntervalMs < maxFrameIntervalMs) {
      // The buffer is running low and the interval is below the maximum: increase it
      newFrameIntervalMs += 5;
    } else if (state.videoBuffer.length > 20 &&
        newFrameIntervalMs > minFrameIntervalMs) {
      // The buffer is filling up and the interval is above the minimum: decrease it
      newFrameIntervalMs -= 5;
    }
    // Only rebuild the timer when the frame interval actually changed
    if (newFrameIntervalMs != frameIntervalMs) {
      frameIntervalMs = newFrameIntervalMs;
      // Cancel the old timer
      _syncTimer?.cancel();
      _syncTimer =
          Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
        final currentTime = DateTime.now().millisecondsSinceEpoch;
        final elapsedTime = currentTime - _startTime;

        // Play the audio frame that is due
        if (state.audioBuffer.isNotEmpty &&
            state.audioBuffer.first.durationMs <= elapsedTime) {
          // Check whether audio playback is enabled
          if (state.isOpenVoice.value) {
            _playAudioData(state.audioBuffer.removeAt(0));
          } else {
            // When audio is muted, still consume the frame so the buffer
            // stays current and does not grow without bound
            state.audioBuffer.removeAt(0);
          }
        }

        // Play the video frame that is due
        // Frame-skipping strategy: if several buffered frames are already
        // older than the current time, keep only the newest one
        int maxFramesToProcess = 5; // Process at most 5 frames per tick
        int processedFrames = 0;

        while (state.videoBuffer.isNotEmpty &&
            state.videoBuffer.first.durationMs <= elapsedTime &&
            processedFrames < maxFramesToProcess) {
          if (state.videoBuffer.length > 1) {
            state.videoBuffer.removeAt(0);
          } else {
            _playVideoData(state.videoBuffer.removeAt(0));
          }
          processedFrames++;
        }
      });
    }
  }

  /// Update the network status
  void updateNetworkStatus(int currentTimestamp) {
    if (state.lastFrameTimestamp.value != 0) {
      final frameInterval = currentTimestamp - state.lastFrameTimestamp.value;
      if (frameInterval > 500 && frameInterval <= 1000) {
        // Frame interval between 500 ms and 1 s: the network is lagging
        state.networkStatus.value = NetworkStatus.lagging;
        // showNetworkStatus("Network is lagging");
      } else if (frameInterval > 1000) {
        // Frame interval above 1 s: the network is delayed
        state.networkStatus.value = NetworkStatus.delayed;
        // showNetworkStatus("Network is delayed");
      } else {
        state.networkStatus.value = NetworkStatus.normal;
        state.alertCount.value = 0; // Reset the alert counter
      }
    }
    state.lastFrameTimestamp.value = currentTimestamp;
  }

  /// Show a network status toast
  void showNetworkStatus(String message) {
    // Only show the toast if the alert limit has not been reached
    // and EasyLoading is not already visible
    if (state.alertCount.value < state.maxAlertNumber.value &&
        !EasyLoading.isShow) {
      showToast(message);
      state.alertCount++;
    }
  }

  /// Stop audio playback
  void _stopPlayG711Data() async {
    await FlutterPcmSound.pause();
    await FlutterPcmSound.stop();
    await FlutterPcmSound.clear();
  }

  /// Open the door
  udpOpenDoorAction() async {
    final List<String>? privateKey =
        await Storage.getStringList(saveBluePrivateKey);
    final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);

    final List<String>? signKey = await Storage.getStringList(saveBlueSignKey);
    final List<int> signKeyDataList = changeStringListToIntList(signKey!);

    final List<String>? token = await Storage.getStringList(saveBlueToken);
    final List<int> getTokenList = changeStringListToIntList(token!);

    await _getLockNetToken();

    final OpenLockCommand openLockCommand = OpenLockCommand(
      lockID: BlueManage().connectDeviceName,
      userID: await Storage.getUid(),
      openMode: lockDetailState.openDoorModel,
      openTime: _getUTCNetTime(),
      onlineToken: lockDetailState.lockNetToken,
      token: getTokenList,
      needAuthor: 1,
      signKey: signKeyDataList,
      privateKey: getPrivateKeyList,
    );
    final messageDetail = openLockCommand.packageData();
    // Convert the List<int> to a hex string for logging
    String hexString = messageDetail
        .map((byte) => byte.toRadixString(16).padLeft(2, '0'))
        .join(' ');

    AppLog.log('open lock hexString: $hexString');
    // Send the remote unlock message
    StartChartManage().sendRemoteUnLockMessage(
      bluetoothDeviceName: BlueManage().connectDeviceName,
      openLockCommand: messageDetail,
    );
    showToast('已发送开门通知');
  }

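  /// Returns the unlock time in seconds since the epoch, corrected by the
  /// lock's differential time offset, or 0 when there is no network.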
  int _getUTCNetTime() {
    if (lockDetailState.isHaveNetwork) {
      return DateTime.now().millisecondsSinceEpoch ~/ 1000 +
          lockDetailState.differentialTime;
    } else {
      return 0;
    }
  }

  // Fetch the online unlock token; whether this endpoint is called depends on
  // the lock setting that controls whether unlocking requires a network connection.
  Future<void> _getLockNetToken() async {
    final LockNetTokenEntity entity = await ApiRepository.to.getLockNetToken(
        lockId: lockDetailState.keyInfos.value.lockId.toString());
    if (entity.errorCode!.codeIsSuccessful) {
      lockDetailState.lockNetToken = entity.data!.token!.toString();
      AppLog.log('从服务器获取联网token:${lockDetailState.lockNetToken}');
    } else {
      BuglyTool.uploadException(
          message: '点击了需要联网开锁', detail: '点击了需要联网开锁 获取连网token失败', upload: true);
      showToast('网络访问失败,请检查网络是否正常'.tr, something: () {});
    }
  }

  /// Get the microphone permission status
  Future<bool> getPermissionStatus() async {
    final Permission permission = Permission.microphone;
    // granted: allowed; denied: refused; permanentlyDenied: refused, never ask again
    final PermissionStatus status = await permission.status;
    if (status.isGranted) {
      return true;
    } else if (status.isDenied) {
      requestPermission(permission);
    } else if (status.isPermanentlyDenied) {
      openAppSettings();
    } else if (status.isRestricted) {
      requestPermission(permission);
    } else {}
    return false;
  }

  /// Request a permission
  void requestPermission(Permission permission) async {
    final PermissionStatus status = await permission.request();
    if (status.isPermanentlyDenied) {
      openAppSettings();
    }
  }

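  /// Requests the storage and microphone permissions needed for recording,
  /// and opens the app settings if storage was permanently denied.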
  Future<void> requestPermissions() async {
    // Request storage permission
    var storageStatus = await Permission.storage.request();
    // Request microphone permission
    var microphoneStatus = await Permission.microphone.request();

    if (storageStatus.isGranted && microphoneStatus.isGranted) {
      print("Permissions granted");
    } else {
      print("Permissions denied");
      // If a permission was denied, prompt the user or open the settings page
      if (await Permission.storage.isPermanentlyDenied) {
        openAppSettings(); // Open the app settings page
      }
    }
  }

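  /// Starts screen recording via FlutterScreenRecording and marks the
  /// recording state once recording has actually begun.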
  Future<void> startRecording() async {
    requestPermissions();
    if (state.isRecordingScreen.value) {
      showToast('录屏已开始,请勿重复点击');
      return; // Already recording, do not start again
    }
    bool start = await FlutterScreenRecording.startRecordScreen(
      "Screen Recording", // Video file name
      titleNotification: "Recording in progress", // Notification title
      messageNotification: "Tap to stop recording", // Notification body
    );

    if (start) {
      state.isRecordingScreen.value = true;
    }
  }

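  /// Stops screen recording and saves the resulting video to the system
  /// gallery.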
  Future<void> stopRecording() async {
    String path = await FlutterScreenRecording.stopRecordScreen;
    print("Recording saved to: $path");

    // Save the video to the system gallery
    bool? success = await GallerySaver.saveVideo(path);
    if (success == true) {
      print("Video saved to gallery");
    } else {
      print("Failed to save video to gallery");
    }

    showToast('录屏结束,已保存到系统相册');
    state.isRecordingScreen.value = false;
  }

  @override
  void onReady() {
    super.onReady();
  }

  @override
  void onInit() {
    super.onInit();

    // Start listening to the audio/video data stream
    _startListenTalkData();
    // Start listening to the talk status
    _startListenTalkStatus();
    // Seed the status once before the listener delivers anything.
    // *** The page is initialized only after the status has already changed,
    // so the latest status would otherwise be missed; assign it manually here. ***
    state.talkStatus.value = state.startChartTalkStatus.status;

    // Initialize the audio player
    _initFlutterPcmSound();

    // Start the playback timer
    _startPlayback();

    // Initialize the audio recorder
    _initAudioRecorder();

    requestPermissions();
  }

  @override
  void onClose() {
    _stopPlayG711Data(); // Stop audio playback
    state.listData.value = Uint8List(0); // Clear the video frame
    state.audioBuffer.clear(); // Clear the audio buffer
    state.videoBuffer.clear(); // Clear the video buffer
    _syncTimer?.cancel(); // Cancel the playback timer
    _syncTimer = null; // Release the timer reference
    stopProcessingAudio();
    super.onClose();
  }

  @override
  void dispose() {
    stopProcessingAudio();
    super.dispose();
  }

  /// Handle an invalid talk status
  void _handleInvalidTalkStatus() {
    state.listData.value = Uint8List(0);
    // Stop audio playback
    _stopPlayG711Data();
    stopProcessingAudio();
    // The status is invalid, leave the page
    Get.back();
  }

  /// Update the expected media types and send them
  void updateTalkExpect() {
    TalkExpectReq talkExpectReq = TalkExpectReq();
    if (state.isOpenVoice.value) {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );
    } else {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [],
      );
    }

    // Send the updated expectation
    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
        talkExpect: talkExpectReq);
    state.isOpenVoice.value = !state.isOpenVoice.value;
  }

  /// Take a screenshot and save it to the gallery
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('截图失败: 未找到当前上下文');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);

      if (byteData == null) {
        AppLog.log('截图失败: 图像数据为空');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();

      // Get the application documents directory
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';

      // Write the screenshot to a file
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);

      // Save the screenshot to the gallery
      await ImageGallerySaver.saveFile(imagePath);

      AppLog.log('截图保存路径: $imagePath');
      showToast('截图已保存到相册'.tr);
    } catch (e) {
      AppLog.log('截图失败: $e');
    }
  }

  /// Initialize the audio recorder
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }

  // Start recording audio
  Future<void> startProcessingAudio() async {
    // Register the recording frame listener and error listener
    state.voiceProcessor?.addFrameListener(_onFrame);
    state.voiceProcessor?.addErrorListener(_onError);
    try {
      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
        final bool? isRecording = await state.voiceProcessor?.isRecording();
        state.isRecordingAudio.value = isRecording!;
        state.startRecordingAudioTime.value = DateTime.now();
      } else {
        // state.errorMessage.value = 'Recording permission not granted';
      }
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to start recorder: $ex';
    }
  }

  /// Stop recording audio
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record the end time
      state.endRecordingAudioTime.value = DateTime.now();

      // Compute the recording duration
      final duration = state.endRecordingAudioTime.value!
          .difference(state.startRecordingAudioTime.value!);

      state.recordingAudioTime.value = duration.inSeconds;
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to stop recorder: $ex';
    } finally {
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording!;
    }
  }

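  /// Frame callback of the voice processor. Note that, as written, it encodes
  /// a zero-filled placeholder buffer (silence) rather than the captured
  /// frame and sends it as G711 talk data; the commented-out lines show the
  /// path for encoding the real microphone frame.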
  Future<void> _onFrame(List<int> frame) async {
    // state.recordingAudioAllFrames.add(frame); // Accumulate the frames
    // final List<int> concatenatedFrames =
    //     concatenateFrames(state.recordingAudioAllFrames); // Join all frames
    // final List<int> pcmBytes = _listLinearToULaw(frame);
    // final aLaw = G711().encodeALaw(frame);
    // final aLawFrame = listLinearToALaw(frame);
    // Build 640 zero-valued 8-bit PCM samples
    // (note: for unsigned 8-bit PCM the silence level is 128, not 0)
    final pcmSamples = List<int>.filled(640, 0);

    // Encode to A-law
    final aLawSamples = listLinearToALaw(pcmSamples, isUnsigned: true);

    final encode = DartG711Codec().encode(Uint8List.fromList(pcmSamples));
    AppLog.log('msg');

    // AppLog.log('录制的音频数据(A-law):$aLawFrame, size:${aLawFrame.length}');

    // Send the audio data
    await StartChartManage().sendTalkDataMessage(
      talkData: TalkData(
        content: aLawSamples,
        contentType: TalkData_ContentTypeE.G711,
        durationMs: DateTime.now().millisecondsSinceEpoch -
            state.startRecordingAudioTime.value.millisecondsSinceEpoch,
      ),
    );
  }

  void _onError(VoiceProcessorException error) {
    // state.errorMessage.value = error.message!;
    AppLog.log(error.message!);
  }

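  // Worked example of the bias-based A-law encoding implemented below,
  // assuming a positive 16-bit sample of 1000:
  //   sign      = 0x80                 (sample is positive)
  //   biased    = 1000 + 0x84 = 1132
  //   segment   = 6                    (1132 <= 0x7FF in the segment table)
  //   quantized = (1132 >> 9) & 0x0F = 2
  //   result    = 0x80 | (6 << 4) | 2 = 0xE2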
  int linearToALaw(int pcmVal) {
    const int ALAW_MAX = 0x7FFF; // Maximum 16-bit PCM value
    const int ALAW_BIAS = 0x84; // Bias value used by this encoder

    // Handle the sign bit
    int sign = (pcmVal & 0x8000) != 0 ? 0x00 : 0x80; // A-law sign bit
    if (sign == 0x00) {
      pcmVal = -pcmVal; // Take the absolute value of negative samples
    }

    // Clamp the PCM value to the valid range
    if (pcmVal > ALAW_MAX) {
      pcmVal = ALAW_MAX;
    }

    // Add the bias
    pcmVal += ALAW_BIAS;

    // Find the segment and the quantized value
    int seg = searchALawSegment(pcmVal);
    int quantizedValue = (pcmVal >> (seg + 3)) & 0x0F;

    // Assemble the A-law code
    int aLawVal = sign | (seg << 4) | quantizedValue;
    return aLawVal;
  }

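  /// Returns the segment index (0-7) whose upper bound in the table is the
  /// first one greater than or equal to [val]; values above 0xFFF return 8.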
  int searchALawSegment(int val) {
    const List<int> ALAW_SEGMENT_TABLE = [
      0x1F,
      0x3F,
      0x7F,
      0xFF,
      0x1FF,
      0x3FF,
      0x7FF,
      0xFFF
    ];
    const int size = 8;

    for (int i = 0; i < size; i++) {
      if (val <= ALAW_SEGMENT_TABLE[i]) {
        return i;
      }
    }
    return size;
  }

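  /// Packs pairs of 8-bit PCM samples into 16-bit values (treating them as
  /// unsigned when [isUnsigned] is true) and encodes each 16-bit value with
  /// [linearToALaw].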
  List<int> listLinearToALaw(List<int> pcmList, {bool isUnsigned = true}) {
    final List<int> aLawList = [];

    // Combine every two 8-bit PCM samples into one 16-bit PCM sample
    for (int i = 0; i < pcmList.length; i += 2) {
      int pcm8High = pcmList[i];
      int pcm8Low = (i + 1 < pcmList.length) ? pcmList[i + 1] : 0; // Pad with 0 if short

      // Combine the two 8-bit values into one 16-bit PCM value
      int pcm16;
      if (isUnsigned) {
        // Expand unsigned 8-bit PCM to 16-bit PCM
        pcm16 = ((pcm8High - 128) << 8) | (pcm8Low - 128);
      } else {
        // Expand signed 8-bit PCM to 16-bit PCM
        pcm16 = (pcm8High << 8) | pcm8Low;
      }

      // Encode the 16-bit PCM sample as A-law
      final int aLawVal = linearToALaw(pcm16);
      aLawList.add(aLawVal);
    }

    return aLawList;
  }
}