import 'dart:async';
import 'dart:io';
import 'dart:math'; // Import the math package to use sqrt
import 'dart:ui' as ui;

import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';

import '../../../../tools/baseGetXController.dart';

class TalkViewLogic extends BaseGetXController {
  final TalkViewState state = TalkViewState();

  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

  // Video jitter-buffer limits.
  final int minBufferSize = 2; // Minimum buffer of 2 frames, about 166 ms.
  final int maxBufferSize = 8; // Maximum buffer of 8 frames, about 666 ms.
  int bufferSize = 3; // Initial (default) buffer size.

  // Audio jitter-buffer limits.
  final int minAudioBufferSize = 1; // Minimum audio buffer of 1 frame.
  final int maxAudioBufferSize = 3; // Maximum audio buffer of 3 frames.
  int audioBufferSize = 2; // Default audio buffer of 2 frames.

  // Playback start-time bookkeeping.
  int _startTime = 0; // Timestamp when video playback started.
  int _startAudioTime = 0; // Timestamp when audio playback started.
  bool _isFirstFrame = true; // Whether the next video frame is the first one.
  bool _isFirstAudioFrame = true; // Whether the next audio frame is the first one.

  // Buffer for outgoing (recorded) audio bytes before they are sent.
  final List<int> _bufferedAudioFrames = <int>[];

  // Cache of decoded video frames, keyed by the frame content's hash code.
  final Map<String, ui.Image> _imageCache = {};
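
  // Note: with the sizes above, one video frame corresponds to roughly 83 ms
  // (2 frames ≈ 166 ms, 8 frames ≈ 666 ms), i.e. a stream of about 12 fps, so
  // the jitter buffer can absorb between ~166 ms and ~666 ms of network jitter.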

  /// Initializes the PCM audio player.
  void _initFlutterPcmSound() {
    const int sampleRate = 8000;
    FlutterPcmSound.setLogLevel(LogLevel.none);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
    // Set the feed threshold.
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(1024); // Android-specific handling.
    } else {
      FlutterPcmSound.setFeedThreshold(2000); // Non-Android platforms.
    }
  }

  /// Hangs up the call.
  void udpHangUpAction() async {
    if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
      // Already in a call: hang up.
      StartChartManage().startTalkHangupMessageTimer();
    } else {
      // Not yet answered: reject.
      StartChartManage().startTalkRejectMessageTimer();
    }
    Get.back();
  }

  // Sends the answer (accept) command.
  void initiateAnswerCommand() {
    StartChartManage().startTalkAcceptTimer();
  }

  // Listens to the incoming audio/video data stream.
  void _startListenTalkData() {
    state.talkDataRepository.talkDataStream.listen((TalkData talkData) async {
      final contentType = talkData.contentType;
      final currentTime = DateTime.now().millisecondsSinceEpoch;

      // Dispatch by content type.
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          // Record the start time when the first audio frame arrives.
          if (_isFirstAudioFrame) {
            _startAudioTime = currentTime;
            _isFirstAudioFrame = false;
          }

          // Compute the audio delay.
          final expectedTime = _startAudioTime + talkData.durationMs;
          final audioDelay = currentTime - expectedTime;

          // If the delay is too large, clear the buffer and play directly.
          if (audioDelay > 500) {
            state.audioBuffer.clear();
            if (state.isOpenVoice.value) {
              _playAudioFrames();
            }
            return;
          }
          if (state.audioBuffer.length >= audioBufferSize) {
            state.audioBuffer.removeAt(0); // Drop the oldest entry.
          }
          state.audioBuffer.add(talkData); // Add the new entry.
          // Audio playback logic, analogous to the video path.
          _playAudioFrames();
          break;
        case TalkData_ContentTypeE.Image:
          // Record the start time when the first video frame arrives.
          if (_isFirstFrame) {
            _startTime = currentTime;
            _isFirstFrame = false;
            AppLog.log('记录第一帧的时间戳${currentTime},${talkData.durationMs}');
          }

          // Actual delay = current time - expected playback time.
          final expectedTime = _startTime + talkData.durationMs;
          final videoDelay = currentTime - expectedTime;

          // Dynamically adjust the buffer size.
          _adjustBufferSize(videoDelay);
          // Then add the frame to the playback buffer.
          if (state.videoBuffer.length >= bufferSize) {
            state.videoBuffer.removeAt(0);
          }
          state.videoBuffer.add(talkData);
          // Decode and cache first...
          await _decodeAndCacheFrame(talkData);
          // ...then try to play.
          _playVideoFrames();
          break;
      }
    });
  }
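
  // Illustration of the delay calculation above (values are hypothetical):
  // if the first frame arrived at currentTime = 1_000_000 ms and a later frame
  // carries durationMs = 500, its expected play time is 1_000_500 ms; if it
  // actually arrives at 1_000_620 ms the computed delay is 120 ms, small
  // enough (< 500 ms for audio) to go through the jitter buffer as usual.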

  // Video frame playback logic.
  void _playVideoFrames() {
    // Do not play while the buffer is empty or below the target size.
    if (state.videoBuffer.isEmpty || state.videoBuffer.length < bufferSize) {
      // AppLog.log('📊 缓冲中 - 当前缓冲区大小: ${state.videoBuffer.length}/${bufferSize}');
      return;
    }
    // Find the frame with the smallest timestamp (the oldest frame).
    TalkData? oldestFrame;
    int oldestIndex = -1;
    for (int i = 0; i < state.videoBuffer.length; i++) {
      if (oldestFrame == null ||
          state.videoBuffer[i].durationMs < oldestFrame.durationMs) {
        oldestFrame = state.videoBuffer[i];
        oldestIndex = i;
      }
    }
    // Make sure a valid frame was found.
    if (oldestFrame != null && oldestIndex != -1) {
      final cacheKey = oldestFrame.content.hashCode.toString();

      // Update the display with the cached, decoded image.
      if (_imageCache.containsKey(cacheKey)) {
        state.currentImage.value = _imageCache[cacheKey];
        state.listData.value = Uint8List.fromList(oldestFrame.content);
        state.videoBuffer.removeAt(oldestIndex); // Remove the played frame.

        // AppLog.log('🎬 播放帧 - 缓冲区剩余: ${state.videoBuffer.length}/${bufferSize}, '
        //     '播放延迟: ${currentTime - oldestFrame.durationMs}ms, '
        //     '帧时间戳: ${oldestFrame.durationMs}');
      } else {
        // AppLog.log('⚠️ 帧未找到缓存 - Key: $cacheKey');
        state.videoBuffer.removeAt(oldestIndex); // Remove the unplayable frame.
      }
    }
  }

  // Audio frame playback logic.
  void _playAudioFrames() {
    // Do not play while the buffer is empty or below the target size.
    // The audio buffer is kept smaller to reduce latency.
    if (state.audioBuffer.isEmpty ||
        state.audioBuffer.length < audioBufferSize) {
      return;
    }

    // Find the audio frame with the smallest timestamp.
    TalkData? oldestFrame;
    int oldestIndex = -1;
    for (int i = 0; i < state.audioBuffer.length; i++) {
      if (oldestFrame == null ||
          state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
        oldestFrame = state.audioBuffer[i];
        oldestIndex = i;
      }
    }

    // Make sure a valid frame was found.
    if (oldestFrame != null && oldestIndex != -1) {
      if (state.isOpenVoice.value) {
        // Play the audio.
        _playAudioData(oldestFrame);
      }
      state.audioBuffer.removeAt(oldestIndex);
    }
  }

  // Decodes a frame and stores it in the image cache.
  Future<void> _decodeAndCacheFrame(TalkData talkData) async {
    try {
      final String cacheKey = talkData.content.hashCode.toString();

      // Decode and cache the frame only if it is not cached yet.
      if (!_imageCache.containsKey(cacheKey)) {
        final Uint8List uint8Data = Uint8List.fromList(talkData.content);
        final ui.Image image = await decodeImageFromList(uint8Data);

        // Keep the cache bounded.
        if (_imageCache.length >= bufferSize) {
          _imageCache.remove(_imageCache.keys.first);
        }

        // Add the decoded image to the cache.
        _imageCache[cacheKey] = image;

        // AppLog.log('📥 缓存新帧 - 缓存数: ${_imageCache.length}, Key: $cacheKey');
      }
    } catch (e) {
      AppLog.log('❌ 帧解码错误: $e');
    }
  }

  // Dynamically adjusts the video buffer size based on the observed delay.
  void _adjustBufferSize(int delay) {
    const int delayThresholdHigh = 250; // High-delay threshold (about 3 frames).
    const int delayThresholdLow = 166; // Low-delay threshold (about 2 frames).
    const int adjustInterval = 1; // Adjust by 1 frame per step.

    if (delay > delayThresholdHigh && bufferSize < maxBufferSize) {
      // Delay is high: grow the buffer.
      bufferSize = min(bufferSize + adjustInterval, maxBufferSize);
      AppLog.log('📈 增加缓冲区 - 当前大小: $bufferSize, 延迟: ${delay}ms');
    } else if (delay < delayThresholdLow && bufferSize > minBufferSize) {
      // Delay is low: shrink the buffer.
      bufferSize = max(bufferSize - adjustInterval, minBufferSize);
      AppLog.log('📉 减少缓冲区 - 当前大小: $bufferSize, 延迟: ${delay}ms');
    }
  }
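
  // Example of the adjustment above (hypothetical values): with bufferSize = 3,
  // a measured delay of 300 ms (> 250) grows the buffer to 4; a later delay of
  // 100 ms (< 166) shrinks it back toward minBufferSize, one frame per call.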

  /// Listens for intercom (talk) status changes.
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          _handleInvalidTalkStatus();
          break;
        case TalkStatus.answeredSuccessfully:
          state.oneMinuteTimeTimer?.cancel(); // Cancel any old timer.
          // Start a fresh 1-second timer (a `??=` here would keep the
          // cancelled timer and never restart it).
          state.oneMinuteTimeTimer =
              Timer.periodic(const Duration(seconds: 1), (Timer t) {
            if (state.listData.value.isNotEmpty) {
              state.oneMinuteTime.value++;
              if (state.oneMinuteTime.value >= 60) {
                t.cancel(); // Stop the timer after one minute.
                state.oneMinuteTime.value = 0;
              }
            }
          });
          break;
        default:
          // Other statuses need no handling here.
          break;
      }
    });
  }

  /// Plays a chunk of audio data.
  void _playAudioData(TalkData talkData) async {
    if (state.isOpenVoice.value) {
      final list =
          G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
      // Convert the PCM data to a PcmArrayInt16.
      final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
      FlutterPcmSound.feed(fromList);
      if (!state.isPlaying.value) {
        FlutterPcmSound.play();
        state.isPlaying.value = true;
      }
    }
  }

  /// Stops audio playback.
  void _stopPlayG711Data() async {
    await FlutterPcmSound.pause();
    await FlutterPcmSound.stop();
    await FlutterPcmSound.clear();
  }

  /// Open the door (kept for reference; currently commented out).
  // udpOpenDoorAction() async {
  //   final List<String>? privateKey =
  //       await Storage.getStringList(saveBluePrivateKey);
  //   final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
  //
  //   final List<String>? signKey = await Storage.getStringList(saveBlueSignKey);
  //   final List<int> signKeyDataList = changeStringListToIntList(signKey!);
  //
  //   final List<String>? token = await Storage.getStringList(saveBlueToken);
  //   final List<int> getTokenList = changeStringListToIntList(token!);
  //
  //   await _getLockNetToken();
  //
  //   final OpenLockCommand openLockCommand = OpenLockCommand(
  //     lockID: BlueManage().connectDeviceName,
  //     userID: await Storage.getUid(),
  //     openMode: lockDetailState.openDoorModel,
  //     openTime: _getUTCNetTime(),
  //     onlineToken: lockDetailState.lockNetToken,
  //     token: getTokenList,
  //     needAuthor: 1,
  //     signKey: signKeyDataList,
  //     privateKey: getPrivateKeyList,
  //   );
  //   final messageDetail = openLockCommand.packageData();
  //   // Convert the List<int> into a hex string.
  //   String hexString = messageDetail
  //       .map((byte) => byte.toRadixString(16).padLeft(2, '0'))
  //       .join(' ');
  //
  //   AppLog.log('open lock hexString: $hexString');
  //   // Send the remote unlock message.
  //   StartChartManage().sendRemoteUnLockMessage(
  //     bluetoothDeviceName: BlueManage().connectDeviceName,
  //     openLockCommand: messageDetail,
  //   );
  //   showToast('正在开锁中...'.tr);
  // }

  int _getUTCNetTime() {
    if (lockDetailState.isHaveNetwork) {
      return DateTime.now().millisecondsSinceEpoch ~/ 1000 +
          lockDetailState.differentialTime;
    } else {
      return 0;
    }
  }

  // Fetches the phone's online-unlock token. Whether this endpoint is called
  // depends on the lock setting "requires network when unlocking".
  Future<void> _getLockNetToken() async {
    final LockNetTokenEntity entity = await ApiRepository.to.getLockNetToken(
        lockId: lockDetailState.keyInfos.value.lockId.toString());
    if (entity.errorCode!.codeIsSuccessful) {
      lockDetailState.lockNetToken = entity.data!.token!.toString();
      AppLog.log('从服务器获取联网token:${lockDetailState.lockNetToken}');
    } else {
      BuglyTool.uploadException(
          message: '点击了需要联网开锁', detail: '点击了需要联网开锁 获取连网token失败', upload: true);
      showToast('网络访问失败,请检查网络是否正常'.tr, something: () {});
    }
  }

  /// Gets the current microphone permission status.
  Future<bool> getPermissionStatus() async {
    final Permission permission = Permission.microphone;
    // granted: allowed; denied: refused; permanentlyDenied: refused, do not ask again.
    final PermissionStatus status = await permission.status;
    if (status.isGranted) {
      return true;
    } else if (status.isDenied) {
      requestPermission(permission);
    } else if (status.isPermanentlyDenied) {
      openAppSettings();
    } else if (status.isRestricted) {
      requestPermission(permission);
    } else {}
    return false;
  }

  /// Requests a single permission.
  void requestPermission(Permission permission) async {
    final PermissionStatus status = await permission.request();
    if (status.isPermanentlyDenied) {
      openAppSettings();
    }
  }

  Future<void> requestPermissions() async {
    // Request storage permission.
    var storageStatus = await Permission.storage.request();
    // Request microphone permission.
    var microphoneStatus = await Permission.microphone.request();

    if (storageStatus.isGranted && microphoneStatus.isGranted) {
      print("Permissions granted");
    } else {
      print("Permissions denied");
      // If a permission was denied, prompt the user or open the settings page.
      if (await Permission.storage.isPermanentlyDenied) {
        openAppSettings(); // Jump to the app settings page.
      }
    }
  }

  Future<void> startRecording() async {
    await requestPermissions();
    if (state.isRecordingScreen.value) {
      showToast('录屏已开始,请勿重复点击');
      return; // Already recording; do not start a second session.
    }
    final bool start = await FlutterScreenRecording.startRecordScreen(
      "Screen Recording", // Video file name.
      titleNotification: "Recording in progress", // Notification title.
      messageNotification: "Tap to stop recording", // Notification body.
    );

    if (start) {
      state.isRecordingScreen.value = true;
    }
  }

  Future<void> stopRecording() async {
    final String path = await FlutterScreenRecording.stopRecordScreen;
    print("Recording saved to: $path");

    // Save the video to the system gallery.
    final bool? success = await GallerySaver.saveVideo(path);
    if (success == true) {
      print("Video saved to gallery");
    } else {
      print("Failed to save video to gallery");
    }

    showToast('录屏结束,已保存到系统相册');
    state.isRecordingScreen.value = false;
  }

  @override
  void onReady() {
    super.onReady();
  }

  @override
  void onInit() {
    super.onInit();

    // Start listening to the audio/video data stream.
    _startListenTalkData();
    // Start listening to the talk status.
    _startListenTalkStatus();

    // Seed the status once before the listener delivers anything.
    // *** The page is initialized after the status has already changed, so the
    // latest status would otherwise be missed; assign it manually here. ***
    state.talkStatus.value = state.startChartTalkStatus.status;

    // Initialize the audio player.
    _initFlutterPcmSound();

    // Start the playback timer.
    // _startPlayback();

    // Initialize the audio recorder.
    _initAudioRecorder();

    requestPermissions();
  }

  @override
  void onClose() {
    _stopPlayG711Data(); // Stop audio playback.
    state.listData.value = Uint8List(0); // Clear the video data.
    state.audioBuffer.clear(); // Clear the audio buffer.
    state.videoBuffer.clear(); // Clear the video buffer.

    state.oneMinuteTimeTimer?.cancel();
    state.oneMinuteTimeTimer = null;

    stopProcessingAudio();
    // Clear the image cache.
    _imageCache.clear();

    super.onClose();
  }

  @override
  void dispose() {
    stopProcessingAudio();
    // Reset the expected-data settings.
    StartChartManage().reSetDefaultTalkExpect();
    super.dispose();
  }

  /// Handles an invalid talk status.
  void _handleInvalidTalkStatus() {
    state.listData.value = Uint8List(0);
    // Stop audio playback.
    _stopPlayG711Data();
    stopProcessingAudio();
  }

  /// Toggles mute and updates the expected media types requested from the peer.
  void updateTalkExpect() {
    TalkExpectReq talkExpectReq = TalkExpectReq();
    state.isOpenVoice.value = !state.isOpenVoice.value;
    if (!state.isOpenVoice.value) {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [],
      );
      showToast('已静音'.tr);
    } else {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );
    }

    // Send the updated expectation.
    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
        talkExpect: talkExpectReq);
  }
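
  // In other words: when muted, the peer is asked to keep sending video
  // (VideoTypeE.IMAGE) but no audio (empty audioType); unmuting restores
  // G.711 audio in the expectation. This assumes the lock honours the
  // TalkExpectReq it receives.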

  /// Takes a screenshot and saves it to the photo album.
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('截图失败: 未找到当前上下文');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);

      if (byteData == null) {
        AppLog.log('截图失败: 图像数据为空');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();

      // Get the application documents directory.
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';

      // Write the screenshot to a file.
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);

      // Save the screenshot to the photo album.
      await ImageGallerySaver.saveFile(imagePath);

      AppLog.log('截图保存路径: $imagePath');
      showToast('截图已保存到相册'.tr);
    } catch (e) {
      AppLog.log('截图失败: $e');
    }
  }

  // Remote unlock.
  Future<void> remoteOpenLock() async {
    final lockPeerId = StartChartManage().lockPeerId;
    final lockListPeerId = StartChartManage().lockListPeerId;
    int lockId = lockDetailState.keyInfos.value.lockId ?? 0;

    // If the lock list carries peerIds, there is more than one lock; look up
    // the lockId whose peerId matches the current one.
    lockListPeerId.forEach((element) {
      if (element.network?.peerId == lockPeerId) {
        lockId = element.lockId ?? 0;
      }
    });

    final LockSetInfoEntity lockSetInfoEntity =
        await ApiRepository.to.getLockSettingInfoData(
      lockId: lockId.toString(),
    );
    if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
      if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
          lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
        final LoginEntity entity = await ApiRepository.to
            .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
        if (entity.errorCode!.codeIsSuccessful) {
          showToast('已开锁'.tr);
          StartChartManage().lockListPeerId = [];
        }
      } else {
        showToast('该锁的远程开锁功能未启用'.tr);
      }
    }
  }

  /// Initializes the audio recorder.
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }

  // Starts recording audio.
  Future<void> startProcessingAudio() async {
    try {
      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
        final bool? isRecording = await state.voiceProcessor?.isRecording();
        state.isRecordingAudio.value = isRecording!;
        state.startRecordingAudioTime.value = DateTime.now();

        // Register the frame listener and the error listener.
        state.voiceProcessor
            ?.addFrameListeners(<VoiceProcessorFrameListener>[_onFrame]);
        state.voiceProcessor?.addErrorListener(_onError);
      } else {
        // state.errorMessage.value = 'Recording permission not granted';
      }
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to start recorder: $ex';
    }
    state.isOpenVoice.value = false;
  }

  /// Stops recording audio.
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record the end time.
      state.endRecordingAudioTime.value = DateTime.now();

      // Compute how long the recording lasted.
      final Duration duration = state.endRecordingAudioTime.value
          .difference(state.startRecordingAudioTime.value);

      state.recordingAudioTime.value = duration.inSeconds;
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to stop recorder: $ex';
    } finally {
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording!;
      state.isOpenVoice.value = true;
    }
  }

  // Handles one recorded audio frame.
  Future<void> _onFrame(List<int> frame) async {
    final List<int> processedFrame = preprocessAudio(frame);
    final List<int> list = listLinearToALaw(processedFrame);
    _bufferedAudioFrames.addAll(list);

    final int ms = DateTime.now().millisecondsSinceEpoch -
        state.startRecordingAudioTime.value.millisecondsSinceEpoch;

    int getFrameLength = state.frameLength;
    if (Platform.isIOS) {
      getFrameLength = state.frameLength * 2;
    }

    if (_bufferedAudioFrames.length >= getFrameLength) {
      // Send the buffered audio data over UDP.
      await StartChartManage()
          .sendTalkDataMessage(
        talkData: TalkData(
          content: _bufferedAudioFrames,
          contentType: TalkData_ContentTypeE.G711,
          durationMs: ms,
        ),
      )
          .then((value) {
        _bufferedAudioFrames.clear();
      });
    }
  }
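
  // For a sense of scale (hypothetical numbers, since state.frameLength is
  // defined in TalkViewState): with frameLength = 512 samples at the 8 kHz
  // G.711 rate and one A-law byte per sample, a packet would be sent roughly
  // every 64 ms, and about every 128 ms on iOS where the threshold is doubled.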

  // Error listener for the voice processor.
  void _onError(VoiceProcessorException error) {
    AppLog.log(error.message!);
  }

  List<int> preprocessAudio(List<int> pcmList) {
    // Simple noise suppression.
    final List<int> processedList = [];
    for (int pcmVal in pcmList) {
      // Naive noise gate: zero out samples below the threshold.
      if (pcmVal.abs() < 200) {
        pcmVal = 0;
      }
      processedList.add(pcmVal);
    }
    return processedList;
  }
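
  // Example of the gate above: a sample of 150 (|150| < 200) becomes 0, while
  // a sample of -500 passes through unchanged; only near-silence is removed.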

  // Test: experimental dynamic-threshold noise suppression (kept for reference).
  // List<int> preprocessAudio(List<int> pcmList) {
  //   final List<int> processedList = [];
  //   final int windowSize = 5;
  //   final int thresholdFactor = 2; // Multiplier for the dynamic threshold.
  //
  //   for (int i = 0; i < pcmList.length; i++) {
  //     int pcmVal = pcmList[i];
  //
  //     // Compute the mean of the current window.
  //     int sum = 0;
  //     int count = 0;
  //     for (int j = i; j < i + windowSize && j < pcmList.length; j++) {
  //       sum += pcmList[j];
  //       count++;
  //     }
  //     int mean = sum ~/ count;
  //
  //     // Compute the standard deviation.
  //     int varianceSum = 0;
  //     for (int j = i; j < i + windowSize && j < pcmList.length; j++) {
  //       varianceSum += (pcmList[j] - mean) * (pcmList[j] - mean);
  //     }
  //     double standardDeviation =
  //         sqrt(varianceSum / count); // Use sqrt from dart:math
  //
  //     // Dynamic threshold.
  //     int dynamicThreshold = (standardDeviation * thresholdFactor).toInt();
  //
  //     // Dynamic noise gate: zero the sample if it is below the threshold.
  //     if (pcmVal.abs() < dynamicThreshold) {
  //       pcmVal = 0;
  //     }
  //
  //     // Moving-average filter.
  //     int sumFilter = 0;
  //     int countFilter = 0;
  //     for (int j = i; j < i + windowSize && j < pcmList.length; j++) {
  //       sumFilter += pcmList[j];
  //       countFilter++;
  //     }
  //     int average = sumFilter ~/ countFilter;
  //
  //     processedList.add(average);
  //   }
  //
  //   return processedList;
  // }

  List<int> adjustVolume(List<int> pcmList, double volume) {
    final List<int> adjustedPcmList = <int>[];
    for (final int pcmVal in pcmList) {
      // Apply the gain.
      int adjustedPcmVal = (pcmVal * volume).round();

      // Clip to the 16-bit PCM range.
      if (adjustedPcmVal > 32767) {
        adjustedPcmVal = 32767;
      } else if (adjustedPcmVal < -32768) {
        adjustedPcmVal = -32768;
      }

      adjustedPcmList.add(adjustedPcmVal);
    }
    return adjustedPcmList;
  }
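
  // Example: adjustVolume([1000, 10000, -30000], 5.0) yields
  // [5000, 32767, -32768]; the last two products (50000 and -150000) are
  // clipped to the 16-bit limits, so a 5x gain can distort loud input.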

  List<int> listLinearToALaw(List<int> pcmList) {
    // Adjust the volume first.
    final List<int> adjustedPcmList = adjustVolume(pcmList, 5.0);

    // Then A-law encode each sample.
    final List<int> aLawList = <int>[];
    for (final int pcmVal in adjustedPcmList) {
      final int aLawVal = linearToALaw(pcmVal);
      aLawList.add(aLawVal);
    }
    return aLawList;
  }

  int linearToALaw(int pcmVal) {
    const int alawMax = 0x7FFF; // 32767
    const int alawBias = 0x84; // 132

    int mask;
    int seg;
    int aLawVal;

    // Handle sign
    if (pcmVal < 0) {
      pcmVal = -pcmVal;
      mask = 0x7F; // 127 (sign bit is 1)
    } else {
      mask = 0xFF; // 255 (sign bit is 0)
    }

    // Add bias and clamp to ALAW_MAX
    pcmVal += alawBias;
    if (pcmVal > alawMax) {
      pcmVal = alawMax;
    }

    // Determine segment
    seg = search(pcmVal);

    // Calculate A-law value
    if (seg >= 8) {
      aLawVal = 0x7F ^ mask; // Clamp to maximum value
    } else {
      final int quantized = (pcmVal >> (seg + 3)) & 0xF;
      aLawVal = (seg << 4) | quantized;
      aLawVal ^= 0xD5; // XOR with 0xD5 to match standard A-law table
    }

    return aLawVal;
  }
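
  // Worked example of linearToALaw (positive input): pcmVal = 1000 gives
  // mask = 0xFF, biased value 1132, segment search(1132) = 3 (1132 <= 0x7FF),
  // quantized = (1132 >> 6) & 0xF = 1, so aLawVal = 0x31 ^ 0xD5 = 0xE4.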

  int search(int val) {
    final List<int> table = <int>[
      0xFF, // Segment 0
      0x1FF, // Segment 1
      0x3FF, // Segment 2
      0x7FF, // Segment 3
      0xFFF, // Segment 4
      0x1FFF, // Segment 5
      0x3FFF, // Segment 6
      0x7FFF // Segment 7
    ];
    const int size = 8;
    for (int i = 0; i < size; i++) {
      if (val <= table[i]) {
        return i;
      }
    }
    return size;
  }
}