app-starlock/lib/talk/startChart/views/talkView/talk_view_logic.dart

import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/startChart/proto/talk_data.pbenum.dart';
import 'package:star_lock/talk/startChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/startChart/start_chart_manage.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
import '../../../../tools/baseGetXController.dart';

class TalkViewLogic extends BaseGetXController {
  final TalkViewState state = TalkViewState();
  Timer? _syncTimer; // Timer that drives audio/video playback
  int _startTime = 0; // Playback start timestamp, used to compare frame timestamps against elapsed time
  final int bufferSize = 8; // Buffer size (in frames)
  final List<int> frameTimestamps = []; // Frame render timestamps used to compute FPS
  int frameIntervalMs = 45; // Initial frame interval: 45 ms, roughly 22 FPS
  int minFrameIntervalMs = 30; // Minimum frame interval, roughly 33 FPS
  int maxFrameIntervalMs = 500; // Maximum frame interval, roughly 2 FPS
  // int maxFrameIntervalMs = 100; // Maximum frame interval, roughly 10 FPS
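
  // Note: the playback path below behaves like a simple adaptive jitter buffer.
  // Incoming frames are queued per stream (up to bufferSize), and the sync
  // timer drains them by comparing each frame's durationMs with the elapsed
  // wall-clock time; _adjustFrameInterval widens the tick interval when the
  // video buffer runs low and narrows it when the buffer backs up.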

  /// Initialize the audio player.
  void _initFlutterPcmSound() {
    const int sampleRate = 44100;
    FlutterPcmSound.setLogLevel(LogLevel.verbose);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 2);
    // Set the feed threshold.
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(-1); // Special handling on Android
    } else {
      FlutterPcmSound.setFeedThreshold(sampleRate ~/ 32); // Other platforms
    }
  }

  /// Hang up.
  void udpHangUpAction() {
    if (state.talkStatus.value == TalkStatus.duringCall) {
      // Already in a call: hang up.
      StartChartManage().sendTalkHangupMessage();
    } else {
      // Not yet connected: reject the call.
      StartChartManage().sendTalkRejectMessage();
    }
    Get.back();
  }

  /// Send the answer (accept) command.
  void initiateAnswerCommand() {
    StartChartManage().sendTalkAcceptMessage();
  }

  /// Recompute the displayed FPS from the render timestamps of the last second.
  void _updateFps(List<int> frameTimestamps) {
    final int now = DateTime.now().millisecondsSinceEpoch;
    // Drop timestamps older than one second.
    frameTimestamps.removeWhere((timestamp) => now - timestamp > 1000);
    // The number of remaining timestamps is the FPS over the last second.
    state.fps.value = frameTimestamps.length.toDouble();
  }

  /// Listen to the audio/video data stream.
  void _startListenTalkData() {
    state.talkDataRepository.talkDataStream.listen((TalkData talkData) {
      final contentType = talkData.contentType;
      final currentTimestamp = DateTime.now().millisecondsSinceEpoch;
      // Ignore talk data unless a call is in progress.
      if (state.startChartTalkStatus.status != TalkStatus.duringCall) {
        return;
      }
      // Dispatch by content type; frames arriving while a buffer is full are dropped.
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          if (state.audioBuffer.length < bufferSize) {
            state.audioBuffer.add(talkData);
          }
          // print('Received audio data');
          break;
        case TalkData_ContentTypeE.Image:
          if (state.videoBuffer.length < bufferSize) {
            state.videoBuffer.add(talkData);
          }
          print('talkData durationMs-->:${talkData.durationMs}');
          // Update the network status based on the frame arrival interval.
          updateNetworkStatus(currentTimestamp);
          break;
      }
    });
  }

  /// Listen for intercom status changes.
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          _handleInvalidTalkStatus();
          break;
        default:
          // No handling needed for the other statuses.
          break;
      }
    });
  }

  /// Play one frame of audio data.
  void _playAudioData(TalkData talkData) {
    // Wrap the PCM payload as a PcmArrayInt16 and feed it to the player.
    final PcmArrayInt16 fromList = PcmArrayInt16.fromList(talkData.content);
    FlutterPcmSound.feed(fromList);
    if (!state.isPlaying.value) {
      FlutterPcmSound.play();
      state.isPlaying.value = true;
    }
  }

  /// Render one frame of video data.
  void _playVideoData(TalkData talkData) {
    state.listData.value = Uint8List.fromList(talkData.content);
  }

  /// Start playback: after a short buffering delay, record the start time and
  /// launch the sync timer that drains the audio/video buffers.
  void _startPlayback() {
    Future.delayed(const Duration(milliseconds: 800), () {
      _startTime = DateTime.now().millisecondsSinceEpoch;
      _restartSyncTimer();
    });
  }

  /// (Re)create the sync timer using the current frame interval.
  void _restartSyncTimer() {
    _syncTimer?.cancel();
    _syncTimer =
        Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
      final currentTime = DateTime.now().millisecondsSinceEpoch;
      final elapsedTime = currentTime - _startTime;
      // Adjust the frame interval to the current buffer depth; this restarts
      // the timer whenever the interval changes.
      _adjustFrameInterval();
      _playBufferedFrames(elapsedTime);
    });
  }

  /// Play the buffered audio/video frames that are due at [elapsedTime].
  void _playBufferedFrames(int elapsedTime) {
    // Play the next audio frame whose timestamp has been reached.
    if (state.audioBuffer.isNotEmpty &&
        state.audioBuffer.first.durationMs <= elapsedTime) {
      if (state.isOpenVoice.value) {
        _playAudioData(state.audioBuffer.removeAt(0));
      } else {
        // Audio is muted: discard the frame so the buffer stays in step with
        // real time instead of growing without bound.
        state.audioBuffer.removeAt(0);
      }
    }
    // Frame-skipping strategy: if several video frames are already due, drop
    // the stale ones and render only the newest.
    while (state.videoBuffer.isNotEmpty &&
        state.videoBuffer.first.durationMs <= elapsedTime) {
      if (state.videoBuffer.length > 1) {
        state.videoBuffer.removeAt(0);
      } else {
        // // Record the render timestamp and recompute the FPS.
        // frameTimestamps.add(DateTime.now().millisecondsSinceEpoch);
        // _updateFps(frameTimestamps);
        _playVideoData(state.videoBuffer.removeAt(0));
      }
    }
  }

  /// Dynamically adjust the frame interval based on the video buffer depth.
  /// Note: with bufferSize = 8 the video buffer can never exceed 20 frames,
  /// so in practice only the slow-down branch can fire.
  void _adjustFrameInterval() {
    final int previousInterval = frameIntervalMs;
    if (state.videoBuffer.length < 10 && frameIntervalMs < maxFrameIntervalMs) {
      // The buffer is running low: lengthen the interval (slow playback down)
      // so the buffer can refill.
      frameIntervalMs += 5;
    } else if (state.videoBuffer.length > 20 &&
        frameIntervalMs > minFrameIntervalMs) {
      // The buffer is backing up: shorten the interval (speed playback up).
      frameIntervalMs -= 5;
    }
    // Only restart the timer when the interval actually changed.
    if (frameIntervalMs != previousInterval) {
      _restartSyncTimer();
    }
  }

  /// Update the network status based on the interval between video frames.
  void updateNetworkStatus(int currentTimestamp) {
    if (state.lastFrameTimestamp.value != 0) {
      final frameInterval = currentTimestamp - state.lastFrameTimestamp.value;
      if (frameInterval > 500 && frameInterval <= 1000) {
        // Frame interval between 500 ms and 1 s: the network is lagging.
        state.networkStatus.value = NetworkStatus.lagging;
        showNetworkStatus("Network is lagging");
      } else if (frameInterval > 1000) {
        // Frame interval above 1 s: the network is delayed.
        state.networkStatus.value = NetworkStatus.delayed;
        showNetworkStatus("Network is delayed");
      } else {
        state.networkStatus.value = NetworkStatus.normal;
        state.alertCount.value = 0; // Reset the alert counter.
      }
    }
    state.lastFrameTimestamp.value = currentTimestamp;
  }

  /// Show a network status toast.
  void showNetworkStatus(String message) {
    // Only show the toast if the alert limit has not been reached
    // and EasyLoading is not already showing something.
    if (state.alertCount.value < state.maxAlertNumber.value &&
        !EasyLoading.isShow) {
      showToast(message);
      state.alertCount++;
    }
  }

  /// Stop audio playback and clear the player's queue.
  Future<void> _stopPlayG711Data() async {
    await FlutterPcmSound.pause();
    await FlutterPcmSound.stop();
    await FlutterPcmSound.clear();
  }

  /// Open the door (not implemented yet).
  Future<void> udpOpenDoorAction(List<int> list) async {}

  /// Check the microphone permission status.
  /// granted: allowed; denied: refused; permanentlyDenied: refused and never ask again.
  Future<bool> getPermissionStatus() async {
    final Permission permission = Permission.microphone;
    final PermissionStatus status = await permission.status;
    if (status.isGranted) {
      return true;
    } else if (status.isDenied) {
      requestPermission(permission);
    } else if (status.isPermanentlyDenied) {
      openAppSettings();
    } else if (status.isRestricted) {
      requestPermission(permission);
    }
    return false;
  }

  /// Request the given permission.
  Future<void> requestPermission(Permission permission) async {
    final PermissionStatus status = await permission.request();
    if (status.isPermanentlyDenied) {
      openAppSettings();
    }
  }

  @override
  void onReady() {
    super.onReady();
  }

  @override
  void onInit() {
    super.onInit();
    // Start listening to the audio/video data stream.
    _startListenTalkData();
    // Start listening for intercom status changes.
    _startListenTalkStatus();
    // Seed the status before the listener delivers anything:
    // the page may be initialized only after the status has already changed,
    // so the latest status would otherwise be missed; assign it manually here.
    state.talkStatus.value = state.startChartTalkStatus.status;
    // Initialize the audio player.
    _initFlutterPcmSound();
    // Start the playback timer.
    _startPlayback();
    // Initialize the audio recorder.
    _initAudioRecorder();
  }

  @override
  void onClose() {
    _stopPlayG711Data();
    state.listData.value = Uint8List(0);
    _syncTimer?.cancel();
    _syncTimer = null;
    super.onClose();
  }

  /// Handle a terminal/invalid call status.
  void _handleInvalidTalkStatus() {
    state.listData.value = Uint8List(0);
    // Stop audio playback.
    _stopPlayG711Data();
    // The call is no longer valid, so leave the page.
    Get.back();
  }

  /// Toggle the audio stream and update the expected data types on the device.
  void updateTalkExpect() {
    TalkExpectReq talkExpectReq;
    if (state.isOpenVoice.value) {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );
    } else {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [],
      );
    }
    // Send the updated expectation.
    StartChartManage().changeTalkExpectDataType(talkExpect: talkExpectReq);
    state.isOpenVoice.value = !state.isOpenVoice.value;
  }

  /// Capture a screenshot of the video view and save it to the gallery.
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('Screenshot failed: no current context');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);
      if (byteData == null) {
        AppLog.log('Screenshot failed: image data is empty');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();
      // Write the screenshot into the application documents directory.
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);
      // Then save the file to the gallery.
      await ImageGallerySaver.saveFile(imagePath);
      AppLog.log('Screenshot saved to: $imagePath');
      showToast('截图已保存到相册'.tr);
    } catch (e) {
      AppLog.log('Screenshot failed: $e');
    }
  }

  /// Start screen recording.
  Future<void> startRecording() async {
    await getPermissionStatus();
    final bool started =
        await FlutterScreenRecording.startRecordScreenAndAudio("Recording");
    if (started) {
      state.isRecordingScreen.value = true;
    }
  }

  /// Stop screen recording and save the result to the gallery.
  Future<void> stopRecording() async {
    final String path = await FlutterScreenRecording.stopRecordScreen;
    state.isRecordingScreen.value = false;
    if (path.isNotEmpty) {
      // Save the recorded video to the gallery.
      // await GallerySaver.saveVideo(path).then((bool? success) {});
      await ImageGallerySaver.saveFile(path);
      showToast('录屏已保存到相册'.tr);
    } else {
      print("Recording failed");
    }
  }

  /// Initialize the audio recorder.
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }

  /// Start recording audio from the microphone.
  Future<void> startProcessingAudio() async {
    // Register the frame and error listeners.
    state.voiceProcessor?.addFrameListener(_onFrame);
    state.voiceProcessor?.addErrorListener(_onError);
    try {
      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
        final bool? isRecording = await state.voiceProcessor?.isRecording();
        state.isRecordingAudio.value = isRecording ?? false;
        state.startRecordingAudioTime.value = DateTime.now();
      } else {
        // state.errorMessage.value = 'Recording permission not granted';
      }
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to start recorder: $ex';
      AppLog.log('Failed to start recorder: $ex');
    }
  }

  /// Stop recording audio.
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record the end time and compute the recording duration.
      state.endRecordingAudioTime.value = DateTime.now();
      final duration = state.endRecordingAudioTime.value!
          .difference(state.startRecordingAudioTime.value!);
      state.recordingAudioTime.value = duration.inSeconds;
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to stop recorder: $ex';
      AppLog.log('Failed to stop recorder: $ex');
    } finally {
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording ?? false;
    }
  }

  /// Called for every recorded PCM frame; encodes it and sends it to the device.
  Future<void> _onFrame(List<int> frame) async {
    state.recordingAudioAllFrames.add(frame); // Keep the raw frame in state.
    // final List<int> concatenatedFrames =
    //     _concatenateFrames(state.recordingAudioAllFrames); // Concatenate all frames
    final List<int> pcmBytes = _listLinearToULaw(frame);
    // Send the encoded audio data.
    StartChartManage().sendTalkDataMessage(
      talkData: TalkData(
        content: pcmBytes,
        contentType: TalkData_ContentTypeE.G711,
        durationMs: DateTime.now().millisecondsSinceEpoch -
            state.startRecordingAudioTime.value.millisecondsSinceEpoch,
      ),
    );
  }

  void _onError(VoiceProcessorException error) {
    // state.errorMessage.value = error.message!;
    AppLog.log(error.message!);
  }
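
  // The two helpers below implement standard G.711 µ-law companding: each
  // 16-bit linear sample is biased, classified into one of eight logarithmic
  // segments by search(), and packed into a single byte (sign, 3-bit segment,
  // 4-bit mantissa) that is then bit-inverted.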

  /// Convert a list of 16-bit linear PCM samples into G.711 µ-law bytes
  /// (one byte per input sample).
  List<int> _listLinearToULaw(List<int> pcmList) {
    final List<int> uLawList = [];
    for (int pcmVal in pcmList) {
      final int uLawVal = _linearToULaw(pcmVal);
      uLawList.add(uLawVal);
    }
    return uLawList;
  }

  /// Convert a single 16-bit linear PCM sample into a G.711 µ-law byte.
  int _linearToULaw(int pcmVal) {
    int mask;
    int seg;
    int uval;
    if (pcmVal < 0) {
      // Negative samples: fold into the positive range and mark the sign.
      pcmVal = 0x84 - pcmVal;
      mask = 0x7F;
    } else {
      // Positive samples: add the µ-law bias (0x84 = 132).
      pcmVal += 0x84;
      mask = 0xFF;
    }
    seg = search(pcmVal);
    if (seg >= 8) {
      // Out of range: clamp to the maximum magnitude.
      return 0x7F ^ mask;
    } else {
      // Pack the segment (exponent) and the 4-bit mantissa, then invert.
      uval = seg << 4;
      uval |= (pcmVal >> (seg + 3)) & 0xF;
      return uval ^ mask;
    }
  }
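
  /// Find the µ-law segment (exponent) for a biased sample: the index of the
  /// first table entry that is >= val, or 8 if the value is out of range.
  /// For example, a raw sample of 1000 is biased to 1132 (1000 + 0x84), which
  /// falls in segment 3 (<= 0x7FF) and encodes to the µ-law byte 0xCE.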
  int search(int val) {
    final List<int> table = [
      0xFF,
      0x1FF,
      0x3FF,
      0x7FF,
      0xFFF,
      0x1FFF,
      0x3FFF,
      0x7FFF
    ];
    const int size = 8;
    for (int i = 0; i < size; i++) {
      if (val <= table[i]) {
        return i;
      }
    }
    return size;
  }
}