fix: add the fully-automatic image-transmission lock call page
parent 4a725be23f
commit 905368ec8d
@@ -203,6 +203,7 @@ import 'mine/valueAddedServices/valueAddedServicesRealName/value_added_services_
import 'mine/valueAddedServices/valueAddedServicesSMSTemplate/valueAddedServicesAddSMSTemplate/newSMSTemplate_page.dart';
import 'mine/valueAddedServices/valueAddedServicesSMSTemplate/valueAddedServicesListSMSTemplate/customSMSTemplateList_page.dart';
import 'starLockApplication/starLockApplication.dart';
import 'talk/starChart/views/imageTransmission/image_transmission_page.dart';
import 'tools/seletKeyCyclicDate/seletKeyCyclicDate_page.dart';

abstract class Routers {
@@ -515,6 +516,8 @@ abstract class Routers {
  static const String starChartPage = '/starChartPage'; // Star chart
  static const String starChartTalkView = '/starChartTalkView'; // Star chart talk page
  static const String h264WebView = '/h264WebView'; // Star chart talk page (H264)
  static const String imageTransmissionView =
      '/imageTransmissionView'; // Star chart talk page (image transmission)
}

abstract class AppRouters {
@@ -1185,7 +1188,13 @@ abstract class AppRouters {
        page: () => const DoubleLockLinkPage()),
    GetPage<dynamic>(
        name: Routers.starChartTalkView, page: () => const TalkViewPage()),
    GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()), // Plugin playback page
    GetPage<dynamic>(
        name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()),
    // Plugin playback page
    GetPage<dynamic>(
        name: Routers.imageTransmissionView,
        page: () => ImageTransmissionPage()),
    // Plugin playback page
    // GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()), // WebView playback page
  ];
}
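
For reference, the new page would typically be opened through the GetX router once an incoming image-transmission call is detected; a minimal sketch (the call site below is an assumption, not part of this commit):

    // Hypothetical call site: navigate to the image-transmission call page.
    Get.toNamed(Routers.imageTransmissionView);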
talk/starChart/views/imageTransmission/image_transmission_logic.dart
@@ -0,0 +1,667 @@
import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
import 'dart:ui' show decodeImageFromList;

import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_state.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';

import '../../../../tools/baseGetXController.dart';

class ImageTransmissionLogic extends BaseGetXController {
  ImageTransmissionState state = ImageTransmissionState();

  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

  int bufferSize = 8; // Enlarged video buffer; frames are rendered once it fills

  int audioBufferSize = 2; // Audio buffers two frames by default
  bool _isFirstAudioFrame = true; // Whether this is the first audio frame

  int _startAudioTime = 0; // Playback start timestamp

  // Buffer for outgoing audio frames
  final List<int> _bufferedAudioFrames = <int>[];

  // Listening flag and stream subscription reference
  bool _isListening = false;
  StreamSubscription? _streamSubscription;

  Timer? videoRenderTimer; // Video render timer

  int _renderedFrameCount = 0;
  int _lastFpsPrintTime = DateTime.now().millisecondsSinceEpoch;

  /// Initialize the audio player.
  void _initFlutterPcmSound() {
    const int sampleRate = 8000;
    FlutterPcmSound.setLogLevel(LogLevel.none);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
    // Set the feed threshold
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(1024); // Android-specific value
    } else {
      FlutterPcmSound.setFeedThreshold(2000); // Other platforms
    }
  }

  /// Hang up.
  void udpHangUpAction() async {
    if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
      // Hang up if the call is already in progress
      StartChartManage().startTalkHangupMessageTimer();
    } else {
      // Otherwise reject the call
      StartChartManage().startTalkRejectMessageTimer();
    }
    Get.back();
  }

  // Send the answer command.
  void initiateAnswerCommand() {
    StartChartManage().startTalkAcceptTimer();
  }

  // Listen to the audio/video data stream.
  void _startListenTalkData() {
    // Guard against duplicate listeners
    if (_isListening) {
      AppLog.log("Data stream listener already exists, skipping duplicate subscription");
      return;
    }

    AppLog.log("==== Starting a new data stream listener ====");
    _isListening = true;
    _streamSubscription = state.talkDataRepository.talkDataStream
        .listen((TalkDataModel talkDataModel) async {
      final talkData = talkDataModel.talkData;
      final contentType = talkData!.contentType;
      final currentTime = DateTime.now().millisecondsSinceEpoch;

      // Dispatch by content type
      switch (contentType) {
        case TalkData_ContentTypeE.G711:
          // Record the start time when the first audio frame arrives
          if (_isFirstAudioFrame) {
            _startAudioTime = currentTime;
            _isFirstAudioFrame = false;
          }

          // Compute how late this audio frame is
          final expectedTime = _startAudioTime + talkData.durationMs;
          final audioDelay = currentTime - expectedTime;

          // If the delay is too large, flush the buffer and play immediately
          if (audioDelay > 500) {
            state.audioBuffer.clear();
            if (state.isOpenVoice.value) {
              _playAudioFrames();
            }
            return;
          }
          if (state.audioBuffer.length >= audioBufferSize) {
            state.audioBuffer.removeAt(0); // Drop the oldest frame
          }
          state.audioBuffer.add(talkData); // Append the new frame
          // Play buffered audio, mirroring the video path
          _playAudioFrames();
          break;
        case TalkData_ContentTypeE.Image:
          // Fixed-length buffer, keeping at most bufferSize frames
          state.videoBuffer.add(talkData);
          if (state.videoBuffer.length > bufferSize) {
            state.videoBuffer.removeAt(0); // Drop the oldest frame
          }
          break;
      }
    });
  }
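
  // Buffering summary: incoming G711 audio goes into a small jitter buffer of
  // `audioBufferSize` frames and is flushed whenever a frame arrives more than
  // 500 ms late, while incoming image frames go into a ring buffer of at most
  // `bufferSize` frames that is drained by `videoRenderTimer` at roughly
  // 10 fps (see onInit below).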

  // Added: audio frame playback logic.
  void _playAudioFrames() {
    // Do not play while the buffer is empty or below the target size;
    // the audio buffer is kept small to reduce latency
    if (state.audioBuffer.isEmpty ||
        state.audioBuffer.length < audioBufferSize) {
      return;
    }

    // Find the audio frame with the smallest timestamp
    TalkData? oldestFrame;
    int oldestIndex = -1;
    for (int i = 0; i < state.audioBuffer.length; i++) {
      if (oldestFrame == null ||
          state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
        oldestFrame = state.audioBuffer[i];
        oldestIndex = i;
      }
    }

    // Make sure a valid frame was found
    if (oldestFrame != null && oldestIndex != -1) {
      if (state.isOpenVoice.value) {
        // Play the audio
        _playAudioData(oldestFrame);
      }
      state.audioBuffer.removeAt(oldestIndex);
    }
  }

  /// Listen to the talk status.
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          _handleInvalidTalkStatus();
          break;
        case TalkStatus.answeredSuccessfully:
          state.oneMinuteTimeTimer?.cancel(); // Cancel any old timer
          state.oneMinuteTimeTimer ??=
              Timer.periodic(const Duration(seconds: 1), (Timer t) {
            if (state.listData.value.isNotEmpty) {
              state.oneMinuteTime.value++;
              // if (state.oneMinuteTime.value >= 60) {
              //   t.cancel(); // Cancel the timer
              //   state.oneMinuteTime.value = 0;
              //   // Hang up when the countdown ends
              //   // udpHangUpAction();
              // }
            }
          });
          break;
        default:
          // Other statuses need no handling
          break;
      }
    });
  }

  /// Play audio data.
  void _playAudioData(TalkData talkData) async {
    if (state.isOpenVoice.value) {
      final list =
          G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
      // Convert the PCM data into a PcmArrayInt16
      final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
      FlutterPcmSound.feed(fromList);
      if (!state.isPlaying.value) {
        FlutterPcmSound.play();
        state.isPlaying.value = true;
      }
    }
  }

  /// Stop audio playback.
  void _stopPlayG711Data() async {
    await FlutterPcmSound.pause();
    await FlutterPcmSound.stop();
    await FlutterPcmSound.clear();
  }

  /// Open the door.
  // udpOpenDoorAction() async {
  //   final List<String>? privateKey =
  //       await Storage.getStringList(saveBluePrivateKey);
  //   final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
  //
  //   final List<String>? signKey = await Storage.getStringList(saveBlueSignKey);
  //   final List<int> signKeyDataList = changeStringListToIntList(signKey!);
  //
  //   final List<String>? token = await Storage.getStringList(saveBlueToken);
  //   final List<int> getTokenList = changeStringListToIntList(token!);
  //
  //   await _getLockNetToken();
  //
  //   final OpenLockCommand openLockCommand = OpenLockCommand(
  //     lockID: BlueManage().connectDeviceName,
  //     userID: await Storage.getUid(),
  //     openMode: lockDetailState.openDoorModel,
  //     openTime: _getUTCNetTime(),
  //     onlineToken: lockDetailState.lockNetToken,
  //     token: getTokenList,
  //     needAuthor: 1,
  //     signKey: signKeyDataList,
  //     privateKey: getPrivateKeyList,
  //   );
  //   final messageDetail = openLockCommand.packageData();
  //   // Convert the List<int> into a hex string
  //   String hexString = messageDetail
  //       .map((byte) => byte.toRadixString(16).padLeft(2, '0'))
  //       .join(' ');
  //
  //   AppLog.log('open lock hexString: $hexString');
  //   // Send the remote unlock message
  //   StartChartManage().sendRemoteUnLockMessage(
  //     bluetoothDeviceName: BlueManage().connectDeviceName,
  //     openLockCommand: messageDetail,
  //   );
  //   showToast('正在开锁中...'.tr);
  // }

  int _getUTCNetTime() {
    if (lockDetailState.isHaveNetwork) {
      return DateTime.now().millisecondsSinceEpoch ~/ 1000 +
          lockDetailState.differentialTime;
    } else {
      return 0;
    }
  }

  /// Get the microphone permission status.
  Future<bool> getPermissionStatus() async {
    final Permission permission = Permission.microphone;
    // granted: allowed; denied: refused; permanentlyDenied: refused, never ask again
    final PermissionStatus status = await permission.status;
    if (status.isGranted) {
      return true;
    } else if (status.isDenied) {
      requestPermission(permission);
    } else if (status.isPermanentlyDenied) {
      openAppSettings();
    } else if (status.isRestricted) {
      requestPermission(permission);
    } else {}
    return false;
  }

  /// Request a permission.
  void requestPermission(Permission permission) async {
    final PermissionStatus status = await permission.request();
    if (status.isPermanentlyDenied) {
      openAppSettings();
    }
  }

  Future<void> requestPermissions() async {
    // Request storage permission
    var storageStatus = await Permission.storage.request();
    // Request microphone permission
    var microphoneStatus = await Permission.microphone.request();

    if (storageStatus.isGranted && microphoneStatus.isGranted) {
      print("Permissions granted");
    } else {
      print("Permissions denied");
      // If a permission was denied, prompt the user or open the settings page
      if (await Permission.storage.isPermanentlyDenied) {
        openAppSettings(); // Jump to the app settings page
      }
    }
  }

  Future<void> startRecording() async {
    // requestPermissions();
    // if (state.isRecordingScreen.value) {
    //   showToast('录屏已开始,请勿重复点击');
    // }
    // bool start = await FlutterScreenRecording.startRecordScreen(
    //   "Screen Recording", // Video file name
    //   titleNotification: "Recording in progress", // Notification title
    //   messageNotification: "Tap to stop recording", // Notification body
    // );
    //
    // if (start) {
    //   state.isRecordingScreen.value = true;
    // }
  }

  Future<void> stopRecording() async {
    // String path = await FlutterScreenRecording.stopRecordScreen;
    // print("Recording saved to: $path");
    //
    // // Save the video to the system gallery
    // bool? success = await GallerySaver.saveVideo(path);
    // if (success == true) {
    //   print("Video saved to gallery");
    // } else {
    //   print("Failed to save video to gallery");
    // }
    //
    // showToast('录屏结束,已保存到系统相册');
    // state.isRecordingScreen.value = false;
  }

  @override
  void onReady() {
    super.onReady();
  }

  @override
  void onInit() {
    super.onInit();

    // Start listening to the audio/video data stream
    _startListenTalkData();
    // Start listening to the talk status
    _startListenTalkStatus();
    // Assign the status once before the listener delivers anything.
    // *** The page is initialized after the status has already changed, so the
    // latest status would otherwise be missed; assign it manually here. ***
    state.talkStatus.value = state.startChartTalkStatus.status;

    // Initialize the audio player
    _initFlutterPcmSound();

    // Start the playback timer
    // _startPlayback();

    // Initialize the audio recorder
    _initAudioRecorder();

    requestPermissions();

    // Start the video render timer (10 fps)
    videoRenderTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
      final int now = DateTime.now().millisecondsSinceEpoch;
      if (state.videoBuffer.isNotEmpty) {
        final TalkData oldestFrame = state.videoBuffer.removeAt(0);
        if (oldestFrame.content.isNotEmpty) {
          state.listData.value =
              Uint8List.fromList(oldestFrame.content); // Keep the raw data
          final int decodeStart = DateTime.now().millisecondsSinceEpoch;
          decodeImageFromList(Uint8List.fromList(oldestFrame.content))
              .then((ui.Image img) {
            final int decodeEnd = DateTime.now().millisecondsSinceEpoch;
            state.currentImage.value = img;
            _renderedFrameCount++;
            // Report the fps once per second
            if (now - _lastFpsPrintTime >= 1000) {
              // print('actual rendered fps: $_renderedFrameCount');
              _renderedFrameCount = 0;
              _lastFpsPrintTime = now;
            }
          }).catchError((e) {
            print('Image decode failed: $e');
          });
        }
      }
      // If the buffer is empty, do nothing and keep the previous frame
    });
  }

  @override
  void onClose() {
    _stopPlayG711Data(); // Stop audio playback
    state.listData.value = Uint8List(0); // Clear the video data
    state.audioBuffer.clear(); // Clear the audio buffer
    state.videoBuffer.clear(); // Clear the video buffer

    state.oneMinuteTimeTimer?.cancel();
    state.oneMinuteTimeTimer = null;

    stopProcessingAudio();
    // Clear the image cache
    // _imageCache.clear();
    state.oneMinuteTimeTimer?.cancel(); // Cancel the old timer
    state.oneMinuteTimeTimer = null;
    state.oneMinuteTime.value = 0;
    // Cancel the data stream subscription
    _streamSubscription?.cancel();
    _isListening = false;

    // Release the video render timer
    videoRenderTimer?.cancel();
    videoRenderTimer = null;

    super.onClose();
  }

  @override
  void dispose() {
    stopProcessingAudio();
    // Reset the expected media types
    StartChartManage().reSetDefaultTalkExpect();
    // Release the video render timer
    videoRenderTimer?.cancel();
    videoRenderTimer = null;
    super.dispose();
  }

  /// Handle a terminated or invalid talk status.
  void _handleInvalidTalkStatus() {
    state.listData.value = Uint8List(0);
    // Stop audio playback
    _stopPlayG711Data();
    stopProcessingAudio();
  }

  /// Update the expected media types requested from the device.
  void updateTalkExpect() {
    TalkExpectReq talkExpectReq = TalkExpectReq();
    state.isOpenVoice.value = !state.isOpenVoice.value;
    if (!state.isOpenVoice.value) {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [],
      );
      showToast('已静音'.tr);
    } else {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );
    }

    /// Change the expected data types and restart the talk-expect message timer
    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
        talkExpect: talkExpectReq);
  }
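
  // Note: muting is implemented by changing the expected media types requested
  // from the device (G711 is removed from audioType in TalkExpectReq), so the
  // device stops streaming audio entirely instead of only silencing local
  // playback; image frames keep arriving either way.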

  /// Take a screenshot and save it to the gallery.
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('Screenshot failed: current context not found');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);

      if (byteData == null) {
        AppLog.log('Screenshot failed: image data is empty');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();

      // Get the application documents directory
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';

      // Write the screenshot to a file
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);

      // Save the screenshot to the gallery
      await ImageGallerySaver.saveFile(imagePath);

      AppLog.log('Screenshot saved to: $imagePath');
      showToast('截图已保存到相册'.tr);
    } catch (e) {
      AppLog.log('Screenshot failed: $e');
    }
  }

  // Remote unlock.
  Future<void> remoteOpenLock() async {
    final lockPeerId = StartChartManage().lockPeerId;
    final lockListPeerId = StartChartManage().lockListPeerId;
    int lockId = lockDetailState.keyInfos.value.lockId ?? 0;

    // If the lock list carries peerIds, more than one lock is present;
    // walk the list to find the lockId matching the current peerId
    lockListPeerId.forEach((element) {
      if (element.network?.peerId == lockPeerId) {
        lockId = element.lockId ?? 0;
      }
    });

    final LockSetInfoEntity lockSetInfoEntity =
        await ApiRepository.to.getLockSettingInfoData(
      lockId: lockId.toString(),
    );
    if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
      if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
          lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
        final LoginEntity entity = await ApiRepository.to
            .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
        if (entity.errorCode!.codeIsSuccessful) {
          showToast('已开锁'.tr);
          StartChartManage().lockListPeerId = [];
        }
      } else {
        showToast('该锁的远程开锁功能未启用'.tr);
      }
    }
  }
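
  // Remote unlock is only attempted when both the lock capability flag
  // (lockFeature.remoteUnlock == 1) and the per-lock setting
  // (lockSettingInfo.remoteUnlock == 1) are enabled; otherwise only a toast
  // is shown and no unlock request is sent.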

  /// Initialize the audio recorder.
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }

  // Start recording.
  Future<void> startProcessingAudio() async {
    try {
      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
        final bool? isRecording = await state.voiceProcessor?.isRecording();
        state.isRecordingAudio.value = isRecording!;
        state.startRecordingAudioTime.value = DateTime.now();

        // Add the frame listener and the error listener
        state.voiceProcessor
            ?.addFrameListeners(<VoiceProcessorFrameListener>[_onFrame]);
        state.voiceProcessor?.addErrorListener(_onError);
      } else {
        // state.errorMessage.value = 'Recording permission not granted';
      }
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to start recorder: $ex';
    }
    state.isOpenVoice.value = false;
  }

  /// Stop recording.
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record the end time
      state.endRecordingAudioTime.value = DateTime.now();

      // Compute the recording duration
      final Duration duration = state.endRecordingAudioTime.value
          .difference(state.startRecordingAudioTime.value);

      state.recordingAudioTime.value = duration.inSeconds;
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to stop recorder: $ex';
    } finally {
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording!;
      state.isOpenVoice.value = true;
    }
  }

  // Handle a recorded audio frame.
  Future<void> _onFrame(List<int> frame) async {
    // Cap the buffer size
    if (_bufferedAudioFrames.length > state.frameLength * 3) {
      _bufferedAudioFrames.clear(); // Drop data that has piled up
      return;
    }

    // First apply a fixed gain to raise the base volume
    List<int> amplifiedFrame = _applyGain(frame, 1.6);
    // Encode to G711 data
    List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0 means A-law
    _bufferedAudioFrames.addAll(encodedData);
    // Use a relative (wrapping) timestamp
    final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
    int getFrameLength = state.frameLength;
    if (Platform.isIOS) {
      getFrameLength = state.frameLength * 2;
    }

    // Throttle sending: only send once a full frame has been buffered
    if (_bufferedAudioFrames.length >= state.frameLength) {
      try {
        await StartChartManage().sendTalkDataMessage(
          talkData: TalkData(
            content: _bufferedAudioFrames,
            contentType: TalkData_ContentTypeE.G711,
            durationMs: ms,
          ),
        );
      } finally {
        _bufferedAudioFrames.clear(); // Always clear the buffer
      }
    }
  }
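
  // Frame timing: with sampleRate = 8000 Hz and frameLength = 320 samples,
  // each captured frame covers 320 / 8000 = 40 ms of audio; A-law G711 encodes
  // one byte per sample, so a full frame is also 320 bytes when it is sent.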

  // Error listener.
  void _onError(VoiceProcessorException error) {
    AppLog.log(error.message!);
  }

  // Apply gain to raw PCM audio.
  List<int> _applyGain(List<int> pcmData, double gainFactor) {
    List<int> result = List<int>.filled(pcmData.length, 0);

    for (int i = 0; i < pcmData.length; i++) {
      // PCM samples are signed 16-bit integers
      int sample = pcmData[i];

      // Apply the gain
      double amplified = sample * gainFactor;

      // Clamp to the valid range to avoid overflow
      if (amplified > 32767) {
        amplified = 32767;
      } else if (amplified < -32768) {
        amplified = -32768;
      }

      result[i] = amplified.toInt();
    }

    return result;
  }
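
  // Example: with gainFactor = 1.6 a sample of 15000 becomes 24000, while a
  // sample of 25000 would scale to 40000 and is clamped to the int16 maximum
  // of 32767 by the bounds check above.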
}
talk/starChart/views/imageTransmission/image_transmission_page.dart
@@ -0,0 +1,238 @@
import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_logic.dart';
import 'package:star_lock/talk/starChart/views/imageTransmission/image_transmission_state.dart';
import 'package:star_lock/tools/titleAppBar.dart';
import 'package:slide_to_act/slide_to_act.dart';

// Optional: third-party slide-to-unlock library
// import 'package:flutter_slider_button/flutter_slider_button.dart';

class ImageTransmissionPage extends StatefulWidget {
  const ImageTransmissionPage();

  @override
  State<ImageTransmissionPage> createState() => _ImageTransmissionPageState();
}

class _ImageTransmissionPageState extends State<ImageTransmissionPage>
    with TickerProviderStateMixin {
  final ImageTransmissionLogic logic = Get.put(ImageTransmissionLogic());
  final ImageTransmissionState state = Get.find<ImageTransmissionLogic>().state;
  final startChartManage = StartChartManage();

  @override
  void initState() {
    super.initState();
    state.animationController = AnimationController(
      vsync: this, // The TickerProvider must be this widget
      duration: const Duration(seconds: 1),
    );
    state.animationController.repeat();
    state.animationController.addStatusListener((AnimationStatus status) {
      if (status == AnimationStatus.completed) {
        state.animationController.reset();
        state.animationController.forward();
      } else if (status == AnimationStatus.dismissed) {
        state.animationController.reset();
        state.animationController.forward();
      }
    });
  }

  @override
  void dispose() {
    state.animationController.dispose();
    CallTalk().finishAVData();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      backgroundColor: AppColors.mainBackgroundColor,
      resizeToAvoidBottomInset: false,
      appBar: TitleAppBar(
        barTitle: '图传全自动'.tr,
        haveBack: true,
        backgroundColor: AppColors.mainColor,
        backAction: () {
          logic.udpHangUpAction();
        },
      ),
      body: Obx(() => Column(
            children: [
              SizedBox(height: 24.h),
              SizedBox(
                height: 0.6.sh,
                child: state.listData.value.isEmpty
                    ? _buildWaitingView()
                    : _buildVideoView(),
              ),
              SizedBox(height: 30.h),
              _buildBottomToolBar(),
              SizedBox(height: 30.h),
            ],
          )),
    );
  }

  Widget _buildWaitingView() {
    double barWidth = MediaQuery.of(context).size.width - 60.w;
    return Center(
      child: ClipRRect(
        borderRadius: BorderRadius.circular(30.h),
        child: Stack(
          alignment: Alignment.center,
          children: [
            Container(
              width: barWidth,
              height: double.infinity,
              child: Image.asset(
                'images/main/monitorBg.png',
                fit: BoxFit.cover,
              ),
            ),
            RotationTransition(
              turns: state.animationController,
              child: Image.asset(
                'images/main/realTime_connecting.png',
                width: 300.w,
                height: 300.w,
                fit: BoxFit.contain,
              ),
            ),
          ],
        ),
      ),
    );
  }

  Widget _buildVideoView() {
    double barWidth = MediaQuery.of(context).size.width - 60.w;
    return PopScope(
      canPop: false,
      child: RepaintBoundary(
        key: state.globalKey,
        child: Center(
          child: ClipRRect(
            borderRadius: BorderRadius.circular(30.h),
            child: Container(
              width: barWidth,
              height: double.infinity,
              child: RotatedBox(
                quarterTurns: startChartManage.rotateAngle ~/ 90,
                child: RawImage(
                  image: state.currentImage.value,
                  fit: BoxFit.cover,
                  filterQuality: FilterQuality.high,
                ),
              ),
            ),
          ),
        ),
      ),
    );
  }
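
  // The RotatedBox above converts the rotation angle reported by the star
  // chart manager (in degrees) into quarter turns: rotateAngle ~/ 90 maps
  // 0/90/180/270 to the 0-3 values that RotatedBox.quarterTurns expects.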

  Widget _buildBottomToolBar() {
    return Container(
      margin: EdgeInsets.symmetric(horizontal: 30.w),
      padding: EdgeInsets.symmetric(vertical: 28.h, horizontal: 20.w),
      decoration: BoxDecoration(
        color: Colors.white,
        borderRadius: BorderRadius.circular(30.h),
        boxShadow: [
          BoxShadow(
            color: Colors.black12,
            blurRadius: 12,
            offset: Offset(0, 4),
          ),
        ],
      ),
      child: Column(
        mainAxisSize: MainAxisSize.min,
        children: [
          Row(
            mainAxisAlignment: MainAxisAlignment.spaceEvenly,
            children: [
              _circleButton(
                icon: Icons.call,
                color: Colors.green,
                onTap: () {
                  if (state.talkStatus.value ==
                      TalkStatus.passiveCallWaitingAnswer) {
                    // Answer the call
                    logic.initiateAnswerCommand();
                  }
                },
              ),
              _circleButton(
                icon: Icons.call_end,
                color: Colors.red,
                onTap: () {
                  logic.udpHangUpAction();
                },
              ),
              _circleButton(
                icon: Icons.camera_alt,
                color: Colors.blue,
                onTap: () async {
                  if (state.talkStatus.value ==
                      TalkStatus.answeredSuccessfully) {
                    await logic.captureAndSavePng();
                  }
                },
              ),
            ],
          ),
          SizedBox(height: 36.h),
          SlideAction(
            height: 64.h,
            borderRadius: 24.h,
            elevation: 0,
            innerColor: Colors.amber,
            outerColor: Colors.amber.withOpacity(0.15),
            sliderButtonIcon: Icon(Icons.lock, color: Colors.white, size: 40.w),
            text: '滑动解锁',
            textStyle: TextStyle(
                fontSize: 26.sp,
                color: Colors.black54,
                fontWeight: FontWeight.bold),
            onSubmit: () {
              // TODO: implement the slide-to-unlock logic
              logic.remoteOpenLock();
            },
          ),
        ],
      ),
    );
  }

  Widget _circleButton(
      {required IconData icon,
      required Color color,
      required VoidCallback onTap}) {
    return GestureDetector(
      onTap: onTap,
      child: Container(
        width: 90.w,
        height: 90.w,
        decoration: BoxDecoration(
          color: color,
          shape: BoxShape.circle,
          boxShadow: [
            BoxShadow(
              color: color.withOpacity(0.3),
              blurRadius: 10,
              offset: Offset(0, 4),
            ),
          ],
        ),
        child: Icon(icon, color: Colors.white, size: 48.w),
      ),
    );
  }
}
talk/starChart/views/imageTransmission/image_transmission_state.dart
@@ -0,0 +1,94 @@
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui' as ui;

import 'package:flutter/material.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:get/get_rx/get_rx.dart';
import 'package:get/get_rx/src/rx_types/rx_types.dart';
import 'package:get/state_manager.dart';
import 'package:network_info_plus/network_info_plus.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';

import '../../../../tools/storage.dart';

enum NetworkStatus {
  normal, // 0
  lagging, // 1
  delayed, // 2
  packetLoss // 3
}

class ImageTransmissionState {
  int udpSendDataFrameNumber = 0; // Frame sequence number
  // var isSenderAudioData = false.obs; // Whether audio data should be sent

  Future<String?> userMobileIP = NetworkInfo().getWifiIP();
  Future<String?> userUid = Storage.getUid();

  RxInt udpStatus = 0.obs;
  // 0: initial  1: waiting to monitor  2:  3: monitoring  4: call succeeded
  // 5: caller in call  6: callee call  8: callee in call  9: push-to-talk
  TextEditingController passwordTF = TextEditingController();

  Rx<Uint8List> listData = Uint8List(0).obs; // Received video stream bytes
  RxList<int> listAudioData = <int>[].obs; // Received audio stream bytes
  GlobalKey globalKey = GlobalKey();

  Timer? oneMinuteTimeTimer; // Timer that closes the page after 60 seconds
  RxInt oneMinuteTime = 0.obs; // Elapsed seconds for that timer

  // If the answer command got no reply, resend it up to 10 times per second
  late Timer answerTimer;
  late Timer hangUpTimer;
  late Timer openDoorTimer;
  Timer? fpsTimer;
  late AnimationController animationController;

  late Timer autoBackTimer =
      Timer(const Duration(seconds: 1), () {}); // Auto-return 30 s after sending the monitor command
  late Timer realTimePicTimer =
      Timer(const Duration(seconds: 1), () {}); // Monitor command timer
  RxInt elapsedSeconds = 0.obs;

  // Star chart talk related state
  List<TalkData> audioBuffer = <TalkData>[].obs;
  List<TalkData> activeAudioBuffer = <TalkData>[].obs;
  List<TalkData> activeVideoBuffer = <TalkData>[].obs;

  List<TalkData> videoBuffer = <TalkData>[].obs;
  List<TalkData> videoBuffer2 = <TalkData>[].obs;
  RxBool isPlaying = false.obs; // Whether playback has started
  Rx<TalkStatus> talkStatus = TalkStatus.none.obs; // Star chart talk status
  // Single instance of startChartTalkStatus
  final StartChartTalkStatus startChartTalkStatus =
      StartChartTalkStatus.instance;

  // Singleton stream handler for the talk data stream
  final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
  RxInt lastFrameTimestamp = 0.obs; // Timestamp of the previous frame, used to judge network quality
  Rx<NetworkStatus> networkStatus = NetworkStatus.normal.obs;
  // Network status: 0 normal, 1 lagging, 2 delayed, 3 packet loss
  RxInt alertCount = 0.obs; // Network status alert counter
  RxInt maxAlertNumber = 3.obs; // Maximum number of network status alerts
  RxBool isOpenVoice = true.obs; // Whether sound is on
  RxBool isRecordingScreen = false.obs; // Whether screen recording is active
  RxBool isRecordingAudio = false.obs; // Whether audio recording is active
  Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // Recording start time
  Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; // Recording end time
  RxInt recordingAudioTime = 0.obs; // Recording duration in seconds
  RxInt fps = 0.obs; // FPS counter
  late VoiceProcessor? voiceProcessor; // Audio processor / recorder
  final int frameLength = 320; // Recording frame length in samples (doubled to 640 on iOS)
  final int sampleRate = 8000; // Recording sample rate: 8000 Hz
  List<int> recordingAudioAllFrames = <int>[]; // All recorded audio frames
  List<int> lockRecordingAudioAllFrames = <int>[]; // All recorded audio frames (lock side)
  RxInt rotateAngle = 0.obs; // Rotation angle
  RxBool isLongPressing = false.obs; // Whether a long press (push-to-talk) is in progress
  RxBool hasAudioData = false.obs; // Whether audio data has been received
  RxInt lastAudioTimestamp = 0.obs; // Timestamp of the last received audio data
  Rx<ui.Image?> currentImage = Rx<ui.Image?>(null);
}