app-starlock/lib/talk/starChart/webView/h264_web_logic.dart

import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:webview_flutter/webview_flutter.dart';
import '../../../../tools/baseGetXController.dart';
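
/// GetX controller behind the H.264 WebView talk page: it streams incoming
/// H.264 frames to the local jmuxer-based HTML player, records microphone
/// audio and sends it as G.711, and handles screenshots, hang-up, and
/// remote unlock.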
class H264WebViewLogic extends BaseGetXController {
  final H264WebViewState state = H264WebViewState();

  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

  @override
  void onInit() {
    super.onInit();
    // Initialize the WebView controller.
    state.webViewController = WebViewController()
      ..setJavaScriptMode(JavaScriptMode.unrestricted)
      ..enableZoom(false)
      ..addJavaScriptChannel(
        'Flutter',
        onMessageReceived: (message) {
          print("Message from HTML: ${message.message}");
        },
      );

    state.isShowLoading.value = true;

    // Load the local HTML page.
    _loadLocalHtml();
    // Start listening to the incoming frame stream.
    _createFramesStreamListen();

    _startListenTalkStatus();
    state.talkStatus.value = state.startChartTalkStatus.status;
    // Initialize the audio player.
    _initFlutterPcmSound();
    // Initialize the audio recorder.
    _initAudioRecorder();
  }

  /// Initializes the audio recorder.
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }

  /// Initializes the PCM audio player.
  void _initFlutterPcmSound() {
    const int sampleRate = 8000;
    FlutterPcmSound.setLogLevel(LogLevel.none);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
    // Set the feed threshold.
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(1024); // Android-specific value.
    } else {
      FlutterPcmSound.setFeedThreshold(2000); // Value for other platforms.
    }
  }

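  /// Subscribes to the talk data stream and forwards each frame to the WebView.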
  void _createFramesStreamListen() {
    state.talkDataRepository.talkDataStream.listen((TalkData event) async {
      // Hand the frame data to the JS side for decoding.
      await _sendBufferedData(event.content);
    });
  }

  /// Loads the local HTML file.
  Future<void> _loadLocalHtml() async {
    // Load the HTML file content.
    final String fileHtmlContent =
        await rootBundle.loadString('assets/html/h264.html');
    // Load the JS file content.
    final String jsContent =
        await rootBundle.loadString('assets/html/jmuxer.min.js');
    // Embed the JS content directly into the HTML.
    final String htmlWithJs = fileHtmlContent.replaceAll(
      '<script src="jmuxer.min.js"></script>', // Replace the external JS reference
      '<script>$jsContent</script>', // with an inline <script> tag.
    );
    // Load the final HTML string into the WebView.
    await state.webViewController.loadHtmlString(htmlWithJs);
  }

  /// Sends buffered frame data to the JS player.
  Future<void> _sendBufferedData(List<int> buffer) async {
    // Forward the raw bytes to the JS function defined in the HTML page.
    String jsCode = "feedDataFromFlutter($buffer);";
    await state.webViewController.runJavaScript(jsCode);

    if (state.isShowLoading.isTrue) {
      await Future.delayed(const Duration(seconds: 1));
      state.isShowLoading.value = false;
    }
  }

  /// Listens for talk status changes.
  void _startListenTalkStatus() {
    state.startChartTalkStatus.statusStream.listen((talkStatus) {
      state.talkStatus.value = talkStatus;
      switch (talkStatus) {
        case TalkStatus.rejected:
        case TalkStatus.hangingUpDuring:
        case TalkStatus.notTalkData:
        case TalkStatus.notTalkPing:
        case TalkStatus.end:
          _handleInvalidTalkStatus();
          break;
        case TalkStatus.answeredSuccessfully:
          // Cancel any old timer, then start a fresh one-second ticker.
          state.oneMinuteTimeTimer?.cancel();
          state.oneMinuteTimeTimer =
              Timer.periodic(const Duration(seconds: 1), (Timer t) {
            if (state.isShowLoading.isFalse) {
              state.oneMinuteTime.value++;
              if (state.oneMinuteTime.value >= 60) {
                t.cancel(); // Stop the timer after one minute.
                state.oneMinuteTime.value = 0;
              }
            }
          });
          break;
        default:
          // No handling needed for other statuses.
          break;
      }
    });
  }

  /// Updates the expected talk data types and sends them.
  void updateTalkExpect() {
    TalkExpectReq talkExpectReq = TalkExpectReq();
    state.isOpenVoice.value = !state.isOpenVoice.value;
    if (!state.isOpenVoice.value) {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [],
      );
      showToast('已静音'.tr);
    } else {
      talkExpectReq = TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );
    }
    // Send the updated expectation and restart the expect-message timer.
    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
        talkExpect: talkExpectReq);
  }

  /// Handles terminal or invalid talk statuses.
  void _handleInvalidTalkStatus() {}

  /// Captures a screenshot and saves it to the photo gallery.
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('Screenshot failed: current context not found');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);
      if (byteData == null) {
        AppLog.log('Screenshot failed: image data is empty');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();
      // Get the application documents directory.
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';
      // Write the screenshot to a file.
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);
      // Save the screenshot to the gallery.
      await ImageGallerySaver.saveFile(imagePath);
      AppLog.log('Screenshot saved at: $imagePath');
      showToast('截图已保存到相册'.tr);
    } catch (e) {
      AppLog.log('Screenshot failed: $e');
    }
  }

  /// Sends the answer (accept-call) command.
  void initiateAnswerCommand() {
    StartChartManage().startTalkAcceptTimer();
  }

  /// Starts recording audio from the microphone.
  Future<void> startProcessingAudio() async {
    // Register the frame and error listeners.
    state.voiceProcessor?.addFrameListener(_onFrame);
    state.voiceProcessor?.addErrorListener(_onError);
    try {
      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
        final bool? isRecording = await state.voiceProcessor?.isRecording();
        state.isRecordingAudio.value = isRecording ?? false;
        state.startRecordingAudioTime.value = DateTime.now();
      } else {
        // state.errorMessage.value = 'Recording permission not granted';
      }
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to start recorder: $ex';
    }
    state.isOpenVoice.value = false;
  }

  /// Stops recording audio.
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record the end time.
      state.endRecordingAudioTime.value = DateTime.now();
      // Compute how long the recording lasted.
      final duration = state.endRecordingAudioTime.value!
          .difference(state.startRecordingAudioTime.value!);
      state.recordingAudioTime.value = duration.inSeconds;
    } on PlatformException catch (ex) {
      // state.errorMessage.value = 'Failed to stop recorder: $ex';
    } finally {
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording ?? false;
      state.isOpenVoice.value = true;
    }
  }

  /// Handles a recorded PCM audio frame: denoise, A-law encode, and send.
  Future<void> _onFrame(List<int> frame) async {
    // Pre-processing and transcoding could be moved to a compute isolate:
    // final processedFrame = await compute(preprocessAudio, frame);
    // final list = listLinearToALaw(processedFrame);
    final List<int> processedFrame = preprocessAudio(frame);
    final List<int> list = listLinearToALaw(processedFrame);
    final int ms = DateTime.now().millisecondsSinceEpoch -
        state.startRecordingAudioTime.value.millisecondsSinceEpoch;
    // Send the audio data over UDP.
    await StartChartManage().sendTalkDataMessage(
      talkData: TalkData(
        content: list,
        contentType: TalkData_ContentTypeE.G711,
        durationMs: ms,
      ),
    );
  }

  /// Hangs up or rejects the call, then closes the page.
  void udpHangUpAction() {
    if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
      // Hang up if the call is in progress.
      StartChartManage().startTalkHangupMessageTimer();
    } else {
      // Otherwise reject the incoming call.
      StartChartManage().startTalkRejectMessageTimer();
    }
    Get.back();
  }

  /// Unlocks the lock remotely after checking that remote unlock is enabled.
  Future<void> remoteOpenLock() async {
    final lockPeerId = StartChartManage().lockPeerId;
    final lockListPeerId = StartChartManage().lockListPeerId;
    int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
    // If the lock list carries peerIds there is more than one lock,
    // so look up the lockId whose peerId matches the calling lock.
    for (final element in lockListPeerId) {
      if (element.network?.peerId == lockPeerId) {
        lockId = element.lockId ?? 0;
      }
    }
    final LockSetInfoEntity lockSetInfoEntity =
        await ApiRepository.to.getLockSettingInfoData(
      lockId: lockId.toString(),
    );
    if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
      if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
          lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
        final LoginEntity entity = await ApiRepository.to
            .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
        if (entity.errorCode!.codeIsSuccessful) {
          showToast('已开锁'.tr);
          StartChartManage().lockListPeerId = [];
        }
      } else {
        showToast('该锁的远程开锁功能未启用'.tr);
      }
    }
  }

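  /// Applies a simple noise gate to the raw PCM samples.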
  List<int> preprocessAudio(List<int> pcmList) {
    // Simple noise reduction.
    final List<int> processedList = [];
    for (int pcmVal in pcmList) {
      // Zero out samples whose amplitude is below the threshold.
      if (pcmVal.abs() < 200) {
        pcmVal = 0;
      }
      processedList.add(pcmVal);
    }
    return processedList;
  }

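  /// Converts a list of 16-bit linear PCM samples to A-law bytes.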
  List<int> listLinearToALaw(List<int> pcmList) {
    final List<int> aLawList = [];
    for (final int pcmVal in pcmList) {
      final int aLawVal = linearToALaw(pcmVal);
      aLawList.add(aLawVal);
    }
    return aLawList;
  }

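  /// Encodes a single 16-bit linear PCM sample as an 8-bit A-law value.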
  int linearToALaw(int pcmVal) {
    const int ALAW_MAX = 0x7FFF; // 32767
    const int ALAW_BIAS = 0x84; // 132
    int mask;
    int seg;
    int aLawVal;
    // Handle the sign. Standard A-law uses an XOR mask of 0xD5 for
    // positive samples and 0x55 for negative samples.
    if (pcmVal < 0) {
      pcmVal = -pcmVal;
      mask = 0x55;
    } else {
      mask = 0xD5;
    }
    // Add bias and clamp to ALAW_MAX.
    pcmVal += ALAW_BIAS;
    if (pcmVal > ALAW_MAX) {
      pcmVal = ALAW_MAX;
    }
    // Determine segment.
    seg = search(pcmVal);
    // Calculate the A-law value.
    if (seg >= 8) {
      aLawVal = 0x7F ^ mask; // Clamp to the maximum code.
    } else {
      final int quantized = (pcmVal >> (seg + 3)) & 0xF;
      aLawVal = (seg << 4) | quantized;
      aLawVal ^= mask; // Apply the sign/inversion mask.
    }
    return aLawVal;
  }

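  /// Returns the A-law segment index for the given (biased) sample value.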
  int search(int val) {
    const List<int> table = [
      0xFF, // Segment 0
      0x1FF, // Segment 1
      0x3FF, // Segment 2
      0x7FF, // Segment 3
      0xFFF, // Segment 4
      0x1FFF, // Segment 5
      0x3FFF, // Segment 6
      0x7FFF, // Segment 7
    ];
    const int size = 8;
    for (int i = 0; i < size; i++) {
      if (val <= table[i]) {
        return i;
      }
    }
    return size;
  }

  /// Logs recorder errors.
  void _onError(VoiceProcessorException error) {
    AppLog.log(error.message!);
  }

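  /// Releases talk, WebView, timer, and audio resources.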
  @override
  void dispose() {
    super.dispose();
    StartChartManage().startTalkHangupMessageTimer();
    state.animationController.dispose();
    state.webViewController.clearCache();
    state.webViewController.reload();
    state.oneMinuteTimeTimer?.cancel();
    state.oneMinuteTimeTimer = null;
    stopProcessingAudio();
    StartChartManage().reSetDefaultTalkExpect();
  }
}