fix: resolve the missing-audio issue
parent a09237bc02
commit 2de9d32b61
@@ -78,6 +78,7 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
     }
   }
 
+  /// After the accept-answer reply is received, add the audio expectation data
   void _handleSendExpect() {
     final LockListInfoItemEntity currentKeyInfo =
         CommonDataManage().currentKeyInfo;

@@ -1,20 +1,15 @@
 import 'dart:convert';
 import 'dart:io';
 
-import 'package:flutter/services.dart';
-
-import 'package:flutter_local_notifications/flutter_local_notifications.dart';
 import 'package:get/get.dart';
 import 'package:star_lock/appRouters.dart';
 import 'package:star_lock/app_settings/app_settings.dart';
 import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
 import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
-import 'package:star_lock/talk/starChart/constant/talk_constant.dart';
 import 'package:star_lock/talk/starChart/constant/talk_status.dart';
 import 'package:star_lock/talk/starChart/entity/scp_message.dart';
 import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
 import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
-import 'package:star_lock/talk/starChart/proto/gateway_reset.pb.dart';
 import 'package:star_lock/talk/starChart/proto/generic.pb.dart';
 import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
 import 'package:star_lock/talk/starChart/proto/talk_request.pb.dart';
@@ -28,26 +23,10 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
   RxString currentLanguage =
       CurrentLocaleTool.getCurrentLocaleString().obs; // currently selected language
 
-  // timestamp of the last handled request
-  int _lastRequestTime = 0;
-
   @override
   void handleReq(ScpMessage scpMessage) async {
-    final currentTime = DateTime.now().millisecondsSinceEpoch;
-    // ensure at least 1 second since the last request
-    if (currentTime - _lastRequestTime < 1000) {
-      // reject the request directly if the interval is under 1 second
-      replyErrorMessage(scpMessage);
-      AppLog.log('对讲请求过于频繁,已拒绝');
-      return;
-    }
-
-    // update the time of the last handled request
-    _lastRequestTime = currentTime;
-
     // check whether an account is logged in
     final loginData = await Storage.getLoginData();
-
     // if an account is logged in, and not in passive-answer-waiting state, and not in answered state
     if (loginData != null &&
         (talkStatus.status != TalkStatus.passiveCallWaitingAnswer ||
@@ -77,6 +56,8 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
       // received the reply to the talk request
       startChartManage.FromPeerId = scpMessage.ToPeerId!;
       startChartManage.ToPeerId = scpMessage.FromPeerId!;
+      // build the expected data format
+      _handleResponseSendExpect();
       // send the expected data
       startChartManage.startTalkExpectTimer();
       // stop sending the talk request
@@ -99,7 +80,7 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
   // handle the incoming-call event
   void _talkRequestEvent({required String talkObjectName}) {
     // send the expected data and notify the lock board that video data is needed
-    _handleSendExpect();
+    _handleRequestSendExpect();
     // play the ringtone
     //test: use a custom ringtone
     playRingtone();
@@ -188,7 +169,8 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
     }
   }
 
-  void _handleSendExpect() {
+  /// Expected data sent after the app receives a talk request
+  void _handleRequestSendExpect() {
     final LockListInfoItemEntity currentKeyInfo =
         CommonDataManage().currentKeyInfo;
     final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
@@ -209,4 +191,27 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
       print('锁不支持H264和MJPEG,默认发送图像视频格式期望数据');
     }
   }
+
+  /// Expected data sent after the app initiates a talk request and receives the reply
+  void _handleResponseSendExpect() {
+    final LockListInfoItemEntity currentKeyInfo =
+        CommonDataManage().currentKeyInfo;
+    final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
+    final isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
+
+    // prefer H264, then MJPEG
+    if (isH264) {
+      // the lock supports H264: expect H264 video and G711 audio
+      startChartManage.sendH264VideoAndG711AudioTalkExpectData();
+      print('锁支持H264,发送H264视频格式期望数据');
+    } else if (isMJpeg) {
+      // the lock only supports MJPEG: expect image video and G711 audio
+      startChartManage.sendImageVideoAndG711AudioTalkExpectData();
+      print('锁不支持H264,支持MJPEG,发送MJPEG视频格式期望数据');
+    } else {
+      // default to image video
+      startChartManage.sendImageVideoAndG711AudioTalkExpectData();
+      print('锁不支持H264和MJPEG,默认发送图像视频格式期望数据');
+    }
+  }
 }
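Note: both expect helpers above follow the same fallback order when choosing the video expectation. A minimal standalone sketch of that selection, using illustrative names rather than the project's real API:

// Sketch only: the fallback order used by the expect-data helpers above.
enum VideoExpect { h264, mjpeg, image }

VideoExpect chooseVideoExpect({required bool isH264, required bool isMJpeg}) {
  if (isH264) return VideoExpect.h264; // prefer H264 when the lock supports it
  if (isMJpeg) return VideoExpect.mjpeg; // otherwise fall back to MJPEG
  return VideoExpect.image; // default: plain image frames
}

Every branch pairs the chosen video format with a G711 audio expectation; the change is that the response path now sends this expectation at all, which appears to be what restores audio on calls the app initiates.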

@@ -51,7 +51,6 @@ class TalkViewLogic extends BaseGetXController {
   int _startAudioTime = 0; // playback start timestamp
   bool _isFirstFrame = true; // whether this is the first frame
 
-
   // audio frame buffer and send function
   final List<int> _bufferedAudioFrames = <int>[];
 
@@ -65,6 +64,10 @@ class TalkViewLogic extends BaseGetXController {
   int _lastFpsUpdateTime = 0;
   Timer? _fpsTimer;
 
+  // listening state and subscription reference
+  bool _isListening = false;
+  StreamSubscription? _streamSubscription;
+
   /// Initialize the audio player
   void _initFlutterPcmSound() {
     const int sampleRate = 8000;
@@ -97,7 +100,15 @@ class TalkViewLogic extends BaseGetXController {
 
   // listen to the audio/video data stream
   void _startListenTalkData() {
-    state.talkDataRepository.talkDataStream
+    // prevent duplicate listeners
+    if (_isListening) {
+      AppLog.log("已经存在数据流监听,避免重复监听");
+      return;
+    }
+
+    AppLog.log("==== 启动新的数据流监听 ====");
+    _isListening = true;
+    _streamSubscription = state.talkDataRepository.talkDataStream
         .listen((TalkDataModel talkDataModel) async {
       final talkData = talkDataModel.talkData;
       final contentType = talkData!.contentType;
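Note: the listen-once guard added here (and mirrored in H264WebViewLogic below) is the heart of the duplicate-listener fix. A self-contained sketch of the pattern, with illustrative names standing in for the repository and log classes:

import 'dart:async';

/// Sketch of the listen-once guard used above; names are illustrative.
class TalkStreamGuard {
  StreamSubscription<List<int>>? _subscription;
  bool _isListening = false;

  /// Attach exactly one listener; repeated calls are ignored.
  void listen(Stream<List<int>> frames, void Function(List<int>) onFrame) {
    if (_isListening) return;
    _isListening = true;
    _subscription = frames.listen(onFrame);
  }

  /// Cancel the subscription so a later screen can attach a fresh listener.
  Future<void> cancel() async {
    await _subscription?.cancel();
    _subscription = null;
    _isListening = false;
  }
}

Holding the subscription is what makes the cleanup in onClose further down possible; without it the old listener keeps consuming frames after the page is gone.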
@@ -106,13 +117,13 @@ class TalkViewLogic extends BaseGetXController {
       // check the data type and dispatch accordingly
       switch (contentType) {
         case TalkData_ContentTypeE.G711:
           // // record the start time when the first frame arrives
           if (_isFirstAudioFrame) {
             _startAudioTime = currentTime;
             _isFirstAudioFrame = false;
           }
 
           // calculate the audio delay
           final expectedTime = _startAudioTime + talkData.durationMs;
           final audioDelay = currentTime - expectedTime;
 
@@ -384,7 +395,6 @@ class TalkViewLogic extends BaseGetXController {
     }
   }
 
-
   /// Get the permission status
   Future<bool> getPermissionStatus() async {
     final Permission permission = Permission.microphone;
@@ -504,6 +514,9 @@ class TalkViewLogic extends BaseGetXController {
     state.oneMinuteTimeTimer?.cancel(); // cancel the old timer
     state.oneMinuteTimeTimer = null; // cancel the old timer
     state.oneMinuteTime.value = 0;
+    // cancel the data stream subscription
+    _streamSubscription?.cancel();
+    _isListening = false;
 
     super.onClose();
   }

@@ -54,6 +54,10 @@ class H264WebViewLogic extends BaseGetXController {
   final Queue<List<int>> _frameBuffer = Queue<List<int>>();
   static const int FRAME_BUFFER_SIZE = 25;
 
+  // listening state and subscription reference
+  bool _isListening = false;
+  StreamSubscription? _streamSubscription;
+
   @override
   void onInit() {
     // initialize the WebView controller
@@ -122,7 +126,15 @@ class H264WebViewLogic extends BaseGetXController {
   }
 
   void _createFramesStreamListen() async {
-    state.talkDataRepository.talkDataStream
+    // prevent duplicate listeners
+    if (_isListening) {
+      AppLog.log("已经存在数据流监听,避免重复监听");
+      return;
+    }
+
+    AppLog.log("==== 启动新的数据流监听 ====");
+    _isListening = true;
+    _streamSubscription = state.talkDataRepository.talkDataStream
         .listen((TalkDataModel talkDataModel) async {
       final talkData = talkDataModel.talkData;
       final contentType = talkData!.contentType;
@@ -131,30 +143,33 @@ class H264WebViewLogic extends BaseGetXController {
       // check the data type and dispatch accordingly
       switch (contentType) {
         case TalkData_ContentTypeE.G711:
-          // // record the start time when the first frame arrives
-          if (_isFirstAudioFrame) {
-            _startAudioTime = currentTime;
-            _isFirstAudioFrame = false;
-          }
-
-          // calculate the audio delay
-          final expectedTime = _startAudioTime + talkData.durationMs;
-          final audioDelay = currentTime - expectedTime;
-
-          // if the delay is too large, clear the buffer and play immediately
-          if (audioDelay > 500) {
-            state.audioBuffer.clear();
-            if (state.isOpenVoice.value) {
-              _playAudioFrames();
-            }
-            return;
-          }
-          if (state.audioBuffer.length >= audioBufferSize) {
-            state.audioBuffer.removeAt(0); // discard the oldest data
-          }
-          state.audioBuffer.add(talkData); // add the new data
-          // add audio playback logic, similar to video
-          _playAudioFrames();
+          if (state.isShowLoading.isFalse) {
+            // // record the start time when the first frame arrives
+            if (_isFirstAudioFrame) {
+              _startAudioTime = currentTime;
+              _isFirstAudioFrame = false;
+            }
+
+            // calculate the audio delay
+            final expectedTime = _startAudioTime + talkData.durationMs;
+            final audioDelay = currentTime - expectedTime;
+
+            // if the delay is too large, clear the buffer and play immediately
+            if (audioDelay > 500) {
+              state.audioBuffer.clear();
+              if (state.isOpenVoice.value) {
+                _playAudioFrames();
+              }
+              return;
+            }
+            if (state.audioBuffer.length >= audioBufferSize) {
+              state.audioBuffer.removeAt(0); // discard the oldest data
+            }
+            state.audioBuffer.add(talkData); // add the new data
+            // add audio playback logic, similar to video
+            _playAudioFrames();
+          }
           break;
         case TalkData_ContentTypeE.H264:
           // // add the new frame to the buffer
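Note: the delay check in this branch keeps playback close to real time: the first frame pins a start time, each later frame is compared against start + durationMs, and a backlog above 500 ms flushes the buffer. A compact sketch of that bookkeeping, with a plain list standing in for the protobuf TalkData buffer:

/// Sketch of the latency gate in the G711 branch above; names are illustrative.
class AudioLatencyGate {
  static const int maxDelayMs = 500; // mirrors the 500 ms threshold in the diff
  int? _startTimeMs; // wall-clock time of the first audio frame
  final List<List<int>> buffer = <List<int>>[];

  /// Returns true when the backlog was flushed and playback should restart now.
  bool onFrame(List<int> frame, {required int durationMs, required int nowMs}) {
    _startTimeMs ??= nowMs;
    final expectedMs = _startTimeMs! + durationMs;
    final delayMs = nowMs - expectedMs;
    if (delayMs > maxDelayMs) {
      buffer.clear(); // too far behind: drop the backlog and restart playback
      return true;
    }
    buffer.add(frame);
    return false;
  }
}

The new isShowLoading guard simply skips this work while the loading overlay is still visible, so no audio is buffered or played before the view is ready.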
@@ -537,6 +552,39 @@ class H264WebViewLogic extends BaseGetXController {
     }
   }
 
+  /// Stop audio playback
+  void _stopPlayG711Data() async {
+    await FlutterPcmSound.pause();
+    await FlutterPcmSound.stop();
+    await FlutterPcmSound.clear();
+  }
+
+  @override
+  void onClose() {
+    _stopPlayG711Data(); // stop audio playback
+
+    state.audioBuffer.clear(); // clear the audio buffer
+
+    state.oneMinuteTimeTimer?.cancel();
+    state.oneMinuteTimeTimer = null;
+
+    // stop audio playback
+    stopProcessingAudio();
+
+    state.oneMinuteTimeTimer?.cancel(); // cancel the old timer
+    state.oneMinuteTimeTimer = null; // cancel the old timer
+    state.oneMinuteTime.value = 0;
+
+    // cancel the data stream subscription
+    _streamSubscription?.cancel();
+    _isListening = false;
+
+    // reset the expected data
+    StartChartManage().reSetDefaultTalkExpect();
+
+    super.onClose();
+  }
+
   @override
   void dispose() {
     // _mockDataTimer?.cancel();
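Note: the new onClose gathers every teardown step in one place: stop the PCM player, drop buffered audio, cancel the timers, cancel the data-stream subscription, and reset the shared expect state before handing off to super.onClose(). A skeleton of that ordering in a GetX controller, with placeholder members in place of the real state object and managers:

import 'dart:async';

import 'package:get/get.dart';

/// Skeleton of the teardown order used above; members are placeholders.
class TalkTeardownController extends GetxController {
  Timer? _oneMinuteTimer;
  StreamSubscription? _streamSubscription;
  final List<Object> _audioBuffer = <Object>[];
  bool _isListening = false;

  Future<void> _stopAudioPlayback() async {
    // stand-in for the FlutterPcmSound pause/stop/clear sequence above
  }

  @override
  void onClose() {
    _stopAudioPlayback(); // 1. stop the audio player
    _audioBuffer.clear(); // 2. drop any buffered frames
    _oneMinuteTimer?.cancel(); // 3. cancel timers
    _oneMinuteTimer = null;
    _streamSubscription?.cancel(); // 4. detach from the data stream
    _isListening = false;
    // 5. reset shared expect state here, then let GetX finish up
    super.onClose();
  }
}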

@@ -7,6 +7,7 @@ import 'package:flutter_screenutil/flutter_screenutil.dart';
 import 'package:get/get.dart';
 import 'package:star_lock/app_settings/app_colors.dart';
 import 'package:star_lock/app_settings/app_settings.dart';
+import 'package:star_lock/talk/call/callTalk.dart';
 import 'package:star_lock/talk/starChart/constant/talk_status.dart';
 import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
 import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
@@ -413,8 +414,10 @@ class _H264WebViewState extends State<H264WebView>
   }
   @override
   void dispose() {
-    state.animationController.dispose(); // make sure the controller is released
-    super.dispose();
+    state.animationController.dispose();
 
+    CallTalk().finishAVData();
+    // UdpTalkDataHandler().resetDataRates();
+    super.dispose();
   }
 }