Fix
This commit is contained in:
parent 197320dfbf
commit c3750ff5a3
@@ -956,8 +956,6 @@ class LockDetailLogic extends BaseGetXController {
      showToast('设备未配网'.tr);
      return;
    }
    // Record the start time
    TalkViewNativeDecodeLogic.setMonitorStartTime(DateTime.now());
    // Reset packet-loss-rate monitoring
    // PacketLossStatistics().reset();
    // Send the monitoring id - monitoring mode does not set the waiting-to-answer state
@@ -1,8 +1,9 @@
import 'dart:async';
import 'dart:io';
import 'dart:math'; // Added to support the Random class
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt

import 'package:flutter/cupertino.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
@@ -27,20 +28,21 @@ import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/baseGetXController.dart';
import 'package:star_lock/tools/callkit_handler.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';

import '../../../../tools/baseGetXController.dart';

class TalkViewNativeDecodeLogic extends BaseGetXController {
  final TalkViewNativeDecodeState state = TalkViewNativeDecodeState();

  final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;

  int bufferSize = 2; // Initialized to the default size; buffer reduced to lower latency
  int bufferSize = 25; // Initialized to the default size

  int audioBufferSize = 3; // Audio buffers 3 frames by default to reduce audio latency
  int audioBufferSize = 20; // Audio buffers 2 frames by default

  // Rollover threshold, adjusted dynamically; when frameSeq is small the threshold is small too
  int _getFrameSeqRolloverThreshold(int lastSeq) {
@@ -90,75 +92,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {

  int? lastDecodedIFrameSeq;

  // New: record the timestamp of the most recent I-frame, used to detect large scene changes
  DateTime? _lastIFrameTime;

  // Flag recording whether this is the first H264 frame
  bool _isFirstH264FrameReceived = true;

  // Time the first frame was received
  DateTime? _firstFrameReceivedTime;

  // Start time used to calculate elapsed time
  static DateTime? _monitorStartTime;

  // Set the start time
  static void setMonitorStartTime(DateTime startTime) {
    _monitorStartTime = startTime;
    AppLog.log('监控启动时间已记录: $startTime');
  }

  // Calculate and print the elapsed time
  static void printH264ReceiveTime() {
    if (_monitorStartTime != null) {
      final Duration duration = DateTime.now().difference(_monitorStartTime!);
      AppLog.log('从点击监控到接收H264数据耗时: ${duration.inMilliseconds} 毫秒 (${duration.inSeconds}.${duration.inMilliseconds % 1000} 秒)');

      // Reset the start time to avoid counting it twice
      _monitorStartTime = null;
    }
  }

  // Initialize the video decoder
  Future<void> _initVideoDecoder() async {
    try {
      state.isLoading.value = true;
      int width = StartChartManage().videoWidth;
      int height = StartChartManage().videoHeight;

      // On iOS the first tap on Monitor shows no picture
      if (Platform.isIOS && (width == 0 || height == 0)) {
        // Use Future.microtask instead of a delayed wait to improve responsiveness
        int attempts = 0;
        const maxAttempts = 10; // Fewer wait attempts to improve responsiveness

        while ((width == 0 || height == 0) && attempts < maxAttempts) {
          await Future.microtask(() async {
            await Future.delayed(const Duration(milliseconds: 50)); // Shorter wait
            width = StartChartManage().videoWidth;
            height = StartChartManage().videoHeight;
          });
          attempts++;
        }

        // If the parameters still could not be obtained, fall back to default values
        if (width == 0 || height == 0) {
          width = 864;
          height = 480;
          AppLog.log('使用默认视频参数: ${width}x$height');
        } else {
          AppLog.log('获取到视频参数: ${width}x$height');
        }
      }

      // Ensure width and height are even, as required by the H264 standard
      width = (width / 2).floor() * 2;
      height = (height / 2).floor() * 2;

      // Create the decoder configuration
      final config = VideoDecoderConfig(
        width: width,
        height: height,
        width: StartChartManage().videoWidth,
        // Actual video width
        height: StartChartManage().videoHeight,
        codecType: 'h264',
      );
      // Initialize the decoder and obtain the textureId
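Note: the two static helpers above act as a simple cross-controller stopwatch. As shown elsewhere in this commit, the measurement flow is (call sites quoted from this diff):

    // In LockDetailLogic, when the user taps Monitor:
    TalkViewNativeDecodeLogic.setMonitorStartTime(DateTime.now());

    // In this class, when the first H264 frame arrives:
    // logs the elapsed time once, then clears _monitorStartTime.
    TalkViewNativeDecodeLogic.printH264ReceiveTime();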
@@ -168,16 +110,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
      AppLog.log('视频解码器初始化成功:textureId=$textureId');
      VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
        AppLog.log('已经开始渲染=======');

        // Calculate and print the time from receiving the first frame to dismissing the loading indicator
        if (_firstFrameReceivedTime != null) {
          final Duration renderToLoadingDuration = DateTime.now().difference(_firstFrameReceivedTime!);
          AppLog.log('从接收第一帧到关闭loading耗时: ${renderToLoadingDuration.inMilliseconds} 毫秒 (${renderToLoadingDuration.inSeconds}.${renderToLoadingDuration.inMilliseconds % 1000} 秒)');

          // Reset the recorded time to avoid counting it twice
          _firstFrameReceivedTime = null;
        }

        // Only dismiss the loading indicator once the first frame has actually been rendered
        Future.microtask(() => state.isLoading.value = false);
      });
@@ -189,7 +121,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
    } catch (e) {
      AppLog.log('初始化视频解码器错误: $e');
      // If initialization fails, retry after a delay
      await Future.delayed(const Duration(milliseconds: 500)); // Shorter retry wait
      await Future.delayed(const Duration(seconds: 2));
      if (!Get.isRegistered<TalkViewNativeDecodeLogic>()) {
        return; // If the controller has already been destroyed, do not retry
      }
@@ -315,8 +247,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
    state.frameProcessTimer?.cancel();

    // Calculate the timer interval so frames are processed at the target frame rate
    // final int intervalMs = (1000 / state.targetFps).round();
    final int intervalMs = state.frameProcessIntervalMs;
    final int intervalMs = (1000 / state.targetFps).round();

    // Create a new timer
    state.frameProcessTimer = Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
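Note: a quick sanity check on the interval formula in this hunk; the frame-rate values below are illustrative, not taken from the state class:

    // intervalMs = (1000 / targetFps).round()
    // targetFps = 25 -> 40 ms per tick
    // targetFps = 15 -> 67 ms per tick
    final int intervalMs = (1000 / 25).round(); // 40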
@@ -325,6 +256,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
    AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps,间隔: ${intervalMs}ms');
  }

  /// Process the next frame from the buffer
  /// Process the next frame from the buffer
  void _processNextFrameFromBuffer() async {
    final startTime = DateTime.now().microsecondsSinceEpoch;
@@ -530,21 +462,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {

  /// Play audio data
  void _playAudioData(TalkData talkData) async {
    if (state.isOpenVoice.value && state.isLoading.isFalse) {
      // Decode and denoise first (per the audio processing order convention: noise reduction, then gain)
    if (state.isOpenVoice.value && state.isLoading.isFalse && state.isRecordingAudio.value == false) {
      List<int> encodedData = G711Tool.decode(talkData.content, 0); // 0 means A-law

      // Apply state-based dynamic gain compensation, per the dynamic audio gain compensation strategy
      if (Platform.isIOS) {
        if (state.isRecordingAudio.value) {
          // While recording, raise the gain of the received audio to match the volume of the lock's first utterance
          encodedData = _applyGain(encodedData, 1.5);
        } else {
          // Use the standard gain when not recording
          encodedData = _applyGain(encodedData, 1.2);
        }
      }

      // Convert the PCM data to PcmArrayInt16
      final PcmArrayInt16 fromList = PcmArrayInt16.fromList(encodedData);
      FlutterPcmSound.feed(fromList);
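Note: _applyGain is called here and in _onFrame, but its body is not part of this diff. A minimal sketch of what such a gain step typically looks like for decoded 16-bit PCM samples; this is an assumption, not the project's actual implementation:

    // Hypothetical sketch: scale each 16-bit PCM sample by `gain` and clamp to the int16 range.
    List<int> applyGainSketch(List<int> samples, double gain) {
      return [
        for (final s in samples) (s * gain).round().clamp(-32768, 32767).toInt()
      ];
    }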
@@ -691,8 +610,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
    _startProcessingAudioTimer?.cancel();
    _startProcessingAudioTimer = null;
    _bufferedAudioFrames.clear();
    // Stop the monitoring-request timer so we do not automatically reconnect after hanging up
    StartChartManage().stopCallRequestMessageTimer();
    super.onClose();
  }

@@ -884,14 +801,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {

  // Audio frame processing
  Future<void> _onFrame(List<int> frame) async {
    // Use a platform-specific audio gain factor; iOS uses a lower gain to avoid hurting the receiver's sensitivity
    double gainFactor = Platform.isIOS ? 0.6 : 1.2;

    // Apply gain
    final gainApplied = _applyGain(frame, gainFactor);
    final applyGain = _applyGain(frame, 1.6);

    // Encode as G711 data
    List<int> encodedData = G711Tool.encode(gainApplied, 0); // 0 means A-law
    List<int> encodedData = G711Tool.encode(applyGain, 0); // 0 means A-law
    _bufferedAudioFrames.addAll(encodedData);

    // Start the timed sender (started only once)
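Note: the "timed sender" referenced by the comment above is not shown in this diff; onClose only cancels _startProcessingAudioTimer and clears _bufferedAudioFrames. A minimal sketch of a start-once periodic drain, using dart:async's Timer (already imported in this file); the 40 ms interval and the send callback are assumptions:

    // Hypothetical sketch of a start-once periodic sender over a pending audio buffer.
    Timer? _senderTimer;
    final List<int> _pending = [];

    void startSenderSketch(void Function(List<int> chunk) send) {
      _senderTimer ??= Timer.periodic(const Duration(milliseconds: 40), (_) {
        if (_pending.isEmpty) return;
        send(List<int>.from(_pending)); // hand off a copy of the buffered bytes
        _pending.clear();
      });
    }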
@@ -963,8 +876,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
      state.textureId.value = null;
    }

    // Shorter wait to improve responsiveness
    await Future.delayed(Duration(milliseconds: 50));
    // Wait a short moment to make sure resources have been fully released
    await Future.delayed(Duration(milliseconds: 100));

    // Create a new decoder configuration
    final config = VideoDecoderConfig(
@@ -1031,50 +944,14 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
    // Handle H264 frames
    if (state.textureId.value != null) {
      if (talkDataH264Frame != null) {
        // Record when the first H264 frame arrives and calculate the elapsed time
        if (_isFirstH264FrameReceived) {
          AppLog.log('第一个H264帧接收时间: ${DateTime.now()}');

          // Calculate and print the elapsed time from tapping Monitor to receiving H264 data
          TalkViewNativeDecodeLogic.printH264ReceiveTime();

          // Record when the first frame was received, used to measure the time until loading is dismissed
          _firstFrameReceivedTime = DateTime.now();

          _isFirstH264FrameReceived = false;
        }

        // Build a Map containing the frame data and type
        final Map<String, dynamic> frameMap = {
          'frameData': talkData.content,
          'frameType': talkDataH264Frame.frameType,
          'frameSeq': talkDataH264Frame.frameSeq,
          'frameSeqI': talkDataH264Frame.frameSeqI,
          'pts': talkData.durationMs,
          'scpMessage': scpMessage!,
        };

        // If the buffer exceeds its maximum size, drop P/B frames first
        if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
          // First try a quick scan for a P frame
          int pbIndex = -1;
          for (int i = 0; i < state.h264FrameBuffer.length; i++) {
            if (state.h264FrameBuffer[i]['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
              pbIndex = i;
              break;
            }
          }

          if (pbIndex != -1) {
            state.h264FrameBuffer.removeAt(pbIndex);
          } else {
            // If no P frame was found, remove the oldest frame
            state.h264FrameBuffer.removeAt(0);
          }
        }

        // Add the frame to the buffer
        state.h264FrameBuffer.add(frameMap);
        _addFrameToBuffer(
          talkData.content,
          talkDataH264Frame.frameType,
          talkData.durationMs,
          talkDataH264Frame.frameSeq,
          talkDataH264Frame.frameSeqI,
          scpMessage!,
        );
      }
    } else {
      AppLog.log('无法处理H264帧:textureId为空');
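Note: this hunk (50 lines down to 14) swaps the inline frameMap/overflow handling for a call to _addFrameToBuffer, whose body is not included in the diff. A minimal sketch of the same overflow policy (drop a P frame first, otherwise the oldest frame) factored into a helper; the Map-based signature is an assumption, the real method takes the positional arguments shown above:

    // Hypothetical sketch: same policy as the inline code above, using the
    // state.h264FrameBuffer / state.maxFrameBufferSize fields referenced in this file.
    void addFrameToBufferSketch(Map<String, dynamic> frameMap) {
      if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
        final pIndex = state.h264FrameBuffer
            .indexWhere((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.P);
        state.h264FrameBuffer.removeAt(pIndex != -1 ? pIndex : 0);
      }
      state.h264FrameBuffer.add(frameMap);
    }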