diff --git a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart index 6fb1aaca..e3220fe1 100644 --- a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart +++ b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:io'; +import 'dart:math'; // 添加Random类支持 import 'dart:ui' as ui; import 'package:flutter/foundation.dart'; @@ -89,6 +90,35 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { int? lastDecodedIFrameSeq; + // 新增:记录最近I帧的时间戳,用于检测画面大幅变动 + DateTime? _lastIFrameTime; + + // 添加一个标志来记录是否是第一个H264帧 + bool _isFirstH264FrameReceived = true; + + // 记录接收到第一帧的时间 + DateTime? _firstFrameReceivedTime; + + // 记录开始时间用于计算耗时 + static DateTime? _monitorStartTime; + + // 设置开始时间的方法 + static void setMonitorStartTime(DateTime startTime) { + _monitorStartTime = startTime; + AppLog.log('监控启动时间已记录: $startTime'); + } + + // 计算并打印耗时的方法 + static void printH264ReceiveTime() { + if (_monitorStartTime != null) { + final Duration duration = DateTime.now().difference(_monitorStartTime!); + AppLog.log('从点击监控到接收H264数据耗时: ${duration.inMilliseconds} 毫秒 (${duration.inSeconds}.${duration.inMilliseconds % 1000} 秒)'); + + // 重置开始时间,避免重复计算 + _monitorStartTime = null; + } + } + // 初始化视频解码器 Future _initVideoDecoder() async { try { @@ -96,33 +126,38 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { int width = StartChartManage().videoWidth; int height = StartChartManage().videoHeight; - if(Platform.isIOS){ - // ios第一次点击监控没画面 - if (width == 0 || height == 0) { - int attempts = 0; - const maxAttempts = 20; // 最多等待2秒 (20 * 100ms) + // ios第一次点击监控没画面 + if (Platform.isIOS && (width == 0 || height == 0)) { + // 使用Future.microtask代替延时等待,提高响应速度 + int attempts = 0; + const maxAttempts = 10; // 减少等待次数,提高响应速度 - while ((width == 0 || height == 0) && attempts < maxAttempts) { - await Future.delayed(const Duration(milliseconds: 100)); 
+ while ((width == 0 || height == 0) && attempts < maxAttempts) { + await Future.microtask(() async { + await Future.delayed(const Duration(milliseconds: 50)); // 减少等待时间 width = StartChartManage().videoWidth; height = StartChartManage().videoHeight; - attempts++; - } + }); + attempts++; + } - // 如果仍然没有获取到参数,使用默认值 - if (width == 0 || height == 0) { - width = 864; - height = 480; - AppLog.log('使用默认视频参数: ${width}x$height'); - } else { - AppLog.log('获取到视频参数: ${width}x$height'); - } + // 如果仍然没有获取到参数,使用默认值 + if (width == 0 || height == 0) { + width = 864; + height = 480; + AppLog.log('使用默认视频参数: ${width}x$height'); + } else { + AppLog.log('获取到视频参数: ${width}x$height'); } } + + // 确保宽高为偶数,符合H264标准 + width = (width / 2).floor() * 2; + height = (height / 2).floor() * 2; + // 创建解码器配置 final config = VideoDecoderConfig( width: width, - // 实际视频宽度 height: height, codecType: 'h264', ); @@ -133,6 +168,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { AppLog.log('视频解码器初始化成功:textureId=$textureId'); VideoDecodePlugin.setOnFrameRenderedListener((textureId) { AppLog.log('已经开始渲染======='); + + // 计算并打印从接收第一帧到关闭loading的耗时 + if (_firstFrameReceivedTime != null) { + final Duration renderToLoadingDuration = DateTime.now().difference(_firstFrameReceivedTime!); + AppLog.log('从接收第一帧到关闭loading耗时: ${renderToLoadingDuration.inMilliseconds} 毫秒 (${renderToLoadingDuration.inSeconds}.${renderToLoadingDuration.inMilliseconds % 1000} 秒)'); + + // 重置时间记录,避免重复计算 + _firstFrameReceivedTime = null; + } + // 只有真正渲染出首帧时才关闭loading Future.microtask(() => state.isLoading.value = false); }); @@ -144,7 +189,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { } catch (e) { AppLog.log('初始化视频解码器错误: $e'); // 如果初始化失败,延迟后重试 - await Future.delayed(const Duration(seconds: 2)); + await Future.delayed(const Duration(milliseconds: 500)); // 减少重试等待时间 if (!Get.isRegistered()) { return; // 如果控制器已经被销毁,不再重试 } @@ -633,6 +678,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { 
_startProcessingAudioTimer?.cancel(); _startProcessingAudioTimer = null; _bufferedAudioFrames.clear(); + // 停止监控请求定时器,防止挂断后自动重连 + StartChartManage().stopCallRequestMessageTimer(); super.onClose(); } @@ -899,8 +946,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { state.textureId.value = null; } - // 等待一小段时间确保资源释放完成 - await Future.delayed(Duration(milliseconds: 100)); + // 减少等待时间,提高响应速度 + await Future.delayed(Duration(milliseconds: 50)); // 创建新的解码器配置 final config = VideoDecoderConfig( @@ -967,14 +1014,50 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { // 处理H264帧 if (state.textureId.value != null) { if (talkDataH264Frame != null) { - _addFrameToBuffer( - talkData.content, - talkDataH264Frame.frameType, - talkData.durationMs, - talkDataH264Frame.frameSeq, - talkDataH264Frame.frameSeqI, - scpMessage!, - ); + // 记录第一个H264帧接收时间并计算耗时 + if (_isFirstH264FrameReceived) { + AppLog.log('第一个H264帧接收时间: ${DateTime.now()}'); + + // 计算并打印从点击监控到接收H264数据的耗时 + TalkViewNativeDecodeLogic.printH264ReceiveTime(); + + // 记录接收到第一帧的时间,用于计算到关闭loading的耗时 + _firstFrameReceivedTime = DateTime.now(); + + _isFirstH264FrameReceived = false; + } + + // 创建包含帧数据和类型的Map + final Map frameMap = { + 'frameData': talkData.content, + 'frameType': talkDataH264Frame.frameType, + 'frameSeq': talkDataH264Frame.frameSeq, + 'frameSeqI': talkDataH264Frame.frameSeqI, + 'pts': talkData.durationMs, + 'scpMessage': scpMessage!, + }; + + // 如果缓冲区超出最大大小,优先丢弃P/B帧 + if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) { + // 首先尝试快速查找P帧 + int pbIndex = -1; + for (int i = 0; i < state.h264FrameBuffer.length; i++) { + if (state.h264FrameBuffer[i]['frameType'] == TalkDataH264Frame_FrameTypeE.P) { + pbIndex = i; + break; + } + } + + if (pbIndex != -1) { + state.h264FrameBuffer.removeAt(pbIndex); + } else { + // 如果没有找到P帧,则移除最旧的帧 + state.h264FrameBuffer.removeAt(0); + } + } + + // 将帧添加到缓冲区 + state.h264FrameBuffer.add(frameMap); } } else { AppLog.log('无法处理H264帧:textureId为空'); diff --git 
a/lib/talk/starChart/views/native/talk_view_native_decode_state.dart b/lib/talk/starChart/views/native/talk_view_native_decode_state.dart index dfd2b846..7fd70e97 100644 --- a/lib/talk/starChart/views/native/talk_view_native_decode_state.dart +++ b/lib/talk/starChart/views/native/talk_view_native_decode_state.dart @@ -109,12 +109,12 @@ class TalkViewNativeDecodeState { // H264帧缓冲区相关 final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区,存储帧数据和类型 - int maxFrameBufferSize = 3; // 最大缓冲区大小,减小以降低延迟 + int maxFrameBufferSize = 2; // 最大缓冲区大小,减小以降低延迟 final int targetFps = 25; // 目标解码帧率,只是为了快速填充native的缓冲区 final int adaptiveBufferSizeMin = 2; // 自适应缓冲区最小大小 final int adaptiveBufferSizeMax = 6; // 自适应缓冲区最大大小 final int networkQualityCheckIntervalMs = 2000; // 网络质量检查间隔(毫秒) - final int frameProcessIntervalMs = 10; // 帧处理间隔(毫秒),提高响应速度 + int frameProcessIntervalMs = 10; // 帧处理间隔(毫秒),提高响应速度 Timer? frameProcessTimer; // 帧处理定时器 bool isProcessingFrame = false; // 是否正在处理帧 int lastProcessedTimestamp = 0; // 上次处理帧的时间戳