Record the time the first frame is received; prevent auto-reconnect after hang-up; measure the elapsed time from receiving the first frame to dismissing the loading indicator

sky.min 2026-01-16 09:49:27 +08:00
parent ce350aecca
commit a2801fe613
2 changed files with 114 additions and 31 deletions
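
The diff instruments two intervals: from the user tapping "monitor" to the first H264 frame arriving, and from that first frame to the first rendered frame, at which point the loading indicator is dismissed. Below is a minimal standalone sketch of that pattern, mirroring the identifiers in the diff; print stands in for AppLog.log, and the call sites are hypothetical:

// Two-stage latency instrumentation, distilled from the diff below.
class MonitorLatency {
  static DateTime? _monitorStartTime; // set when the user taps "monitor"
  DateTime? _firstFrameReceivedTime;  // set when the first H264 frame arrives

  static void setMonitorStartTime(DateTime t) => _monitorStartTime = t;

  // Call when the first H264 frame is received.
  void onFirstFrameReceived() {
    if (_monitorStartTime != null) {
      final d = DateTime.now().difference(_monitorStartTime!);
      print('tap -> first H264 frame: ${d.inMilliseconds} ms');
      _monitorStartTime = null; // report once
    }
    _firstFrameReceivedTime = DateTime.now();
  }

  // Call when the first frame is rendered and loading is dismissed.
  void onFirstFrameRendered() {
    if (_firstFrameReceivedTime != null) {
      final d = DateTime.now().difference(_firstFrameReceivedTime!);
      print('first frame -> loading dismissed: ${d.inMilliseconds} ms');
      _firstFrameReceivedTime = null;
    }
  }
}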

File 1: TalkViewNativeDecodeLogic

@@ -1,5 +1,6 @@
 import 'dart:async';
 import 'dart:io';
+import 'dart:math'; // for the Random class
 import 'dart:ui' as ui;
 import 'package:flutter/foundation.dart';
@@ -89,6 +90,35 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   int? lastDecodedIFrameSeq;
   // Timestamp of the last I frame
   DateTime? _lastIFrameTime;
+  // Whether the next H264 frame will be the first one received
+  bool _isFirstH264FrameReceived = true;
+  // Time at which the first H264 frame was received
+  DateTime? _firstFrameReceivedTime;
+  // Time at which the user tapped "monitor"
+  static DateTime? _monitorStartTime;
+  // Record the monitoring start time
+  static void setMonitorStartTime(DateTime startTime) {
+    _monitorStartTime = startTime;
+    AppLog.log('Monitoring start time recorded: $startTime');
+  }
+  // Log the elapsed time from tapping "monitor" to receiving H264 data
+  static void printH264ReceiveTime() {
+    if (_monitorStartTime != null) {
+      final Duration duration = DateTime.now().difference(_monitorStartTime!);
+      AppLog.log('Elapsed from tapping monitor to receiving H264 data: ${duration.inMilliseconds} ms (${duration.inSeconds}.${duration.inMilliseconds % 1000} s)');
+      // Clear so the measurement is reported only once
+      _monitorStartTime = null;
+    }
+  }
   // Initialize the video decoder
   Future<void> _initVideoDecoder() async {
     try {
@@ -96,33 +126,38 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       int width = StartChartManage().videoWidth;
       int height = StartChartManage().videoHeight;
-      if (Platform.isIOS) {
-        // On iOS the first tap on "monitor" shows no picture
-        if (width == 0 || height == 0) {
-          int attempts = 0;
-          const maxAttempts = 20; // up to 2 s (20 * 100 ms)
-          while ((width == 0 || height == 0) && attempts < maxAttempts) {
-            await Future.delayed(const Duration(milliseconds: 100));
-            width = StartChartManage().videoWidth;
-            height = StartChartManage().videoHeight;
-            attempts++;
-          }
-          // Fall back to defaults if the parameters never arrive
-          if (width == 0 || height == 0) {
-            width = 864;
-            height = 480;
-            AppLog.log('Using default video parameters: ${width}x$height');
-          } else {
-            AppLog.log('Got video parameters: ${width}x$height');
-          }
-        }
-      }
+      // On iOS the first tap on "monitor" shows no picture
+      if (Platform.isIOS && (width == 0 || height == 0)) {
+        // Poll via Future.microtask with a shorter delay instead of a long blocking wait
+        int attempts = 0;
+        const maxAttempts = 10; // up to 0.5 s (10 * 50 ms)
+        while ((width == 0 || height == 0) && attempts < maxAttempts) {
+          await Future.microtask(() async {
+            await Future.delayed(const Duration(milliseconds: 50));
+            width = StartChartManage().videoWidth;
+            height = StartChartManage().videoHeight;
+          });
+          attempts++;
+        }
+        // Fall back to defaults if the parameters never arrive
+        if (width == 0 || height == 0) {
+          width = 864;
+          height = 480;
+          AppLog.log('Using default video parameters: ${width}x$height');
+        } else {
+          AppLog.log('Got video parameters: ${width}x$height');
+        }
+      }
       // Round down to even dimensions, as required by the H264 standard
       width = (width / 2).floor() * 2;
       height = (height / 2).floor() * 2;
       // Build the decoder configuration
       final config = VideoDecoderConfig(
         width: width,
         height: height,
         codecType: 'h264',
       );
@@ -133,6 +168,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       AppLog.log('Video decoder initialized successfully, textureId=$textureId');
       VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
         AppLog.log('Rendering has started =======');
+        // Measure from first frame received to dismissing the loading indicator
+        if (_firstFrameReceivedTime != null) {
+          final Duration renderToLoadingDuration = DateTime.now().difference(_firstFrameReceivedTime!);
+          AppLog.log('Elapsed from first frame received to loading dismissed: ${renderToLoadingDuration.inMilliseconds} ms (${renderToLoadingDuration.inSeconds}.${renderToLoadingDuration.inMilliseconds % 1000} s)');
+          // Clear so the measurement is reported only once
+          _firstFrameReceivedTime = null;
+        }
         // Dismiss the loading indicator
         Future.microtask(() => state.isLoading.value = false);
       });
@@ -144,7 +189,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     } catch (e) {
       AppLog.log('Error initializing video decoder: $e');
       // Retry after a short delay
-      await Future.delayed(const Duration(seconds: 2));
+      await Future.delayed(const Duration(milliseconds: 500)); // shortened retry delay
       if (!Get.isRegistered<TalkViewNativeDecodeLogic>()) {
         return; // controller already disposed; stop retrying
       }
@@ -633,6 +678,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     _startProcessingAudioTimer?.cancel();
     _startProcessingAudioTimer = null;
     _bufferedAudioFrames.clear();
+    // Stop the call-request timer to prevent auto-reconnect after hang-up
+    StartChartManage().stopCallRequestMessageTimer();
     super.onClose();
   }
@@ -899,8 +946,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       state.textureId.value = null;
     }
     // Give the old decoder a moment to release its resources
-    await Future.delayed(Duration(milliseconds: 100));
+    await Future.delayed(Duration(milliseconds: 50));
     // Recreate the decoder configuration
     final config = VideoDecoderConfig(
@@ -967,14 +1014,50 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // Handle an H264 frame
     if (state.textureId.value != null) {
       if (talkDataH264Frame != null) {
-        _addFrameToBuffer(
-          talkData.content,
-          talkDataH264Frame.frameType,
-          talkData.durationMs,
-          talkDataH264Frame.frameSeq,
-          talkDataH264Frame.frameSeqI,
-          scpMessage!,
-        );
+        // Record when the first H264 frame arrives and report the elapsed times
+        if (_isFirstH264FrameReceived) {
+          AppLog.log('First H264 frame received at: ${DateTime.now()}');
+          // Elapsed time from tapping "monitor" to receiving H264 data
+          TalkViewNativeDecodeLogic.printH264ReceiveTime();
+          // Start timing from first frame received to loading dismissed
+          _firstFrameReceivedTime = DateTime.now();
+          _isFirstH264FrameReceived = false;
+        }
+        // Wrap the frame in a Map for the buffer
+        final Map<String, dynamic> frameMap = {
+          'frameData': talkData.content,
+          'frameType': talkDataH264Frame.frameType,
+          'frameSeq': talkDataH264Frame.frameSeq,
+          'frameSeqI': talkDataH264Frame.frameSeqI,
+          'pts': talkData.durationMs,
+          'scpMessage': scpMessage!,
+        };
+        // When the buffer is full, prefer dropping a P/B frame
+        if (state.h264FrameBuffer.length >= state.maxFrameBufferSize) {
+          // Find the first P frame in the buffer
+          int pbIndex = -1;
+          for (int i = 0; i < state.h264FrameBuffer.length; i++) {
+            if (state.h264FrameBuffer[i]['frameType'] == TalkDataH264Frame_FrameTypeE.P) {
+              pbIndex = i;
+              break;
+            }
+          }
+          if (pbIndex != -1) {
+            state.h264FrameBuffer.removeAt(pbIndex);
+          } else {
+            // No P frame found; drop the oldest frame instead
+            state.h264FrameBuffer.removeAt(0);
+          }
+        }
+        // Enqueue the new frame
+        state.h264FrameBuffer.add(frameMap);
       }
     } else {
       AppLog.log('Cannot process H264 frame: textureId is null');
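
The overflow branch above prefers to evict a buffered P frame so that I frames, which later frames depend on, stay in the buffer; only when no P frame is present does it fall back to dropping the oldest entry. A standalone sketch of that eviction policy, with a simplified FrameType enum standing in for TalkDataH264Frame_FrameTypeE:

// Standalone sketch of the eviction policy used above; FrameType is a
// simplified stand-in for TalkDataH264Frame_FrameTypeE.
enum FrameType { i, p }

void addWithEviction(List<Map<String, dynamic>> buffer,
    Map<String, dynamic> frame, int maxSize) {
  if (buffer.length >= maxSize) {
    // Prefer evicting the first P frame so buffered I frames survive.
    final pIndex = buffer.indexWhere((f) => f['frameType'] == FrameType.p);
    buffer.removeAt(pIndex != -1 ? pIndex : 0); // otherwise drop the oldest
  }
  buffer.add(frame);
}

void main() {
  final buffer = <Map<String, dynamic>>[
    {'frameType': FrameType.i, 'seq': 1},
    {'frameType': FrameType.p, 'seq': 2},
  ];
  addWithEviction(buffer, {'frameType': FrameType.p, 'seq': 3}, 2);
  print(buffer.map((f) => f['seq']).toList()); // [1, 3]: the P frame was evicted
}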

File 2: TalkViewNativeDecodeState

@@ -109,12 +109,12 @@ class TalkViewNativeDecodeState {
   // H264 frame buffer and related settings
   final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264 frame buffer
-  int maxFrameBufferSize = 3; // max frames kept in the buffer
+  int maxFrameBufferSize = 2; // max frames kept in the buffer
   final int targetFps = 25; // target frame rate, used with the native-side buffer
   final int adaptiveBufferSizeMin = 2; // adaptive buffer lower bound
   final int adaptiveBufferSizeMax = 6; // adaptive buffer upper bound
   final int networkQualityCheckIntervalMs = 2000; // network quality check interval (ms)
-  final int frameProcessIntervalMs = 10; // frame processing interval (ms)
+  int frameProcessIntervalMs = 10; // frame processing interval (ms); non-final so it can be tuned at runtime
   Timer? frameProcessTimer; // frame processing timer
   bool isProcessingFrame = false; // whether a frame is currently being processed
   int lastProcessedTimestamp = 0; // timestamp of the last processed frame
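
With maxFrameBufferSize and frameProcessIntervalMs no longer final, they can be adjusted while a session is running. The diff does not show the tuning code itself; the sketch below is a hypothetical use of the adaptive bounds already present in the state class, with droppedFramesPerCheck as an assumed input:

// Hypothetical adaptive tuning; the commit only makes the fields mutable,
// so this logic is an assumption rather than part of the diff.
void adjustBufferForNetwork(
    TalkViewNativeDecodeState state, int droppedFramesPerCheck) {
  // Grow the buffer when frames are being dropped, shrink it when stable,
  // always staying within the adaptive bounds.
  final target = droppedFramesPerCheck > 0
      ? state.maxFrameBufferSize + 1
      : state.maxFrameBufferSize - 1;
  state.maxFrameBufferSize = target
      .clamp(state.adaptiveBufferSizeMin, state.adaptiveBufferSizeMax)
      .toInt();
}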