From a9f96b81393dd813ce4faf5fe6cf7c39ccf0382e Mon Sep 17 00:00:00 2001 From: liyi Date: Wed, 23 Apr 2025 16:36:55 +0800 Subject: [PATCH] =?UTF-8?q?feat:v1=E7=89=88=E6=9C=AC=E5=AE=9E=E7=8E=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 1 - .../video_decode_plugin/VideoDecodePlugin.kt | 81 ++- .../video_decode_plugin/VideoDecoder.kt | 95 +--- .../video_decode_plugin/VideoDecoderConfig.kt | 18 +- example/lib/main.dart | 504 +++++++++++------- lib/video_decode_plugin.dart | 410 +++++--------- 6 files changed, 509 insertions(+), 600 deletions(-) diff --git a/README.md b/README.md index 84d9762..6bb3859 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,6 @@ final config = VideoDecoderConfig( height: 480, // 视频高度 codecType: CodecType.h264, // 编解码类型:h264或h265 frameRate: 30, // 目标帧率(可选) - bufferSize: 30, // 缓冲区大小 isDebug: true, // 是否启用详细日志 ); diff --git a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt index c100783..433a76e 100644 --- a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt +++ b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt @@ -129,10 +129,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { val height = call.argument("height") ?: 360 val frameRate = call.argument("frameRate") val codecType = call.argument("codecType") ?: "h264" - val bufferSize = call.argument("bufferSize") ?: 25 - val threadCount = call.argument("threadCount") ?: 1 val isDebug = call.argument("isDebug") ?: false - val enableHardwareDecoder = call.argument("enableHardwareDecoder") ?: true // 更新插件的调试模式标志 this.isDebugMode = isDebug @@ -156,15 +153,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { height = height, codecType = codecType, frameRate = frameRate, - enableHardwareDecoder = enableHardwareDecoder, - threadCount = threadCount, - bufferSize = bufferSize, - isDebug = isDebug, - enableDynamicThresholds = call.argument("enableDynamicThresholds") ?: true, - initialMaxPFrames = call.argument("initialMaxPFrames") ?: 10, - initialIFrameTimeoutMs = call.argument("initialIFrameTimeoutMs") ?: 500, - minMaxPFrames = call.argument("minMaxPFrames") ?: 5, - maxMaxPFrames = call.argument("maxMaxPFrames") ?: 30 + isDebug = isDebug ) // 创建解码器 @@ -184,12 +173,28 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { if (renderedFrames == 0) { // 这是初始化预通知 logDebug("[预通知] 发送初始帧可用通知给Flutter,纹理ID: $textureId(无实际视频数据)") + + // 发送帧可用通知(带预通知标志) + channel.invokeMethod("onFrameAvailable", mapOf( + "textureId" to textureId, + "isPrenotification" to true + )) + + // 发送解码器状态通知(准备就绪) + sendDecoderState(textureId, "ready", stats) } else { // 这是实际帧通知 logDebug("发送帧可用通知给Flutter,纹理ID: $textureId,已渲染帧数: $renderedFrames") + + // 发送帧可用通知(实际帧) + channel.invokeMethod("onFrameAvailable", mapOf( + "textureId" to textureId, + "isPrenotification" to false + )) + + // 发送解码器状态通知(渲染中) + sendDecoderState(textureId, "rendering", stats) } - - channel.invokeMethod("onFrameAvailable", mapOf("textureId" to textureId)) } catch (e: Exception) { logError("通知Flutter更新纹理失败", e) } @@ -198,12 +203,19 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { override fun onError(error: String) { logError("解码器错误: $error") + + // 发送错误状态通知 + val stats = decoders[textureId]?.getStatistics() ?: mapOf() + sendDecoderState(textureId, "error", stats + mapOf("errorMessage" to error)) } } // 保存解码器 
decoders[textureId] = decoder + // 发送初始化状态 + sendDecoderState(textureId, "initializing", decoder.getStatistics()) + // 返回纹理ID result.success(textureId) @@ -235,6 +247,16 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { // 解码帧 val success = decoder.decodeFrame(frameData, isIFrame) + // 发送更新后的解码器状态(在帧解码后,无论成功与否) + val stats = decoder.getStatistics() + + // 根据是否有渲染帧确定状态 + val renderedFrames = stats["renderedFrames"] as? Int ?: 0 + val state = if (renderedFrames > 0) "rendering" else "ready" + + // 发送状态更新 + sendDecoderState(textureId, state, stats) + // 返回结果 result.success(success) @@ -261,6 +283,9 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { return } + // 发送释放状态 + sendDecoderState(textureId, "released", decoder.getStatistics()) + // 释放解码器 decoder.release() @@ -309,6 +334,28 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { } } + /** + * 发送解码器状态更新 + */ + private fun sendDecoderState(textureId: Long, state: String, stats: Map) { + runOnMainThread { + try { + logDebug("发送解码器状态更新: 纹理ID=$textureId, 状态=$state") + + // 构造参数 + val params = HashMap() + params["textureId"] = textureId + params["state"] = state + params["stats"] = stats + + // 发送状态更新 + channel.invokeMethod("onDecoderState", params) + } catch (e: Exception) { + logError("发送解码器状态更新失败", e) + } + } + } + /** * 在主线程上执行任务 */ @@ -325,8 +372,12 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { */ override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) { // 释放所有解码器 - for (decoder in decoders.values) { + for ((textureId, decoder) in decoders) { try { + // 发送释放状态 + sendDecoderState(textureId, "released", decoder.getStatistics()) + + // 释放解码器 decoder.release() } catch (e: Exception) { logError("插件分离时释放解码器失败", e) diff --git a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt index 520938d..277eca5 100644 --- a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt +++ b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt @@ -34,16 +34,6 @@ class VideoDecoder( private const val NAL_UNIT_TYPE_PPS = 8 private const val NAL_UNIT_TYPE_IDR = 5 private const val NAL_UNIT_TYPE_NON_IDR = 1 // P帧 - - // 最大允许连续P帧数 - private const val MAX_CONSECUTIVE_P_FRAMES = 10 - - // I帧超时时间(毫秒)- 超过此时间没有收到I帧则丢弃P帧 - private const val MAX_IFRAME_TIMEOUT_MS = 500 - - // 异步模式参数 - private const val LOW_LATENCY_MODE = true - private const val OPERATING_RATE = 90 // 解码速率提高到90FPS } // 回调接口 @@ -94,10 +84,6 @@ class VideoDecoder( private var iFrameIntervals = mutableListOf() private val GOP_HISTORY_SIZE = 5 // 记录最近5个GOP间隔 - // 动态阈值参数 - private var dynamicMaxConsecutivePFrames = config.initialMaxPFrames - private var dynamicIFrameTimeout = config.initialIFrameTimeoutMs - // 用于避免重复处理相同SPS/PPS的缓存 private var lastSPSHash: Int? = null private var lastPPSHash: Int? 
= null @@ -332,39 +318,6 @@ class VideoDecoder( } else if (effectiveType == NAL_UNIT_TYPE_IDR) { hasSentIDR.set(true) val currentTime = System.currentTimeMillis() - - // 计算I帧间隔并更新动态参数 - if (config.enableDynamicThresholds && lastDetectedIFrameTime > 0) { - val iFrameInterval = currentTime - lastDetectedIFrameTime - - // 添加到历史记录 - iFrameIntervals.add(iFrameInterval) - if (iFrameIntervals.size > GOP_HISTORY_SIZE) { - iFrameIntervals.removeAt(0) - } - - // 计算平均GOP大小 - if (iFrameIntervals.isNotEmpty()) { - val avgIFrameInterval = iFrameIntervals.average().toLong() - val frameRate = config.frameRate ?: 30 - detectedGopSize = (avgIFrameInterval * frameRate / 1000).toInt() - - if (detectedGopSize > 0) { - // 动态调整最大连续P帧阈值 - 设置为GOP的1.5倍,但受配置限制 - val newMaxPFrames = (detectedGopSize * 1.5).toInt() - dynamicMaxConsecutivePFrames = newMaxPFrames.coerceIn( - config.minMaxPFrames, - config.maxMaxPFrames - ) - - // 动态调整I帧超时时间 - 设置为平均I帧间隔的2倍,但至少为200ms - dynamicIFrameTimeout = Math.max(200, avgIFrameInterval.toInt() * 2) - - logDebug("动态参数更新: GOP=$detectedGopSize, 最大P帧=$dynamicMaxConsecutivePFrames, I帧超时=${dynamicIFrameTimeout}ms") - } - } - } - lastDetectedIFrameTime = currentTime lastIFrameTimeMs = currentTime consecutivePFrameCount = 0 @@ -377,41 +330,6 @@ class VideoDecoder( } consecutivePFrameCount++ - - // 检查连续P帧数是否超过阈值 - 使用动态阈值或固定阈值 - val maxPFrames = if (config.enableDynamicThresholds) - dynamicMaxConsecutivePFrames - else - MAX_CONSECUTIVE_P_FRAMES - - if (consecutivePFrameCount >= maxPFrames) { - logWarning("丢弃P帧,因为连续P帧过多($consecutivePFrameCount > $maxPFrames)") - droppedFrameCount++ - return false - } - - // 检查是否自上一个I帧过去太久 - 使用动态阈值或固定阈值 - if (lastIFrameTimeMs > 0) { - val timeSinceLastIFrame = System.currentTimeMillis() - lastIFrameTimeMs - val iFrameTimeout = if (config.enableDynamicThresholds) - dynamicIFrameTimeout - else - MAX_IFRAME_TIMEOUT_MS - - if (timeSinceLastIFrame > iFrameTimeout) { - logWarning("丢弃P帧,因为距离上一个I帧时间过长(${timeSinceLastIFrame}ms > ${iFrameTimeout}ms)") - droppedFrameCount++ - return false - } - } - - // 帧大小异常检测 - 如果帧过小或过大,可能是损坏的帧 - val expectedFrameSize = config.width * config.height / 8 // 粗略估计 - if (frameData.size < 10 || frameData.size > expectedFrameSize * 2) { - logWarning("丢弃帧,因为帧大小异常(${frameData.size}字节)") - droppedFrameCount++ - return false - } } // 记录帧信息 @@ -570,24 +488,13 @@ class VideoDecoder( "renderedFrames" to renderedFrameCount, "droppedFrames" to droppedFrameCount, "fps" to currentFps, - "detectedGopSize" to detectedGopSize, - "dynamicMaxConsecutivePFrames" to dynamicMaxConsecutivePFrames, - "dynamicIFrameTimeoutMs" to dynamicIFrameTimeout, "hasSentSPS" to hasSentSPS.get(), "hasSentPPS" to hasSentPPS.get(), "hasSentIDR" to hasSentIDR.get(), "consecutivePFrames" to consecutivePFrameCount, "targetWidth" to config.width, "targetHeight" to config.height, - "frameRate" to (config.frameRate ?: 0), - "enableDynamicThresholds" to config.enableDynamicThresholds + "frameRate" to (config.frameRate ?: 0) ) } - - /** - * 获取当前渲染FPS - */ - fun getCurrentFps(): Float { - return currentFps - } } \ No newline at end of file diff --git a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoderConfig.kt b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoderConfig.kt index ceb3091..105acc6 100644 --- a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoderConfig.kt +++ b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoderConfig.kt @@ -7,28 +7,12 @@ package top.skychip.video_decode_plugin * @param height 视频高度 * @param 
codecType 编解码器类型,默认为h264 * @param frameRate 帧率,可为空 - * @param enableHardwareDecoder 是否启用硬件解码 - * @param threadCount 解码线程数 - * @param bufferSize 输入缓冲区大小 * @param isDebug 是否开启调试日志 - * @param enableDynamicThresholds 是否启用动态阈值 - * @param initialMaxPFrames 初始最大连续P帧数 - * @param initialIFrameTimeoutMs 初始I帧超时时间 - * @param minMaxPFrames 最小最大连续P帧数 - * @param maxMaxPFrames 最大最大连续P帧数 */ data class VideoDecoderConfig( val width: Int, val height: Int, val codecType: String = "h264", val frameRate: Int? = null, - val enableHardwareDecoder: Boolean = true, - val threadCount: Int = 1, - val bufferSize: Int = 30, - val isDebug: Boolean = false, - val enableDynamicThresholds: Boolean = true, - val initialMaxPFrames: Int = 10, - val initialIFrameTimeoutMs: Int = 500, - val minMaxPFrames: Int = 5, - val maxMaxPFrames: Int = 30 + val isDebug: Boolean = false ) \ No newline at end of file diff --git a/example/lib/main.dart b/example/lib/main.dart index 95d54b9..c0b2e6f 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -8,54 +8,6 @@ import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:video_decode_plugin/video_decode_plugin.dart'; -// 测试图案绘制器 -class TestPatternPainter extends CustomPainter { - @override - void paint(Canvas canvas, Size size) { - final colors = [ - Colors.red, - Colors.green, - Colors.blue, - Colors.yellow, - Colors.purple, - ]; - - const int gridSize = 4; - final double cellWidth = size.width / gridSize; - final double cellHeight = size.height / gridSize; - - for (int x = 0; x < gridSize; x++) { - for (int y = 0; y < gridSize; y++) { - final paint = Paint() - ..color = colors[(x + y) % colors.length] - ..style = PaintingStyle.fill; - - final rect = - Rect.fromLTWH(x * cellWidth, y * cellHeight, cellWidth, cellHeight); - - canvas.drawRect(rect, paint); - } - } - - // 绘制中心白色十字 - final paint = Paint() - ..color = Colors.white - ..style = PaintingStyle.stroke - ..strokeWidth = 5.0; - - canvas.drawLine(Offset(size.width / 2 - 50, size.height / 2), - Offset(size.width / 2 + 50, size.height / 2), paint); - - canvas.drawLine(Offset(size.width / 2, size.height / 2 - 50), - Offset(size.width / 2, size.height / 2 + 50), paint); - } - - @override - bool shouldRepaint(covariant CustomPainter oldDelegate) { - return false; - } -} - // 用于存储H264文件中解析出的帧 class H264Frame { final Uint8List data; @@ -187,18 +139,17 @@ class _VideoViewState extends State { String _statusText = "未初始化"; String _error = ""; + // 解码器状态信息 + DecoderState _decoderState = DecoderState.initializing; + String _decoderStateText = "初始化中"; + bool _isActuallyRendering = false; // 区分预通知和实际渲染状态 + // 帧统计 int _renderedFrameCount = 0; DateTime? _lastFrameTime; double _fps = 0; double _decoderFps = 0; // 解码器内部计算的FPS - // 动态阈值参数 - int _detectedGopSize = 0; - int _dynamicMaxPFrames = 0; - int _dynamicIFrameTimeoutMs = 0; - bool _enableDynamicThresholds = true; - // 用于刷新解码器统计信息的定时器 Timer? _statsTimer; @@ -228,15 +179,25 @@ class _VideoViewState extends State { bool _showingErrorFrame = false; Timer? 
_errorFrameResetTimer; + // 额外的解码器属性 + int _totalFrames = 0; + int _droppedFrames = 0; + bool _hasSentIDR = false; + bool _hasSentSPS = false; + bool _hasSentPPS = false; + @override void initState() { super.initState(); _loadH264File(); - // 启动定时器刷新解码器统计信息 + // 仅在需要时使用定时器更新一些UI元素 _statsTimer = Timer.periodic(Duration(milliseconds: 1000), (timer) { - if (_isInitialized && _textureId != null) { - _updateDecoderStats(); + if (mounted) { + setState(() { + // 更新UI元素,例如帧率计算等 + // 解码器统计信息现在通过回调获取,不需要在这里请求 + }); } }); } @@ -246,7 +207,7 @@ class _VideoViewState extends State { _stopPlaying(); _releaseDecoder(); _frameTimer?.cancel(); - _statsTimer?.cancel(); // 停止统计信息更新定时器 + _statsTimer?.cancel(); super.dispose(); } @@ -421,14 +382,8 @@ class _VideoViewState extends State { width: 640, height: 480, codecType: CodecType.h264, - frameRate: 30, - bufferSize: 30, + frameRate: 24, // 设置为接近原视频的24fps (23.976) isDebug: true, // 打开调试日志 - enableDynamicThresholds: _enableDynamicThresholds, // 使用动态阈值 - initialMaxPFrames: 60, // 初始最大连续P帧数 - initialIFrameTimeoutMs: 5000, // 初始I帧超时时间 - minMaxPFrames: 5, // 最小最大连续P帧数 - maxMaxPFrames: 60, // 最大最大连续P帧数 ); final textureId = await VideoDecodePlugin.initDecoder(config); @@ -440,11 +395,18 @@ class _VideoViewState extends State { VideoDecodePlugin.setFrameCallbackForTexture( textureId, _onFrameAvailable); + // 设置状态回调 + VideoDecodePlugin.setStateCallbackForTexture( + textureId, _onDecoderStateChanged); + setState(() { _isInitialized = true; _error = ""; _statusText = "就绪"; _renderedFrameCount = 0; // 重置帧计数 + _decoderState = DecoderState.initializing; + _decoderStateText = "初始化中"; + _isActuallyRendering = false; }); _log("解码器初始化成功,纹理ID: $_textureId"); @@ -467,6 +429,73 @@ class _VideoViewState extends State { } } + // 解码器状态变化回调 + void _onDecoderStateChanged( + int textureId, DecoderState state, Map stats) { + if (!mounted) return; + + String stateText; + switch (state) { + case DecoderState.initializing: + stateText = "初始化中"; + break; + case DecoderState.ready: + stateText = "准备就绪"; + break; + case DecoderState.rendering: + stateText = "渲染中"; + // 标记实际渲染状态 + _isActuallyRendering = true; + break; + case DecoderState.error: + stateText = "出错"; + // 获取错误信息 + final errorMessage = stats['errorMessage'] as String?; + if (errorMessage != null) { + _log("解码器错误: $errorMessage"); + } + break; + case DecoderState.released: + stateText = "已释放"; + break; + default: + stateText = "未知状态"; + } + + // 更新解码器状态UI + setState(() { + _decoderState = state; + _decoderStateText = stateText; + + // 更新统计信息 + if (stats.isNotEmpty) { + _decoderFps = (stats['fps'] as num?)?.toDouble() ?? 0.0; + _renderedFrameCount = (stats['renderedFrames'] as int?) ?? 0; + + // 更新更多统计信息 + _totalFrames = (stats['totalFrames'] as int?) ?? 0; + _droppedFrames = (stats['droppedFrames'] as int?) ?? 0; + _hasSentIDR = (stats['hasSentIDR'] as bool?) ?? false; + _hasSentSPS = (stats['hasSentSPS'] as bool?) ?? false; + _hasSentPPS = (stats['hasSentPPS'] as bool?) ?? false; + + // 更新状态文本 + if (state == DecoderState.rendering) { + _statusText = _isPlaying + ? 
"播放中 (解码总帧: $_totalFrames, 丢弃: $_droppedFrames)" + : "已停止"; + } + } + }); + + String decoderInfo = "解码器状态更新: $_decoderStateText, " + + "帧数据: 渲染=$_renderedFrameCount, 总计=$_totalFrames, 丢弃=$_droppedFrames, " + + "FPS=${_decoderFps.toStringAsFixed(1)}, " + + "参数集: SPS=${_hasSentSPS}, PPS=${_hasSentPPS}, IDR=${_hasSentIDR}"; + + _log(decoderInfo); + } + // 添加一个测试I帧来触发渲染 Future _sendTestIFrame() async { if (_textureId == null || !_isInitialized) { @@ -498,6 +527,7 @@ class _VideoViewState extends State { } Future _releaseDecoder() async { + _statsTimer?.cancel(); // 取消统计信息定时器 if (_textureId != null) { _log("正在释放解码器资源"); @@ -508,6 +538,9 @@ class _VideoViewState extends State { _textureId = null; _isInitialized = false; _statusText = "已释放"; + _isActuallyRendering = false; + _decoderState = DecoderState.released; + _decoderStateText = "已释放"; }); _log("解码器资源释放成功"); @@ -597,8 +630,8 @@ class _VideoViewState extends State { void _startDecodingFrames() { _log("开始解码视频帧"); - // 使用更低的帧率更稳定 - const int frameIntervalMs = 50; // 20 fps + // 使用与原视频接近的帧率 + const int frameIntervalMs = 42; // 约23.8 fps (接近原视频23.9fps) _frameTimer = Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) async { @@ -733,19 +766,21 @@ class _VideoViewState extends State { Widget _buildVideoDisplay() { if (_textureId == null) { - return Center( - child: Container( - width: 640, - height: 480, - color: Colors.black, - child: CustomPaint( - painter: TestPatternPainter(), - child: Center( - child: Text( + return Container( + width: 640, + height: 480, + color: Colors.black54, + child: Center( + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + Icon(Icons.videocam_off, size: 48, color: Colors.white70), + SizedBox(height: 16), + Text( '无可用纹理', - style: TextStyle(color: Colors.white), + style: TextStyle(color: Colors.white, fontSize: 16), ), - ), + ], ), ), ); @@ -757,9 +792,27 @@ class _VideoViewState extends State { // 背景色 Container(color: Colors.black), - // 测试图案 - 如果没有渲染任何帧则显示 - if (_renderedFrameCount == 0) - CustomPaint(painter: TestPatternPainter()), + // 无帧时显示加载指示 + if (_renderedFrameCount == 0 || !_isActuallyRendering) + Center( + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + CircularProgressIndicator( + valueColor: AlwaysStoppedAnimation(Colors.white70), + ), + SizedBox(height: 16), + Text( + _decoderState == DecoderState.initializing + ? '初始化中...' + : _decoderState == DecoderState.ready + ? '准备就绪,等待首帧...' + : '加载中...', + style: TextStyle(color: Colors.white70, fontSize: 14), + ), + ], + ), + ), // 视频纹理 - 使用RepaintBoundary和ValueKey确保正确更新 RepaintBoundary( @@ -782,16 +835,60 @@ class _VideoViewState extends State { ), ), + // 解码器状态指示 + if (_decoderState == DecoderState.error) + Container( + color: Colors.red.withOpacity(0.3), + child: Center( + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + Icon(Icons.error_outline, size: 48, color: Colors.white), + SizedBox(height: 16), + Text( + '解码器错误', + style: TextStyle( + color: Colors.white, + fontSize: 16, + fontWeight: FontWeight.bold), + ), + ], + ), + ), + ), + // 显示帧计数 - 调试用 Positioned( right: 10, top: 10, child: Container( padding: EdgeInsets.all(5), - color: Colors.black.withOpacity(0.5), - child: Text( - '帧: $_renderedFrameCount${_enablePacketLoss ? 
' (丢帧: $_droppedFramesCount)' : ''}', - style: TextStyle(color: Colors.white, fontSize: 12), + decoration: BoxDecoration( + color: Colors.black.withOpacity(0.5), + borderRadius: BorderRadius.circular(4), + ), + constraints: BoxConstraints( + maxWidth: 150, // 限制最大宽度 + ), + child: Column( + crossAxisAlignment: CrossAxisAlignment.end, + mainAxisSize: MainAxisSize.min, // 确保column只占用所需空间 + children: [ + Text( + '帧: $_renderedFrameCount', + style: TextStyle(color: Colors.white, fontSize: 12), + ), + if (_enablePacketLoss) + Text( + '丢帧: $_droppedFramesCount', + style: TextStyle( + color: _droppedFramesCount > 0 + ? Colors.orange + : Colors.white70, + fontSize: 12, + ), + ), + ], ), ), ), @@ -845,60 +942,123 @@ class _VideoViewState extends State { child: Column( crossAxisAlignment: CrossAxisAlignment.start, children: [ - Row( - mainAxisAlignment: - MainAxisAlignment.spaceBetween, - children: [ - Text('状态: $_statusText', - style: TextStyle( - fontWeight: FontWeight.bold)), - // Text('计算FPS: ${_fps.toStringAsFixed(1)}'), - ], - ), - Row( - mainAxisAlignment: - MainAxisAlignment.spaceBetween, - children: [ - Text( - '解码器FPS: ${_decoderFps.toStringAsFixed(1)}',style: TextStyle( - color: Colors.green - ),), - Text('已渲染帧数: $_renderedFrameCount'), - ], - ), - if (_error.isNotEmpty) - Text('错误: $_error', - style: TextStyle( - color: Colors.red, - fontWeight: FontWeight.bold)), - Row( - mainAxisAlignment: - MainAxisAlignment.spaceBetween, - children: [ - Text('检测到的GOP: $_detectedGopSize'), - Text('解析的帧数: ${_h264Frames.length}'), - ], - ), - Text( - 'H264文件大小: ${(_h264FileData?.length ?? 0) / 1024} KB'), + // 状态行 + Text('状态: $_statusText', + style: + TextStyle(fontWeight: FontWeight.bold)), - // 动态阈值参数显示 - if (_enableDynamicThresholds) - Padding( - padding: const EdgeInsets.only(top: 4.0), - child: Column( - crossAxisAlignment: - CrossAxisAlignment.start, - children: [ - Text('动态阈值参数:', - style: TextStyle( - fontWeight: FontWeight.bold)), - Text('最大连续P帧: $_dynamicMaxPFrames'), - Text( - 'I帧超时: ${_dynamicIFrameTimeoutMs}ms'), - ], + // 解码器状态行 + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text( + '解码器状态: $_decoderStateText', + style: TextStyle( + color: _getStateColor(), + fontWeight: FontWeight.bold), + ), + ), + + // 实际渲染状态 + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text( + '实际渲染: ${_isActuallyRendering ? "是" : "否"}', + style: TextStyle( + color: _isActuallyRendering + ? Colors.green + : Colors.orange, ), ), + ), + + // FPS和帧数信息 + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text( + '解码器FPS: ${_decoderFps.toStringAsFixed(1)}', + style: TextStyle(color: Colors.green), + ), + ), + + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text('已渲染帧数: $_renderedFrameCount'), + ), + + // 丢弃帧信息 + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text( + '已丢弃帧数: $_droppedFramesCount', + style: TextStyle( + color: _droppedFramesCount > 0 + ? Colors.orange + : Colors.black), + ), + ), + + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text('当前帧索引: $_currentFrameIndex'), + ), + + // 参数集状态 + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text('参数集状态:'), + Padding( + padding: const EdgeInsets.only( + left: 8.0, top: 2.0), + child: Text( + 'SPS: ${_hasSentSPS ? "已发送" : "未发送"}'), + ), + Padding( + padding: const EdgeInsets.only( + left: 8.0, top: 2.0), + child: Text( + 'PPS: ${_hasSentPPS ? 
"已发送" : "未发送"}'), + ), + Padding( + padding: const EdgeInsets.only( + left: 8.0, top: 2.0), + child: Text( + 'IDR: ${_hasSentIDR ? "已发送" : "未发送"}'), + ), + ], + ), + ), + + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text('总帧数: $_totalFrames'), + ), + + // 错误信息 + if (_error.isNotEmpty) + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text( + '错误: $_error', + style: TextStyle( + color: Colors.red, + fontWeight: FontWeight.bold), + ), + ), + + // H264文件信息 + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text('解析的帧数: ${_h264Frames.length}'), + ), + + Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text( + 'H264文件大小: ${((_h264FileData?.length ?? 0) ~/ 1024)} KB'), + ), ], ), ), @@ -978,40 +1138,6 @@ class _VideoViewState extends State { ], ), - // 动态阈值开关 - Row( - mainAxisAlignment: - MainAxisAlignment.spaceBetween, - children: [ - Text('动态阈值', - style: TextStyle( - fontWeight: FontWeight.bold)), - Switch( - value: _enableDynamicThresholds, - onChanged: (value) { - setState(() { - _enableDynamicThresholds = value; - // 需要重新初始化解码器以应用新设置 - if (_isInitialized) { - _log("更改动态阈值设置,需要重新初始化解码器"); - // 如果正在播放,先停止 - if (_isPlaying) { - _stopPlaying(); - } - // 延迟一下再重新初始化 - Future.delayed( - Duration(milliseconds: 100), () { - _initializeDecoder(); - }); - } - }); - }, - ), - ], - ), - - Divider(), - // 丢包率控制 Row( children: [ @@ -1159,34 +1285,38 @@ class _VideoViewState extends State { ); } - // 更新解码器统计信息 + // 此方法保留用于手动获取最新统计信息,不再需要定时调用 Future _updateDecoderStats() async { if (_textureId == null || !_isInitialized) return; try { - // 获取FPS - final fps = await VideoDecodePlugin.getCurrentFps(_textureId); - - // 获取动态阈值参数 - final thresholdParams = - await VideoDecodePlugin.getDynamicThresholdParams(_textureId); + // 获取所有解码器统计信息 + final stats = await VideoDecodePlugin.getDecoderStats(_textureId!); if (mounted) { - setState(() { - _decoderFps = fps; - _detectedGopSize = thresholdParams['detectedGopSize'] ?? 0; - _dynamicMaxPFrames = - thresholdParams['dynamicMaxConsecutivePFrames'] ?? 0; - _dynamicIFrameTimeoutMs = - thresholdParams['dynamicIFrameTimeoutMs'] ?? 0; - _enableDynamicThresholds = - thresholdParams['enableDynamicThresholds'] ?? 
true; - }); + _log("手动更新解码器统计信息: $stats"); } } catch (e) { _log("获取解码器统计信息失败: $e"); } } + + Color _getStateColor() { + switch (_decoderState) { + case DecoderState.initializing: + return Colors.orange; + case DecoderState.ready: + return Colors.green; + case DecoderState.rendering: + return Colors.blue; + case DecoderState.error: + return Colors.red; + case DecoderState.released: + return Colors.grey; + default: + return Colors.black; + } + } } // 添加错误帧绘制器 diff --git a/lib/video_decode_plugin.dart b/lib/video_decode_plugin.dart index 6aef504..7517f89 100644 --- a/lib/video_decode_plugin.dart +++ b/lib/video_decode_plugin.dart @@ -7,181 +7,6 @@ import 'package:flutter/services.dart'; import 'video_decode_plugin_platform_interface.dart'; -/// H.265/HEVC NAL单元类型定义 -class HevcNalUnitType { - static const int TRAIL_N = 0; // 尾随图片 - 非参考图片 - static const int TRAIL_R = 1; // 尾随图片 - 参考图片 - - static const int TSA_N = 2; // 时间子层访问 - 非参考图片 - static const int TSA_R = 3; // 时间子层访问 - 参考图片 - - static const int STSA_N = 4; // 分步时间子层访问 - 非参考图片 - static const int STSA_R = 5; // 分步时间子层访问 - 参考图片 - - static const int RADL_N = 6; // 随机访问解码先导 - 非参考图片 - static const int RADL_R = 7; // 随机访问解码先导 - 参考图片 - - static const int RASL_N = 8; // 随机访问跳过先导 - 非参考图片 - static const int RASL_R = 9; // 随机访问跳过先导 - 参考图片 - - static const int RSV_VCL_N10 = 10; // 保留的非IRAP VCL NAL单元类型 - static const int RSV_VCL_R11 = 11; // 保留的非IRAP VCL NAL单元类型 - static const int RSV_VCL_N12 = 12; // 保留的非IRAP VCL NAL单元类型 - static const int RSV_VCL_R13 = 13; // 保留的非IRAP VCL NAL单元类型 - static const int RSV_VCL_N14 = 14; // 保留的非IRAP VCL NAL单元类型 - static const int RSV_VCL_R15 = 15; // 保留的非IRAP VCL NAL单元类型 - - static const int BLA_W_LP = 16; // 有前导的无损拼接访问 - static const int BLA_W_RADL = 17; // 有RADL的无损拼接访问 - static const int BLA_N_LP = 18; // 无前导的无损拼接访问 - - static const int IDR_W_RADL = 19; // 有RADL的瞬时解码刷新 (IDR) - static const int IDR_N_LP = 20; // 无前导的瞬时解码刷新 (IDR) - - static const int CRA_NUT = 21; // 清理随机访问 - - static const int RSV_IRAP_VCL22 = 22; // 保留的IRAP VCL NAL单元类型 - static const int RSV_IRAP_VCL23 = 23; // 保留的IRAP VCL NAL单元类型 - - static const int RSV_VCL24 = 24; // 保留的VCL NAL单元类型 - static const int RSV_VCL25 = 25; // 保留的VCL NAL单元类型 - static const int RSV_VCL26 = 26; // 保留的VCL NAL单元类型 - static const int RSV_VCL27 = 27; // 保留的VCL NAL单元类型 - static const int RSV_VCL28 = 28; // 保留的VCL NAL单元类型 - static const int RSV_VCL29 = 29; // 保留的VCL NAL单元类型 - static const int RSV_VCL30 = 30; // 保留的VCL NAL单元类型 - static const int RSV_VCL31 = 31; // 保留的VCL NAL单元类型 - - // 非VCL NAL单元类型 - static const int VPS = 32; // 视频参数集 - static const int SPS = 33; // 序列参数集 - static const int PPS = 34; // 图像参数集 - static const int AUD = 35; // 访问单元分隔符 - static const int EOS = 36; // 序列结束 - static const int EOB = 37; // 比特流结束 - static const int FD = 38; // 填充数据 - - static const int PREFIX_SEI = 39; // 前缀辅助增强信息 - static const int SUFFIX_SEI = 40; // 后缀辅助增强信息 - - static const int RSV_NVCL41 = 41; // 保留的非VCL NAL单元类型 - static const int RSV_NVCL42 = 42; // 保留的非VCL NAL单元类型 - static const int RSV_NVCL43 = 43; // 保留的非VCL NAL单元类型 - static const int RSV_NVCL44 = 44; // 保留的非VCL NAL单元类型 - static const int RSV_NVCL45 = 45; // 保留的非VCL NAL单元类型 - static const int RSV_NVCL46 = 46; // 保留的非VCL NAL单元类型 - static const int RSV_NVCL47 = 47; // 保留的非VCL NAL单元类型 - - static const int UNSPEC48 = 48; // 未指定的类型 - static const int UNSPEC49 = 49; // 未指定的类型 - static const int UNSPEC50 = 50; // 未指定的类型 - static const int UNSPEC51 = 51; // 未指定的类型 - static const int UNSPEC52 = 52; // 未指定的类型 - static const int UNSPEC53 = 53; 
// 未指定的类型 - static const int UNSPEC54 = 54; // 未指定的类型 - static const int UNSPEC55 = 55; // 未指定的类型 - static const int UNSPEC56 = 56; // 未指定的类型 - static const int UNSPEC57 = 57; // 未指定的类型 - static const int UNSPEC58 = 58; // 未指定的类型 - static const int UNSPEC59 = 59; // 未指定的类型 - static const int UNSPEC60 = 60; // 未指定的类型 - static const int UNSPEC61 = 61; // 未指定的类型 - static const int UNSPEC62 = 62; // 未指定的类型 - static const int UNSPEC63 = 63; // 未指定的类型 - - // 帧类型别名,方便判断 - // I帧类型:IDR_W_RADL, IDR_N_LP, BLA_W_LP, BLA_W_RADL, BLA_N_LP, CRA_NUT - static const List KEY_FRAMES = [ - IDR_W_RADL, - IDR_N_LP, - BLA_W_LP, - BLA_W_RADL, - BLA_N_LP, - CRA_NUT - ]; - - // 参数集类型:VPS, SPS, PPS - static const List PARAMETER_SETS = [VPS, SPS, PPS]; - - /// 判断是否为关键帧NAL类型 - static bool isKeyFrame(int nalUnitType) { - return KEY_FRAMES.contains(nalUnitType); - } - - /// 判断是否为参数集NAL类型 - static bool isParameterSet(int nalUnitType) { - return PARAMETER_SETS.contains(nalUnitType); - } - - /// 判断是否为IDR帧 - static bool isIdrFrame(int nalUnitType) { - return nalUnitType == IDR_W_RADL || nalUnitType == IDR_N_LP; - } - - /// 获取NAL单元类型名称 - static String getName(int type) { - switch (type) { - case TRAIL_N: - return "TRAIL_N"; - case TRAIL_R: - return "TRAIL_R"; - case TSA_N: - return "TSA_N"; - case TSA_R: - return "TSA_R"; - case STSA_N: - return "STSA_N"; - case STSA_R: - return "STSA_R"; - case RADL_N: - return "RADL_N"; - case RADL_R: - return "RADL_R"; - case RASL_N: - return "RASL_N"; - case RASL_R: - return "RASL_R"; - case BLA_W_LP: - return "BLA_W_LP"; - case BLA_W_RADL: - return "BLA_W_RADL"; - case BLA_N_LP: - return "BLA_N_LP"; - case IDR_W_RADL: - return "IDR_W_RADL"; - case IDR_N_LP: - return "IDR_N_LP"; - case CRA_NUT: - return "CRA_NUT"; - case VPS: - return "VPS"; - case SPS: - return "SPS"; - case PPS: - return "PPS"; - case AUD: - return "AUD"; - case EOS: - return "EOS"; - case EOB: - return "EOB"; - case FD: - return "FD"; - case PREFIX_SEI: - return "PREFIX_SEI"; - case SUFFIX_SEI: - return "SUFFIX_SEI"; - default: - if (type >= 10 && type <= 15) return "RSV_VCL_${type}"; - if (type >= 22 && type <= 23) return "RSV_IRAP_VCL${type}"; - if (type >= 24 && type <= 31) return "RSV_VCL${type}"; - if (type >= 41 && type <= 47) return "RSV_NVCL${type}"; - if (type >= 48 && type <= 63) return "UNSPEC${type}"; - return "未知(${type})"; - } - } -} - /// 视频帧类型 enum FrameType { /// I帧 @@ -200,13 +25,36 @@ enum CodecType { h265, } +/// 解码器状态枚举 +enum DecoderState { + /// 初始化中 + initializing, + + /// 准备就绪,但还未开始实际渲染 + ready, + + /// 渲染中 + rendering, + + /// 出错 + error, + + /// 已释放 + released, +} + /// 帧可用回调函数类型 typedef FrameAvailableCallback = void Function(int textureId); +/// 解码器状态回调函数类型 +typedef DecoderStateCallback = void Function( + int textureId, DecoderState state, Map stats); + /// 解码器实例内部类 class _DecoderInstance { final int textureId; FrameAvailableCallback? frameCallback; + DecoderStateCallback? 
stateCallback; _DecoderInstance(this.textureId); } @@ -225,48 +73,16 @@ class VideoDecoderConfig { /// 编码类型,默认h264 final CodecType codecType; - /// 缓冲区大小(帧数),默认25帧 - final int bufferSize; - - /// 解码线程数,默认1线程 - final int threadCount; - /// 是否为调试模式,默认false final bool isDebug; - /// 是否启用硬件解码,默认true - final bool enableHardwareDecoder; - - /// 是否启用动态阈值,默认true - final bool enableDynamicThresholds; - - /// 初始最大连续P帧数,默认10 - final int initialMaxPFrames; - - /// 初始I帧超时时间(毫秒),默认500 - final int initialIFrameTimeoutMs; - - /// 最小最大连续P帧数,默认5 - final int minMaxPFrames; - - /// 最大最大连续P帧数,默认30 - final int maxMaxPFrames; - /// 构造函数 VideoDecoderConfig({ this.width = 640, this.height = 360, this.frameRate, this.codecType = CodecType.h264, - this.bufferSize = 25, - this.threadCount = 1, this.isDebug = false, - this.enableHardwareDecoder = true, - this.enableDynamicThresholds = true, - this.initialMaxPFrames = 10, - this.initialIFrameTimeoutMs = 500, - this.minMaxPFrames = 5, - this.maxMaxPFrames = 30, }); /// 转换为Map @@ -276,15 +92,7 @@ class VideoDecoderConfig { 'height': height, 'frameRate': frameRate, 'codecType': codecType.toString().split('.').last, - 'bufferSize': bufferSize, - 'threadCount': threadCount, 'isDebug': isDebug, - 'enableHardwareDecoder': enableHardwareDecoder, - 'enableDynamicThresholds': enableDynamicThresholds, - 'initialMaxPFrames': initialMaxPFrames, - 'initialIFrameTimeoutMs': initialIFrameTimeoutMs, - 'minMaxPFrames': minMaxPFrames, - 'maxMaxPFrames': maxMaxPFrames, }; } } @@ -314,7 +122,6 @@ class VideoDecodePlugin { // 错误日志抑制 - 防止重复日志 static int _uninitializedErrorCount = 0; static int _lastErrorLogTime = 0; - static const int _ERROR_LOG_THRESHOLD = 5; // 每5秒最多输出一次汇总 /// 日志输出控制 - 调试信息 static void _logDebug(String message) { @@ -368,25 +175,79 @@ class VideoDecodePlugin { // 调用特定纹理ID的帧回调 final decoder = _decoders[textureId]; if (decoder != null && decoder.frameCallback != null) { - // 获取解码器统计信息来检查是否是预通知 - getDecoderStats(textureId).then((stats) { - final renderedFrames = stats['renderedFrames'] ?? 0; - if (renderedFrames == 0) { - _logDebug('[预通知] 收到初始帧可用通知(无实际视频数据),纹理ID: $textureId'); - } else { - _logDebug('收到帧可用通知,纹理ID: $textureId,已渲染帧数: $renderedFrames'); - } + // 获取是否是预通知 + final bool isPrenotification = args['isPrenotification'] ?? false; - // 调用回调函数 - decoder.frameCallback!(textureId); - }).catchError((error) { - // 如果无法获取统计信息,仍然调用回调但不区分类型 + if (isPrenotification) { + _logDebug('[预通知] 收到初始帧可用通知(无实际视频数据),纹理ID: $textureId'); + } else { _logDebug('收到帧可用通知,纹理ID: $textureId'); - decoder.frameCallback!(textureId); - }); + } + + // 调用回调函数 + decoder.frameCallback!(textureId); } return null; + + case 'onDecoderState': + final Map args = call.arguments; + final int textureId = args['textureId']; + final String stateStr = args['state']; + final Map statsMap = args['stats']; + + // 检查解码器是否正在释放 + bool isReleasing = false; + + // 同步访问解码器状态 + _withLock(_decoderStateLock, () { + isReleasing = _isDecoderReleasing[textureId] ?? 
false; + }); + + if (isReleasing && stateStr != 'released') { + _logDebug('收到状态回调但解码器 $textureId 正在释放,忽略'); + return null; + } + + // 将状态字符串转换为枚举 + DecoderState state; + switch (stateStr) { + case 'initializing': + state = DecoderState.initializing; + break; + case 'ready': + state = DecoderState.ready; + break; + case 'rendering': + state = DecoderState.rendering; + break; + case 'error': + state = DecoderState.error; + break; + case 'released': + state = DecoderState.released; + break; + default: + state = DecoderState.initializing; + } + + // 将statsMap转换为强类型Map + final Map stats = {}; + statsMap.forEach((key, value) { + if (key is String) { + stats[key] = value; + } + }); + + // 调用状态回调 + final decoder = _decoders[textureId]; + if (decoder != null && decoder.stateCallback != null) { + _logDebug('调用解码器状态回调:纹理ID=$textureId, 状态=$stateStr'); + decoder.stateCallback!(textureId, state, stats); + } + + return null; + default: throw PlatformException( code: 'Unimplemented', @@ -460,6 +321,24 @@ class VideoDecodePlugin { } } + /// 设置解码器状态回调(默认解码器) + static void setStateCallback(DecoderStateCallback callback) { + if (_defaultTextureId != null) { + setStateCallbackForTexture(_defaultTextureId!, callback); + } + } + + /// 为特定纹理ID设置状态回调 + static void setStateCallbackForTexture( + int textureId, DecoderStateCallback callback) { + _initializeMethodCallHandler(); + + final decoder = _decoders[textureId]; + if (decoder != null) { + decoder.stateCallback = callback; + } + } + /// 初始化解码器 static Future initDecoder(VideoDecoderConfig config) async { // 设置调试模式 @@ -691,7 +570,8 @@ class VideoDecodePlugin { final decoder = _decoders[textureId]; if (decoder != null) { decoder.frameCallback = null; - _logDebug('已清除纹理ID为$textureId的回调'); + decoder.stateCallback = null; + _logDebug('已清除纹理ID为$textureId的所有回调'); } } @@ -699,6 +579,7 @@ class VideoDecodePlugin { static void clearAllCallbacks() { for (final decoder in _decoders.values) { decoder.frameCallback = null; + decoder.stateCallback = null; } _logDebug('已清除所有回调'); } @@ -712,12 +593,19 @@ class VideoDecodePlugin { /// /// [textureId] 纹理ID /// 返回包含统计信息的Map,包括: - /// - totalFramesReceived: 接收的总帧数 - /// - framesRendered: 成功渲染的帧数 - /// - framesDropped: 丢弃的帧数 - /// - lastFrameTimestamp: 最后一帧时间戳 - /// - averageProcessingTimeMs: 平均处理时间(毫秒) + /// - totalFrames: 接收的总帧数 + /// - renderedFrames: 成功渲染的帧数 + /// - droppedFrames: 丢弃的帧数 + /// - fps: 当前渲染FPS + /// - hasSentSPS: 是否已发送SPS + /// - hasSentPPS: 是否已发送PPS + /// - hasSentIDR: 是否已发送IDR(I帧) + /// - consecutivePFrames: 当前连续P帧数 + /// - targetWidth: 目标宽度 + /// - targetHeight: 目标高度 + /// - frameRate: 目标帧率 /// - decoderCount: 当前活跃的解码器数量 + /// - textureId: 纹理ID static Future> getDecoderStats(int textureId) async { // 检查解码器是否正在释放 if (!_isDecoderReady(textureId)) { @@ -752,52 +640,6 @@ class VideoDecodePlugin { return {}; } } - - /// 获取当前渲染FPS - /// - /// 返回当前解码器的实时渲染帧率 - /// 如果解码器未初始化或获取失败,返回0.0 - static Future getCurrentFps([int? textureId]) async { - final targetTextureId = textureId ?? _defaultTextureId; - if (targetTextureId == null) { - return 0.0; - } - - try { - final stats = await getDecoderStats(targetTextureId); - return stats['fps'] as double? ?? 0.0; - } catch (e) { - _logError('获取FPS失败: $e'); - return 0.0; - } - } - - /// 获取动态阈值参数 - /// - /// 返回当前解码器使用的动态阈值参数 - /// 包括检测到的GOP大小、最大连续P帧数限制、I帧超时时间等 - static Future> getDynamicThresholdParams( - [int? textureId]) async { - final targetTextureId = textureId ?? 
_defaultTextureId;
-    if (targetTextureId == null) {
-      return {};
-    }
-
-    try {
-      final stats = await getDecoderStats(targetTextureId);
-      return {
-        'detectedGopSize': stats['detectedGopSize'] as int? ?? 0,
-        'dynamicMaxConsecutivePFrames':
-            stats['dynamicMaxConsecutivePFrames'] as int? ?? 0,
-        'dynamicIFrameTimeoutMs': stats['dynamicIFrameTimeoutMs'] as int? ?? 0,
-        'enableDynamicThresholds':
-            stats['enableDynamicThresholds'] as bool? ?? false,
-      };
-    } catch (e) {
-      _logError('获取动态阈值参数失败: $e');
-      return {};
-    }
-  }
 }
 
 /// 在Dart中实现简单的同步锁
@@ -811,7 +653,3 @@ void synchronized(Object lock, Function() action) {
   action();
 }
 
 T synchronizedWithResult<T>(Object lock, T Function() action) {
   return action();
 }
-
-
-
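
A minimal consumption sketch of the reworked callback API from this patch, as it might look on the Flutter side. It uses only symbols that appear in the hunks above (VideoDecoderConfig, CodecType, DecoderState, initDecoder, setFrameCallbackForTexture, setStateCallbackForTexture, getDecoderStats). The exact return type of initDecoder and the Dart-side frame-feeding call are not visible in these hunks, so the null check on the returned texture ID is an assumption and frame submission is left out.

// Sketch only: assumes initDecoder returns a (possibly nullable) texture ID.
import 'package:video_decode_plugin/video_decode_plugin.dart';

Future<void> startDecoderWithStateUpdates() async {
  // The trimmed-down config: bufferSize, threadCount and the dynamic-threshold
  // knobs were removed by this patch, so only these fields remain.
  final config = VideoDecoderConfig(
    width: 640,
    height: 480,
    codecType: CodecType.h264,
    frameRate: 24,
    isDebug: true,
  );

  final textureId = await VideoDecodePlugin.initDecoder(config);
  if (textureId == null) return; // defensive; return type not shown in these hunks

  // Frame-available notifications: the platform side now sends these both as a
  // pre-notification (isPrenotification == true, before any real frame) and
  // once per rendered frame.
  VideoDecodePlugin.setFrameCallbackForTexture(textureId, (id) {
    // Typically: trigger a repaint of the Texture(textureId: id) widget.
  });

  // Decoder-state notifications replace the old getCurrentFps() /
  // getDynamicThresholdParams() polling; `stats` carries the keys documented
  // on getDecoderStats() (totalFrames, renderedFrames, droppedFrames, fps,
  // hasSentSPS/PPS/IDR, consecutivePFrames, ...).
  VideoDecodePlugin.setStateCallbackForTexture(textureId, (id, state, stats) {
    final rendered = stats['renderedFrames'] ?? 0;
    final dropped = stats['droppedFrames'] ?? 0;
    final fps = (stats['fps'] as num?)?.toDouble() ?? 0.0;
    if (state == DecoderState.error) {
      print('decoder $id error: ${stats['errorMessage']}');
    } else {
      print('decoder $id -> $state (rendered=$rendered, dropped=$dropped, fps=$fps)');
    }
  });

  // An on-demand snapshot of the same statistics map is still available if
  // polling is wanted in addition to the push-based callback.
  final snapshot = await VideoDecodePlugin.getDecoderStats(textureId);
  print('initial stats: $snapshot');
}

The design change this illustrates: state and statistics now flow Android -> Flutter through the onDecoderState channel call (sent on initialization, per decoded frame, on error and on release), so the example app no longer needs its one-second stats polling timer.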