import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';

import 'video_decode_plugin_platform_interface.dart';

/// Video frame type.
enum FrameType {
  /// Key frame (I-frame).
  iFrame,

  /// Predicted frame (P-frame).
  pFrame,
}

/// Video codec type.
enum CodecType {
  /// H.264 encoding.
  h264,

  /// H.265 encoding.
  h265,
}

/// Decoder lifecycle states reported by the native side.
enum DecoderState {
  /// Initializing.
  initializing,

  /// Ready, but no actual rendering has started yet.
  ready,

  /// Actively rendering frames.
  rendering,

  /// An error occurred.
  error,

  /// Resources have been released.
  released,
}

/// Callback invoked when a new frame is available on [textureId].
typedef FrameAvailableCallback = void Function(int textureId);

/// Callback invoked when the decoder identified by [textureId] changes state.
///
/// [stats] carries native-side decoder statistics (string-keyed).
typedef DecoderStateCallback = void Function(
    int textureId, DecoderState state, Map<String, dynamic> stats);

/// Internal per-decoder bookkeeping: texture id plus registered callbacks.
class _DecoderInstance {
  /// Texture id assigned by the native decoder.
  final int textureId;

  /// Optional frame-available callback for this decoder.
  FrameAvailableCallback? frameCallback;

  /// Optional state-change callback for this decoder.
  DecoderStateCallback? stateCallback;

  _DecoderInstance(this.textureId);
}

/// Configuration for creating a video decoder.
class VideoDecoderConfig {
  /// Video width in pixels; defaults to 640.
  final int width;

  /// Video height in pixels; defaults to 360.
  final int height;

  /// Target frame rate; may be null (native side picks a default).
  final int? frameRate;

  /// Codec type; defaults to [CodecType.h264].
  final CodecType codecType;

  /// Whether debug logging is enabled; defaults to false.
  final bool isDebug;

  VideoDecoderConfig({
    this.width = 640,
    this.height = 360,
    this.frameRate,
    this.codecType = CodecType.h264,
    this.isDebug = false,
  });

  /// Serializes this configuration for transport over the method channel.
  Map<String, dynamic> toMap() {
    return {
      'width': width,
      'height': height,
      'frameRate': frameRate,
      // Send only the enum's short name (e.g. "h264"), not "CodecType.h264".
      'codecType': codecType.toString().split('.').last,
      'isDebug': isDebug,
    };
  }
}

/// Main entry point of the video-decode plugin.
///
/// Manages one or more native decoder instances (each bound to a Flutter
/// texture id), routes frame/state callbacks from the native side, and
/// guards against use-after-release races.
class VideoDecodePlugin {
  static const MethodChannel _channel = MethodChannel('video_decode_plugin');

  /// Active decoder instances keyed by texture id (multi-instance support).
  static final Map<int, _DecoderInstance> _decoders = {};

  /// Texture id of the default decoder, if one exists.
  static int? _defaultTextureId;

  /// Whether the method-call handler has been installed on [_channel].
  static bool _listenerInitialized = false;

  /// Whether debug logging is enabled.
  static bool _isDebugMode = false;

  /// Decoders currently being released — used to drop late callbacks and
  /// decode requests that arrive mid-release.
  static final Map<int, bool> _isDecoderReleasing = {};

  /// Lock object guarding [_isDecoderReleasing] access.
  static final Object _decoderStateLock = Object();

  // Throttling state for repeated "decoder not initialized" errors, to
  // avoid flooding the log when frames keep arriving after release.
  static int _uninitializedErrorCount = 0;
  static int _lastErrorLogTime = 0;

  /// Logs a debug message (only when debug mode is enabled).
  static void _logDebug(String message) {
    if (_isDebugMode) {
      debugPrint('[VideoDecodePlugin] $message');
    }
  }

  /// Logs an error message (always output).
  ///
  /// When [throttle] is true, identical errors are accumulated and a summary
  /// line is emitted at most every 5 seconds or every 50 occurrences.
  static void _logError(String message, {bool throttle = false}) {
    if (throttle) {
      _uninitializedErrorCount++;
      final now = DateTime.now().millisecondsSinceEpoch;
      if (now - _lastErrorLogTime > 5000 || _uninitializedErrorCount >= 50) {
        debugPrint(
            '[VideoDecodePlugin] ERROR: $message (发生 $_uninitializedErrorCount 次)');
        _lastErrorLogTime = now;
        _uninitializedErrorCount = 0;
      }
    } else {
      debugPrint('[VideoDecodePlugin] ERROR: $message');
    }
  }

  /// Installs the method-call handler for native → Dart callbacks (idempotent).
  static void _initializeMethodCallHandler() {
    if (_listenerInitialized) {
      return;
    }
    _channel.setMethodCallHandler((call) async {
      switch (call.method) {
        case 'onFrameAvailable':
          final Map args = call.arguments;
          final int textureId = args['textureId'];

          // Drop frame notifications for decoders that are mid-release.
          bool isReleasing = false;
          _withLock(_decoderStateLock, () {
            isReleasing = _isDecoderReleasing[textureId] ?? false;
          });
          if (isReleasing) {
            _logDebug('收到帧通知但解码器 $textureId 正在释放,忽略');
            return null;
          }

          final decoder = _decoders[textureId];
          if (decoder != null && decoder.frameCallback != null) {
            // Pre-notifications signal texture readiness without real data.
            final bool isPrenotification = args['isPrenotification'] ?? false;
            if (isPrenotification) {
              _logDebug('[预通知] 收到初始帧可用通知(无实际视频数据),纹理ID: $textureId');
            } else {
              _logDebug('收到帧可用通知,纹理ID: $textureId');
            }
            decoder.frameCallback!(textureId);
          }
          return null;

        case 'onDecoderState':
          final Map args = call.arguments;
          final int textureId = args['textureId'];
          final String stateStr = args['state'];
          final Map statsMap = args['stats'];

          // Drop state callbacks for decoders mid-release, except the final
          // 'released' notification, which is always delivered.
          bool isReleasing = false;
          _withLock(_decoderStateLock, () {
            isReleasing = _isDecoderReleasing[textureId] ?? false;
          });
          if (isReleasing && stateStr != 'released') {
            _logDebug('收到状态回调但解码器 $textureId 正在释放,忽略');
            return null;
          }

          // Map the native state string onto the DecoderState enum; unknown
          // strings fall back to `initializing`.
          DecoderState state;
          switch (stateStr) {
            case 'initializing':
              state = DecoderState.initializing;
              break;
            case 'ready':
              state = DecoderState.ready;
              break;
            case 'rendering':
              state = DecoderState.rendering;
              break;
            case 'error':
              state = DecoderState.error;
              break;
            case 'released':
              state = DecoderState.released;
              break;
            default:
              state = DecoderState.initializing;
          }

          // Narrow the loosely-typed platform map to string keys only.
          final Map<String, dynamic> stats = {};
          statsMap.forEach((key, value) {
            if (key is String) {
              stats[key] = value;
            }
          });

          final decoder = _decoders[textureId];
          if (decoder != null && decoder.stateCallback != null) {
            _logDebug('调用解码器状态回调:纹理ID=$textureId, 状态=$stateStr');
            decoder.stateCallback!(textureId, state, stats);
          }
          return null;

        default:
          throw PlatformException(
            code: 'Unimplemented',
            details: 'The method ${call.method} is not implemented',
          );
      }
    });
    _listenerInitialized = true;
  }

  /// Runs [action] while holding [lock] (see [synchronized]).
  static void _withLock(Object lock, Function() action) {
    synchronized(lock, action);
  }

  /// Runs [action] while holding [lock] and returns its result.
  static T _withLockResult<T>(Object lock, T Function() action) {
    return synchronizedWithResult(lock, action);
  }

  /// Whether the decoder for [textureId] exists and is not being released.
  static bool _isDecoderReady(int textureId) {
    bool isReleasing = false;
    _withLock(_decoderStateLock, () {
      isReleasing = _isDecoderReleasing[textureId] ?? false;
    });
    return _decoders.containsKey(textureId) && !isReleasing;
  }

  /// Marks or clears the "releasing" flag for [textureId].
  static void _setDecoderReleasing(int textureId, bool isReleasing) {
    _withLock(_decoderStateLock, () {
      if (isReleasing) {
        _isDecoderReleasing[textureId] = true;
      } else {
        _isDecoderReleasing.remove(textureId);
      }
    });
  }

  /// Returns the platform version string from the platform interface.
  static Future<String?> getPlatformVersion() {
    return VideoDecodePluginPlatform.instance.getPlatformVersion();
  }

  /// Whether the current platform (Android/iOS) is supported.
  static bool get isPlatformSupported {
    return Platform.isAndroid || Platform.isIOS;
  }

  /// Sets the frame callback on the default decoder (no-op if none exists).
  static void setFrameCallback(FrameAvailableCallback callback) {
    if (_defaultTextureId != null) {
      setFrameCallbackForTexture(_defaultTextureId!, callback);
    }
  }

  /// Sets the frame callback for the decoder bound to [textureId].
  static void setFrameCallbackForTexture(
      int textureId, FrameAvailableCallback callback) {
    _initializeMethodCallHandler();
    final decoder = _decoders[textureId];
    if (decoder != null) {
      decoder.frameCallback = callback;
    }
  }

  /// Sets the state callback on the default decoder (no-op if none exists).
  static void setStateCallback(DecoderStateCallback callback) {
    if (_defaultTextureId != null) {
      setStateCallbackForTexture(_defaultTextureId!, callback);
    }
  }

  /// Sets the state callback for the decoder bound to [textureId].
  static void setStateCallbackForTexture(
      int textureId, DecoderStateCallback callback) {
    _initializeMethodCallHandler();
    final decoder = _decoders[textureId];
    if (decoder != null) {
      decoder.stateCallback = callback;
    }
  }

  /// Initializes the default decoder, releasing any previous one first.
  ///
  /// Returns the new texture id, or null on failure.
  static Future<int?> initDecoder(VideoDecoderConfig config) async {
    _isDebugMode = config.isDebug;
    _uninitializedErrorCount = 0;
    // Release the previous default decoder before creating a new one.
    if (_defaultTextureId != null) {
      await releaseDecoder();
    }
    return await createDecoder(config);
  }

  /// Creates a new decoder instance (multi-instance support).
  ///
  /// The new decoder becomes the default. Returns its texture id, or null
  /// on unsupported platforms or native failure.
  static Future<int?> createDecoder(VideoDecoderConfig config) async {
    _isDebugMode = config.isDebug;
    _uninitializedErrorCount = 0;

    if (!isPlatformSupported) {
      _logError('当前平台不支持视频解码插件');
      return null;
    }

    _initializeMethodCallHandler();

    try {
      _logDebug(
          '创建解码器: ${config.width}x${config.height}, 编码: ${config.codecType}');
      final textureId =
          await _channel.invokeMethod<int>('initDecoder', config.toMap());
      if (textureId != null) {
        final decoder = _DecoderInstance(textureId);
        _decoders[textureId] = decoder;
        _setDecoderReleasing(textureId, false);
        _defaultTextureId = textureId;
        _logDebug('解码器创建成功,纹理ID: $textureId');
      }
      return _defaultTextureId;
    } catch (e) {
      _logError('初始化解码器失败: $e');
      return null;
    }
  }

  /// The default decoder's texture id, or null if none exists.
  static int? get textureId => _defaultTextureId;

  /// Texture ids of all active decoders.
  static List<int> get allTextureIds => _decoders.keys.toList();

  /// Decodes a frame on the default decoder.
  ///
  /// Returns false when no decoder is initialized, the decoder is being
  /// released, or the native call fails.
  static Future<bool> decodeFrame(
      Uint8List frameData, FrameType frameType) async {
    // Cache the id locally to guard against concurrent modification.
    final int? decoderId = _defaultTextureId;
    if (decoderId == null) {
      // Throttled to avoid log floods when frames keep arriving.
      _logError('解码器未初始化', throttle: true);
      return false;
    }
    if (!_isDecoderReady(decoderId)) {
      _logDebug('解码器正在释放,忽略解码请求');
      return false;
    }
    return decodeFrameForTexture(decoderId, frameData, frameType);
  }

  /// Decodes a frame on the decoder bound to [textureId].
  static Future<bool> decodeFrameForTexture(
      int textureId, Uint8List frameData, FrameType frameType) async {
    if (!_isDecoderReady(textureId)) {
      _logDebug('解码器不可用或正在释放,忽略解码请求');
      return false;
    }
    try {
      final bool isIFrame = frameType == FrameType.iFrame;
      _logDebug(
          '解码帧: textureId=$textureId, 大小=${frameData.length}字节, 类型=${isIFrame ? "I帧" : "P帧"}');
      final result = await _channel.invokeMethod<bool>('decodeFrame', {
            'textureId': textureId,
            'frameData': frameData,
            'frameType': frameType.index,
          }) ??
          false;
      if (!result) {
        _logDebug('解码帧失败');
      }
      return result;
    } catch (e) {
      // The decoder may have been released while the call was in flight.
      if (!_decoders.containsKey(textureId)) {
        _logDebug('解码器已释放,忽略解码错误');
        return false;
      }
      _logError('解码帧失败: $e');
      return false;
    }
  }

  /// Releases the default decoder. Returns true when there is nothing to
  /// release or the release succeeded.
  static Future<bool> releaseDecoder() async {
    final int? decoderId = _defaultTextureId;
    if (decoderId == null) {
      return true;
    }
    final result = await releaseDecoderForTexture(decoderId);
    if (result) {
      _defaultTextureId = null;
    }
    return result;
  }

  /// Releases the decoder bound to [textureId].
  ///
  /// Marks the decoder as "releasing" first so in-flight frame/decode
  /// requests are dropped, then asks the native side to release resources.
  static Future<bool> releaseDecoderForTexture(int textureId) async {
    if (!_decoders.containsKey(textureId)) {
      return true;
    }

    // Block new decode requests while the release is in progress.
    _setDecoderReleasing(textureId, true);

    try {
      _logDebug('释放解码器: textureId=$textureId');
      // Clear callbacks so no more frame callbacks fire during teardown.
      clearCallbackForTexture(textureId);
      final result = await _channel.invokeMethod<bool>('releaseDecoder', {
            'textureId': textureId,
          }) ??
          false;
      if (result) {
        _decoders.remove(textureId);
        if (_defaultTextureId == textureId) {
          _defaultTextureId = null;
        }
        _setDecoderReleasing(textureId, false);
        _uninitializedErrorCount = 0;
        _logDebug('解码器释放成功: textureId=$textureId');
      } else {
        // Release failed; restore state so the decoder remains usable.
        _setDecoderReleasing(textureId, false);
        _logError('解码器释放失败: textureId=$textureId');
      }
      return result;
    } catch (e) {
      // On exception, still drop the decoder to avoid leaking resources.
      _decoders.remove(textureId);
      if (_defaultTextureId == textureId) {
        _defaultTextureId = null;
      }
      _setDecoderReleasing(textureId, false);
      _logError('释放解码器失败: $e');
      return false;
    }
  }

  /// Releases every active decoder. Returns true only if all succeed.
  static Future<bool> releaseAllDecoders() async {
    bool allSuccess = true;
    // Snapshot the keys — the map is mutated while iterating.
    final textureIds = List<int>.from(_decoders.keys);
    _logDebug('释放所有解码器: 共${textureIds.length}个');
    for (final textureId in textureIds) {
      final success = await releaseDecoderForTexture(textureId);
      if (!success) {
        allSuccess = false;
      }
    }
    _decoders.clear();
    _defaultTextureId = null;
    _withLock(_decoderStateLock, () {
      _isDecoderReleasing.clear();
    });
    _uninitializedErrorCount = 0;
    return allSuccess;
  }

  /// Clears both callbacks for the decoder bound to [textureId].
  static void clearCallbackForTexture(int textureId) {
    final decoder = _decoders[textureId];
    if (decoder != null) {
      decoder.frameCallback = null;
      decoder.stateCallback = null;
      _logDebug('已清除纹理ID为$textureId的所有回调');
    }
  }

  /// Clears the callbacks of every active decoder.
  static void clearAllCallbacks() {
    for (final decoder in _decoders.values) {
      decoder.frameCallback = null;
      decoder.stateCallback = null;
    }
    _logDebug('已清除所有回调');
  }

  /// Plugin registration hook (no manual call required).
  static void registerWith() {
    // Used only for plugin registration.
  }

  /// Fetches decoder statistics for [textureId].
  ///
  /// Returns a map that may include:
  /// - totalFrames: total frames received
  /// - renderedFrames: frames successfully rendered
  /// - droppedFrames: frames dropped
  /// - fps: current rendering FPS
  /// - hasSentSPS / hasSentPPS / hasSentIDR: stream-header progress flags
  /// - consecutivePFrames: current consecutive P-frame count
  /// - targetWidth / targetHeight / frameRate: configured output parameters
  /// - decoderCount: number of active decoders
  /// - textureId: the texture id
  ///
  /// Returns an empty map when the decoder is unavailable or the call fails.
  static Future<Map<String, dynamic>> getDecoderStats(int textureId) async {
    if (!_isDecoderReady(textureId)) {
      _logDebug('解码器不可用或正在释放,无法获取统计信息');
      return {};
    }
    try {
      _logDebug('获取解码器统计信息: textureId=$textureId');
      final params = <String, dynamic>{
        'textureId': textureId,
      };
      final result = await _channel.invokeMethod<Map<Object?, Object?>>(
          'getDecoderStats', params);
      if (result == null) {
        return {};
      }
      // Narrow the loosely-typed platform map to string keys only.
      final Map<String, dynamic> typedResult = {};
      result.forEach((key, value) {
        if (key is String) {
          typedResult[key] = value;
        }
      });
      _logDebug('获取解码器统计信息成功: $typedResult');
      return typedResult;
    } catch (e) {
      _logError('获取解码器统计信息失败: $e');
      return {};
    }
  }
}

/// Runs [action] "under" [lock].
///
/// Dart isolates are single-threaded, so no real locking is needed; the
/// structure is kept as a seam for future improvements.
void synchronized(Object lock, Function() action) {
  action();
}

/// Runs [action] "under" [lock] and returns its result.
T synchronizedWithResult<T>(Object lock, T Function() action) {
  return action();
}