import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';

import 'frame_dependency_manager.dart';
import 'nalu_utils.dart';
import 'video_decode_plugin_platform_interface.dart';

/// Configuration for the video decoder.
class VideoDecoderConfig {
  /// Video width in pixels.
  final int width;

  /// Video height in pixels.
  final int height;

  /// Codec type; defaults to 'h264'.
  final String codecType;

  VideoDecoderConfig({
    required this.width,
    required this.height,
    this.codecType = 'h264',
  });

  /// Serializes this config for transmission over the platform channel.
  Map<String, dynamic> toMap() {
    return {
      'width': width,
      'height': height,
      'codecType': codecType,
    };
  }
}

/// Main entry point of the video decode plugin.
///
/// All members are static: the plugin manages a single native decoder
/// instance identified by [textureId].
class VideoDecodePlugin {
  static const MethodChannel _channel = MethodChannel('video_decode_plugin');
  static int? _textureId;

  /// Callback invoked when a frame has been decoded and rendering has started.
  static void Function(int textureId)? _onFrameRendered;

  /// Registers a listener for the native `onFrameRendered` event.
  static void setOnFrameRenderedListener(
      void Function(int textureId) callback) {
    _onFrameRendered = callback;
    _channel.setMethodCallHandler(_handleMethodCall);
  }

  /// Dispatches method calls arriving from the native side.
  static Future<dynamic> _handleMethodCall(MethodCall call) async {
    if (call.method == 'onFrameRendered') {
      final int? textureId = call.arguments['textureId'];
      if (_onFrameRendered != null && textureId != null) {
        _onFrameRendered!(textureId);
      }
    }
  }

  /// Initializes the native decoder.
  ///
  /// Returns the texture id to render onto, or null on failure.
  static Future<int?> initDecoder(VideoDecoderConfig config) async {
    final textureId =
        await _channel.invokeMethod<int>('initDecoder', config.toMap());
    _textureId = textureId;
    return textureId;
  }

  /// Sends one frame to the native decoder (internal use only).
  ///
  /// [frameType]: 0 = I-frame, 1 = P-frame.
  /// [timestamp]: presentation timestamp (ms or us — unit is a contract with
  /// the native side; not determinable from this file).
  /// [frameSeq]: frame sequence number.
  /// [refIFrameSeq]: sequence of the referenced I-frame (optional for P-frames).
  ///
  /// Returns false when no decoder is initialized or the native call fails.
  static Future<bool> _decodeFrame({
    required Uint8List frameData,
    required int frameType,
    required int timestamp,
    required int frameSeq,
    int? refIFrameSeq,
  }) async {
    if (_textureId == null) return false;
    final params = {
      'textureId': _textureId,
      'frameData': frameData,
      'frameType': frameType,
      'timestamp': timestamp,
      'frameSeq': frameSeq,
      if (refIFrameSeq != null) 'refIFrameSeq': refIFrameSeq,
    };
    final result = await _channel.invokeMethod<bool>('decodeFrame', params);
    return result ?? false;
  }

  /// Releases native decoder resources.
  ///
  /// Returns true when there is nothing to release or the release succeeded.
  static Future<bool> releaseDecoder() async {
    if (_textureId == null) return true;
    final result = await _channel.invokeMethod<bool>('releaseDecoder', {
      'textureId': _textureId,
    });
    _textureId = null;
    return result ?? false;
  }

  /// Returns the platform version string.
  static Future<String?> getPlatformVersion() {
    return VideoDecodePluginPlatform.instance.getPlatformVersion();
  }

  /// Whether the current platform is supported.
  static bool get isPlatformSupported {
    return Platform.isAndroid || Platform.isIOS;
  }

  /// The texture id of the active decoder, if any.
  static int? get textureId => _textureId;

  static final _depManager = FrameDependencyManager();

  /// Sends the cached SPS and PPS followed by the given I-frame, then marks
  /// the I-frame as decoded in the dependency manager.
  ///
  /// Caller guarantees that [_depManager] has both SPS and PPS cached.
  /// SPS/PPS are sent as pseudo I-frames with adjacent sequence numbers
  /// (frameSeq-2, frameSeq-1) so native-side ordering is preserved.
  static Future<void> _sendIFrameWithParams({
    required List<int> frameData,
    required int timestamp,
    required int frameSeq,
  }) async {
    await _decodeFrame(
      frameData: _depManager.sps!,
      frameType: 0,
      timestamp: timestamp,
      frameSeq: frameSeq - 2,
      refIFrameSeq: frameSeq - 2,
    );
    await _decodeFrame(
      frameData: _depManager.pps!,
      frameType: 0,
      timestamp: timestamp,
      frameSeq: frameSeq - 1,
      refIFrameSeq: frameSeq - 1,
    );
    await _decodeFrame(
      frameData: Uint8List.fromList(frameData),
      frameType: 0,
      timestamp: timestamp,
      frameSeq: frameSeq,
      refIFrameSeq: frameSeq,
    );
    _depManager.updateIFrameSeq(frameSeq);
  }

  /// Sends a frame for decoding.
  ///
  /// [frameData]: raw frame bytes.
  /// [frameType]: 0 = I-frame, 1 = P-frame.
  /// [timestamp]: absolute frame timestamp.
  /// [frameSeq]: frame sequence number.
  /// [splitNalFromIFrame]: when true, I-frames are split into NALUs so that
  /// SPS/PPS can be extracted, cached, and prepended to each I-frame; when
  /// false, data is sent as-is (for callers that push SPS/PPS/I-frames
  /// independently).
  static Future<void> sendFrame({
    required List<int> frameData,
    required int frameType,
    required int timestamp,
    required int frameSeq,
    bool splitNalFromIFrame = false,
  }) async {
    if (splitNalFromIFrame && frameType == 0) {
      // Fast path: SPS/PPS already cached from an earlier I-frame.
      if (_depManager.sps != null && _depManager.pps != null) {
        await _sendIFrameWithParams(
          frameData: frameData,
          timestamp: timestamp,
          frameSeq: frameSeq,
        );
        print('[VideoDecodePlugin] 发送I帧及SPS/PPS(缓存), frameSeq=$frameSeq');
        return;
      }

      // First I-frame: split into NALUs to extract and cache SPS/PPS.
      final nalus = NaluUtils.splitNalus(frameData);
      print('[调试] frameSeq=$frameSeq, 分割出NALU数量=${nalus.length}');
      for (final nalu in nalus) {
        print('[调试] NALU type=${nalu.type}, length=${nalu.data.length}');
      }
      List<int>? sps, pps;
      for (final nalu in nalus) {
        // NALU type 7 = SPS, type 8 = PPS (H.264).
        if (nalu.type == 7) {
          sps = nalu.data;
        } else if (nalu.type == 8) {
          pps = nalu.data;
        }
      }
      if (sps != null) {
        print('[调试] SPS被缓存, 长度=${sps.length}');
        _depManager.updateSps(Uint8List.fromList(sps));
      }
      if (pps != null) {
        print('[调试] PPS被缓存, 长度=${pps.length}');
        _depManager.updatePps(Uint8List.fromList(pps));
      }
      if (_depManager.sps == null || _depManager.pps == null) {
        // Cannot decode an I-frame without parameter sets; drop it.
        print('[VideoDecodePlugin] 丢弃I帧: 未缓存SPS/PPS');
        return;
      }
      await _sendIFrameWithParams(
        frameData: frameData,
        timestamp: timestamp,
        frameSeq: frameSeq,
      );
      print('[VideoDecodePlugin] 发送I帧及SPS/PPS(首次分割), frameSeq=$frameSeq');
      return;
    }

    // Pass-through path for callers pushing SPS/PPS/I/P frames directly.
    final refIFrameSeq =
        frameType == 0 ? frameSeq : _depManager.lastIFrameSeq;

    // Validate the P-frame dependency chain BEFORE sending. The original
    // code checked after the frame was already sent (too late to drop it)
    // and referenced `refIFrameSeq`, which was undefined in this scope.
    if (frameType == 1 && !_depManager.isIFrameDecoded(refIFrameSeq)) {
      print('[丢帧] P帧依赖的I帧未解码,丢弃 frameSeq=$frameSeq, refIFrameSeq=$refIFrameSeq');
      return;
    }

    await _decodeFrame(
      frameData: Uint8List.fromList(frameData),
      frameType: frameType,
      timestamp: timestamp,
      frameSeq: frameSeq,
      refIFrameSeq: refIFrameSeq,
    );
    // An I-frame becomes the new reference for subsequent P-frames.
    if (frameType == 0) _depManager.updateIFrameSeq(frameSeq);
  }
}