// video_decode_plugin/lib/video_decode_plugin.dart
// Last modified: 2025-04-28 09:11:53 +08:00
//
// NOTE(review): the lines above were web-viewer metadata ("Raw Blame History",
// Unicode-character warnings, line/size counts) accidentally captured with the
// file; they have been converted into this comment so the file stays valid Dart.
import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'video_decode_plugin_platform_interface.dart';
/// The type of a video frame submitted to the decoder.
enum FrameType {
  /// Key frame (intra-coded; decodable without reference to other frames).
  iFrame,

  /// Predicted frame (depends on previously decoded frames).
  pFrame,
}
/// The video codec used by the encoded stream.
enum CodecType {
  /// H.264 / AVC.
  h264,

  /// H.265 / HEVC.
  h265,
}
/// Signature of the callback invoked when a decoded frame becomes available
/// on the texture identified by `textureId`.
typedef FrameAvailableCallback = void Function(int textureId);
/// Internal bookkeeping record for a single decoder instance.
class _DecoderInstance {
  /// Native texture ID backing this decoder's output surface.
  final int textureId;

  /// Callback fired on each frame-available notification; null when unset.
  FrameAvailableCallback? frameCallback;

  _DecoderInstance(this.textureId);
}
/// Configuration for a video decoder instance.
///
/// All fields are final; instances are immutable and may be created with
/// `const` (the constructor was promoted to `const` per Effective Dart,
/// which is backward compatible for all existing call sites).
class VideoDecoderConfig {
  /// Video width in pixels. Defaults to 640.
  final int width;

  /// Video height in pixels. Defaults to 360.
  final int height;

  /// Frame rate; null when unknown (the platform side decides a default).
  final int? frameRate;

  /// Codec of the incoming stream. Defaults to [CodecType.h264].
  final CodecType codecType;

  /// Buffer size, in frames. Defaults to 25.
  final int bufferSize;

  /// Number of decoding threads. Defaults to 1.
  final int threadCount;

  /// Whether debug logging is enabled. Defaults to false.
  final bool isDebug;

  /// Whether hardware decoding is enabled. Defaults to true.
  final bool enableHardwareDecoder;

  /// Creates an immutable decoder configuration.
  const VideoDecoderConfig({
    this.width = 640,
    this.height = 360,
    this.frameRate,
    this.codecType = CodecType.h264,
    this.bufferSize = 25,
    this.threadCount = 1,
    this.isDebug = false,
    this.enableHardwareDecoder = true,
  });

  /// Serializes this configuration into a map suitable for the platform
  /// channel. [codecType] is sent as the short enum name (e.g. 'h264').
  Map<String, dynamic> toMap() {
    return {
      'width': width,
      'height': height,
      'frameRate': frameRate,
      // `toString()` yields 'CodecType.h264'; keep only the value name.
      'codecType': codecType.toString().split('.').last,
      'bufferSize': bufferSize,
      'threadCount': threadCount,
      'isDebug': isDebug,
      'enableHardwareDecoder': enableHardwareDecoder,
    };
  }
}
/// Main entry point of the video decode plugin.
///
/// All members are static. The plugin tracks one "default" decoder plus any
/// number of additional decoder instances, each identified by the native
/// texture ID returned when the instance was created.
class VideoDecodePlugin {
  static const MethodChannel _channel = MethodChannel('video_decode_plugin');

  // Active decoder instances keyed by texture ID (multi-instance support).
  static final Map<int, _DecoderInstance> _decoders = {};

  // Texture ID of the default decoder, or null when none exists.
  static int? _defaultTextureId;

  // Whether the method-call handler has been installed on [_channel].
  static bool _listenerInitialized = false;

  // Debug-mode flag, copied from the most recent [VideoDecoderConfig].
  static bool _isDebugMode = false;

  // Release-state tracking - prevents use of a decoder after teardown begins.
  static final Map<int, bool> _isDecoderReleasing = {};

  // Lock object guarding [_isDecoderReleasing]; a structural placeholder in
  // single-isolate Dart (see [synchronized]).
  static final _decoderStateLock = Object();

  // Throttled error-log state, preventing log floods when decodeFrame is
  // called repeatedly before the decoder is initialized.
  static int _uninitializedErrorCount = 0;
  static int _lastErrorLogTime = 0;

  // Emit at most one aggregated error summary per this many seconds.
  // (Fix: this constant existed before but was never used - the 5000 ms
  // window was hard-coded; it is now the single source of truth.)
  static const int _errorLogThresholdSeconds = 5;

  /// Prints [message] only when debug mode is enabled.
  static void _logDebug(String message) {
    if (_isDebugMode) {
      debugPrint('[VideoDecodePlugin] $message');
    }
  }

  /// Prints an error message (always emitted).
  ///
  /// With [throttle] set, occurrences are counted and a single summary line
  /// is emitted once per [_errorLogThresholdSeconds] seconds, or every 50
  /// occurrences, whichever comes first.
  static void _logError(String message, {bool throttle = false}) {
    if (!throttle) {
      debugPrint('[VideoDecodePlugin] ERROR: $message');
      return;
    }
    _uninitializedErrorCount++;
    final now = DateTime.now().millisecondsSinceEpoch;
    if (now - _lastErrorLogTime > _errorLogThresholdSeconds * 1000 ||
        _uninitializedErrorCount >= 50) {
      debugPrint(
          '[VideoDecodePlugin] ERROR: $message (发生 $_uninitializedErrorCount 次)');
      _lastErrorLogTime = now;
      _uninitializedErrorCount = 0;
    }
  }

  /// Installs the native-to-Dart method-call handler exactly once.
  static void _initializeMethodCallHandler() {
    if (_listenerInitialized) {
      return;
    }
    _channel.setMethodCallHandler((call) async {
      switch (call.method) {
        case 'onFrameAvailable':
          final Map<dynamic, dynamic> args = call.arguments;
          final int textureId = args['textureId'];
          // Ignore notifications for decoders that are being torn down.
          bool isReleasing = false;
          _withLock(_decoderStateLock, () {
            isReleasing = _isDecoderReleasing[textureId] ?? false;
          });
          if (isReleasing) {
            _logDebug('收到帧通知但解码器 $textureId 正在释放,忽略');
            return null;
          }
          final decoder = _decoders[textureId];
          if (decoder != null && decoder.frameCallback != null) {
            // Query stats so the log can distinguish the initial
            // "pre-notification" (nothing rendered yet) from a real frame.
            getDecoderStats(textureId).then((stats) {
              final renderedFrames = stats['renderedFrames'] ?? 0;
              if (renderedFrames == 0) {
                _logDebug('[预通知] 收到初始帧可用通知无实际视频数据纹理ID: $textureId');
              } else {
                _logDebug('收到帧可用通知纹理ID: $textureId,已渲染帧数: $renderedFrames');
              }
              // Fix: use a null-safe call instead of `frameCallback!` - the
              // callback may have been cleared (e.g. during release) while
              // the stats request was in flight, which previously crashed.
              decoder.frameCallback?.call(textureId);
            }).catchError((error) {
              // Stats unavailable - still deliver the notification.
              _logDebug('收到帧可用通知纹理ID: $textureId');
              decoder.frameCallback?.call(textureId);
            });
          }
          return null;
        default:
          throw PlatformException(
            code: 'Unimplemented',
            details: 'The method ${call.method} is not implemented',
          );
      }
    });
    _listenerInitialized = true;
  }

  /// Runs [action] while holding [lock] (no-op lock in single-isolate Dart).
  static void _withLock(Object lock, Function() action) {
    synchronized(lock, action);
  }

  /// Runs [action] while holding [lock] and returns its result.
  static T _withLockResult<T>(Object lock, T Function() action) {
    return synchronizedWithResult(lock, action);
  }

  /// Whether the decoder for [textureId] exists and is not being released.
  static bool _isDecoderReady(int textureId) {
    bool isReleasing = false;
    _withLock(_decoderStateLock, () {
      isReleasing = _isDecoderReleasing[textureId] ?? false;
    });
    return _decoders.containsKey(textureId) && !isReleasing;
  }

  /// Marks (or unmarks) the decoder for [textureId] as releasing.
  static void _setDecoderReleasing(int textureId, bool isReleasing) {
    _withLock(_decoderStateLock, () {
      if (isReleasing) {
        _isDecoderReleasing[textureId] = true;
      } else {
        _isDecoderReleasing.remove(textureId);
      }
    });
  }

  /// Returns the platform version string reported by the native side.
  static Future<String?> getPlatformVersion() {
    return VideoDecodePluginPlatform.instance.getPlatformVersion();
  }

  /// Whether the current platform is supported (Android and iOS only).
  static bool get isPlatformSupported {
    return Platform.isAndroid || Platform.isIOS;
  }

  /// Sets the frame callback on the default decoder, if one exists.
  static void setFrameCallback(FrameAvailableCallback callback) {
    if (_defaultTextureId != null) {
      setFrameCallbackForTexture(_defaultTextureId!, callback);
    }
  }

  /// Sets the frame callback for the decoder identified by [textureId].
  ///
  /// Silently does nothing when no such decoder exists.
  static void setFrameCallbackForTexture(
      int textureId, FrameAvailableCallback callback) {
    _initializeMethodCallHandler();
    final decoder = _decoders[textureId];
    if (decoder != null) {
      decoder.frameCallback = callback;
    }
  }

  /// Initializes the default decoder, releasing any previous default first.
  ///
  /// Returns the new texture ID, or null on failure.
  static Future<int?> initDecoder(VideoDecoderConfig config) async {
    _isDebugMode = config.isDebug;
    _uninitializedErrorCount = 0;
    // Release the previous default decoder before creating a new one.
    if (_defaultTextureId != null) {
      await releaseDecoder();
    }
    return await createDecoder(config);
  }

  /// Creates a new decoder instance (multi-instance support).
  ///
  /// The new decoder also becomes the default one. Returns its texture ID,
  /// or null when the platform is unsupported or native init fails.
  static Future<int?> createDecoder(VideoDecoderConfig config) async {
    _isDebugMode = config.isDebug;
    _uninitializedErrorCount = 0;
    if (!isPlatformSupported) {
      _logError('当前平台不支持视频解码插件');
      return null;
    }
    // Make sure the frame-available listener is installed.
    _initializeMethodCallHandler();
    try {
      _logDebug(
          '创建解码器: ${config.width}x${config.height}, 编码: ${config.codecType}');
      final textureId =
          await _channel.invokeMethod<int>('initDecoder', config.toMap());
      if (textureId != null) {
        final decoder = _DecoderInstance(textureId);
        _decoders[textureId] = decoder;
        _setDecoderReleasing(textureId, false);
        _defaultTextureId = textureId;
        _logDebug('解码器创建成功纹理ID: $textureId');
      }
      // Fix: return the freshly obtained ID. The previous code returned
      // `_defaultTextureId`, which leaked a stale ID when native init
      // returned null.
      return textureId;
    } catch (e) {
      _logError('初始化解码器失败: $e');
      return null;
    }
  }

  /// Texture ID of the default decoder, or null when none exists.
  static int? get textureId => _defaultTextureId;

  /// Texture IDs of all currently active decoders.
  static List<int> get allTextureIds => _decoders.keys.toList();

  /// Decodes a frame with the default decoder.
  ///
  /// Returns false when no default decoder exists, it is being released,
  /// or native decoding fails.
  static Future<bool> decodeFrame(
      Uint8List frameData, FrameType frameType) async {
    // Cache the ID locally to guard against concurrent reassignment.
    final int? decoderId = _defaultTextureId;
    if (decoderId == null) {
      // Throttled to avoid a log flood when frames keep arriving.
      _logError('解码器未初始化', throttle: true);
      return false;
    }
    if (!_isDecoderReady(decoderId)) {
      _logDebug('解码器正在释放,忽略解码请求');
      return false;
    }
    return decodeFrameForTexture(decoderId, frameData, frameType);
  }

  /// Decodes a frame with the decoder identified by [textureId].
  static Future<bool> decodeFrameForTexture(
      int textureId, Uint8List frameData, FrameType frameType) async {
    if (!_isDecoderReady(textureId)) {
      _logDebug('解码器不可用或正在释放,忽略解码请求');
      return false;
    }
    try {
      final bool isIFrame = frameType == FrameType.iFrame;
      _logDebug(
          '解码帧: textureId=$textureId, 大小=${frameData.length}字节, 类型=${isIFrame ? "I帧" : "P帧"}');
      final result = await _channel.invokeMethod<bool>('decodeFrame', {
            'textureId': textureId,
            'frameData': frameData,
            'frameType': frameType.index,
          }) ??
          false;
      if (!result) {
        _logDebug('解码帧失败');
      }
      return result;
    } catch (e) {
      // The decoder may have been released while the call was in flight;
      // treat that as a benign failure rather than an error.
      if (!_decoders.containsKey(textureId)) {
        _logDebug('解码器已释放,忽略解码错误');
        return false;
      }
      _logError('解码帧失败: $e');
      return false;
    }
  }

  /// Releases the default decoder. Returns true when there is nothing to
  /// release or the release succeeded.
  static Future<bool> releaseDecoder() async {
    final int? decoderId = _defaultTextureId;
    if (decoderId == null) {
      return true;
    }
    final result = await releaseDecoderForTexture(decoderId);
    if (result) {
      _defaultTextureId = null;
    }
    return result;
  }

  /// Releases the decoder identified by [textureId].
  ///
  /// Marks the decoder as releasing first so that concurrent decode requests
  /// and frame notifications are ignored during teardown.
  static Future<bool> releaseDecoderForTexture(int textureId) async {
    if (!_decoders.containsKey(textureId)) {
      return true;
    }
    // Block new decode requests while the native release runs.
    _setDecoderReleasing(textureId, true);
    try {
      _logDebug('释放解码器: textureId=$textureId');
      // Drop the callback so no further frame notifications are delivered.
      clearCallbackForTexture(textureId);
      final result = await _channel.invokeMethod<bool>('releaseDecoder', {
            'textureId': textureId,
          }) ??
          false;
      if (result) {
        _decoders.remove(textureId);
        if (_defaultTextureId == textureId) {
          _defaultTextureId = null;
        }
        _setDecoderReleasing(textureId, false);
        _uninitializedErrorCount = 0;
        _logDebug('解码器释放成功: textureId=$textureId');
      } else {
        // Release failed: restore the usable state.
        _setDecoderReleasing(textureId, false);
        _logError('解码器释放失败: textureId=$textureId');
      }
      return result;
    } catch (e) {
      // On exception still drop the decoder locally to avoid leaking it.
      _decoders.remove(textureId);
      if (_defaultTextureId == textureId) {
        _defaultTextureId = null;
      }
      _setDecoderReleasing(textureId, false);
      _logError('释放解码器失败: $e');
      return false;
    }
  }

  /// Releases every active decoder. Returns true only if all succeed.
  static Future<bool> releaseAllDecoders() async {
    bool allSuccess = true;
    // Copy the keys: the map is mutated while we iterate.
    final textureIds = List<int>.from(_decoders.keys);
    _logDebug('释放所有解码器: 共${textureIds.length}');
    for (final textureId in textureIds) {
      final success = await releaseDecoderForTexture(textureId);
      if (!success) {
        allSuccess = false;
      }
    }
    // Reset all bookkeeping regardless of individual outcomes.
    _decoders.clear();
    _defaultTextureId = null;
    _withLock(_decoderStateLock, () {
      _isDecoderReleasing.clear();
    });
    _uninitializedErrorCount = 0;
    return allSuccess;
  }

  /// Clears the frame callback of the decoder identified by [textureId].
  static void clearCallbackForTexture(int textureId) {
    final decoder = _decoders[textureId];
    if (decoder != null) {
      decoder.frameCallback = null;
      _logDebug('已清除纹理ID为$textureId的回调');
    }
  }

  /// Clears the frame callbacks of all decoders.
  static void clearAllCallbacks() {
    for (final decoder in _decoders.values) {
      decoder.frameCallback = null;
    }
    _logDebug('已清除所有回调');
  }

  /// Plugin registration hook; not meant to be called manually.
  static void registerWith() {
    // Intentionally empty - exists only for plugin registration.
  }

  /// Fetches decoder statistics for [textureId].
  ///
  /// The keys are defined by the native implementation; the code above reads
  /// 'renderedFrames'. Returns an empty map when the decoder is unavailable,
  /// the native side returns null, or the call fails.
  static Future<Map<String, dynamic>> getDecoderStats(int textureId) async {
    if (!_isDecoderReady(textureId)) {
      _logDebug('解码器不可用或正在释放,无法获取统计信息');
      return {};
    }
    try {
      _logDebug('获取解码器统计信息: textureId=$textureId');
      final params = {
        'textureId': textureId,
      };
      final result = await _channel.invokeMethod<Map<Object?, Object?>>(
          'getDecoderStats', params);
      if (result == null) {
        return {};
      }
      // Narrow the loosely-typed channel map to String keys.
      final Map<String, dynamic> typedResult = {};
      result.forEach((key, value) {
        if (key is String) {
          typedResult[key] = value;
        }
      });
      _logDebug('获取解码器统计信息成功: $typedResult');
      return typedResult;
    } catch (e) {
      _logError('获取解码器统计信息失败: $e');
      return {};
    }
  }
}
/// Runs [action] while "holding" [lock].
///
/// Dart isolates are single-threaded, so no actual locking is performed.
/// The signature is kept so a real lock can be introduced later without
/// touching call sites.
void synchronized(Object lock, Function() action) {
  action.call();
}
/// Runs [action] while "holding" [lock] and returns its result.
///
/// See [synchronized]; no real locking happens in single-threaded Dart.
T synchronizedWithResult<T>(Object lock, T Function() action) {
  final result = action();
  return result;
}