// video_decode_plugin/lib/video_decode_plugin.dart

import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'video_decode_plugin_platform_interface.dart';
/// Video frame type
enum FrameType {
/// I-frame (keyframe)
iFrame,
/// P-frame (predicted frame)
pFrame,
}
/// Video codec type
enum CodecType {
/// H.264
h264,
/// H.265 (HEVC)
h265,
}
/// Decoder state enum
enum DecoderState {
/// Initializing
initializing,
/// Ready, but not yet rendering
ready,
/// Rendering
rendering,
/// Error
error,
/// Released
released,
}
/// Frame-available callback function type
typedef FrameAvailableCallback = void Function(int textureId);
/// Decoder state callback function type
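///
/// A minimal handler sketch; `fps` is one of the stats keys documented on
/// [VideoDecodePlugin.getDecoderStats]:
///
/// ```dart
/// void onDecoderState(
///     int textureId, DecoderState state, Map<String, dynamic> stats) {
///   debugPrint('decoder $textureId -> $state (fps: ${stats['fps']})');
/// }
/// ```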
typedef DecoderStateCallback = void Function(
int textureId, DecoderState state, Map<String, dynamic> stats);
/// Internal decoder instance class
class _DecoderInstance {
final int textureId;
FrameAvailableCallback? frameCallback;
DecoderStateCallback? stateCallback;
_DecoderInstance(this.textureId);
}
/// Video decoder configuration
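///
/// A minimal construction sketch using only the documented defaults:
///
/// ```dart
/// // 1280x720 H.265 stream with verbose plugin logging enabled.
/// final config = VideoDecoderConfig(
///   width: 1280,
///   height: 720,
///   codecType: CodecType.h265,
///   isDebug: true,
/// );
/// ```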
class VideoDecoderConfig {
/// Video width in pixels. Defaults to 640.
final int width;
/// Video height in pixels. Defaults to 360.
final int height;
/// Frame rate. May be null.
final int? frameRate;
/// Codec type. Defaults to H.264.
final CodecType codecType;
/// Whether debug logging is enabled. Defaults to false.
final bool isDebug;
/// Whether to use asynchronous decoding mode. Defaults to true.
final bool isAsync;
/// Creates a decoder configuration.
VideoDecoderConfig({
this.width = 640,
this.height = 360,
this.frameRate,
this.codecType = CodecType.h264,
this.isDebug = false,
this.isAsync = true,
});
/// Converts this configuration to a map for the platform channel
Map<String, dynamic> toMap() {
return {
'width': width,
'height': height,
'frameRate': frameRate,
'codecType': codecType.toString().split('.').last,
'isDebug': isDebug,
'isAsync': isAsync,
};
}
}
/// Main class of the video decode plugin
class VideoDecodePlugin {
static const MethodChannel _channel = MethodChannel('video_decode_plugin');
// Decoder map, keyed by texture ID (multiple instances supported)
static final Map<int, _DecoderInstance> _decoders = {};
// Default decoder texture ID
static int? _defaultTextureId;
// Whether the method-call handler has been installed
static bool _listenerInitialized = false;
// Whether debug mode is enabled
static bool _isDebugMode = false;
// Decoder release tracking - prevents use after release
static final Map<int, bool> _isDecoderReleasing = {};
// Decoder state lock - guards access to the release-state map
static final _decoderStateLock = Object();
// Error log throttling - prevents duplicate log spam
static int _uninitializedErrorCount = 0;
static int _lastErrorLogTime = 0;
/// Log output control - debug messages
static void _logDebug(String message) {
if (_isDebugMode) {
debugPrint('[VideoDecodePlugin] $message');
}
}
/// Log output control - error messages (always printed)
static void _logError(String message, {bool throttle = false}) {
if (throttle) {
// Increment the counter
_uninitializedErrorCount++;
// Check whether a summary log entry should be emitted
final now = DateTime.now().millisecondsSinceEpoch;
if (now - _lastErrorLogTime > 5000 || _uninitializedErrorCount >= 50) {
debugPrint(
'[VideoDecodePlugin] ERROR: $message (occurred $_uninitializedErrorCount times)');
_lastErrorLogTime = now;
_uninitializedErrorCount = 0;
}
} else {
// Print the log directly
debugPrint('[VideoDecodePlugin] ERROR: $message');
}
}
/// Initializes the method channel call handler
static void _initializeMethodCallHandler() {
if (!_listenerInitialized) {
_channel.setMethodCallHandler((call) async {
switch (call.method) {
case 'onFrameAvailable':
final Map<dynamic, dynamic> args = call.arguments;
final int textureId = args['textureId'];
// Check whether the decoder is being released
bool isReleasing = false;
// Access the decoder state under the lock
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
if (isReleasing) {
_logDebug('Frame notification received but decoder $textureId is being released, ignoring');
return null;
}
// Invoke the frame callback registered for this texture ID
final decoder = _decoders[textureId];
if (decoder != null && decoder.frameCallback != null) {
// Check whether this is a pre-notification
final bool isPrenotification = args['isPrenotification'] ?? false;
if (isPrenotification) {
_logDebug('[Pre-notification] Initial frame-available notification received, no actual video data, texture ID: $textureId');
} else {
_logDebug('Frame available notification received, texture ID: $textureId');
}
// Invoke the callback
decoder.frameCallback!(textureId);
}
return null;
case 'onDecoderState':
final Map<dynamic, dynamic> args = call.arguments;
final int textureId = args['textureId'];
final String stateStr = args['state'];
final Map<dynamic, dynamic> statsMap = args['stats'];
// Check whether the decoder is being released
bool isReleasing = false;
// Access the decoder state under the lock
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
if (isReleasing && stateStr != 'released') {
_logDebug('State callback received but decoder $textureId is being released, ignoring');
return null;
}
// Map the state string to the enum
DecoderState state;
switch (stateStr) {
case 'initializing':
state = DecoderState.initializing;
break;
case 'ready':
state = DecoderState.ready;
break;
case 'rendering':
state = DecoderState.rendering;
break;
case 'error':
state = DecoderState.error;
break;
case 'released':
state = DecoderState.released;
break;
default:
state = DecoderState.initializing;
}
// Convert statsMap to a strongly typed Map<String, dynamic>
final Map<String, dynamic> stats = {};
statsMap.forEach((key, value) {
if (key is String) {
stats[key] = value;
}
});
// Invoke the state callback
final decoder = _decoders[textureId];
if (decoder != null && decoder.stateCallback != null) {
_logDebug('Invoking decoder state callback, texture ID=$textureId, state=$stateStr');
decoder.stateCallback!(textureId, state, stats);
}
return null;
default:
throw PlatformException(
code: 'Unimplemented',
details: 'The method ${call.method} is not implemented',
);
}
});
_listenerInitialized = true;
}
}
/// Helper for running an action under the lock
static void _withLock(Object lock, Function() action) {
// In Dart, any Object instance can serve directly as a lock token
synchronized(lock, action);
}
/// Runs an action under the lock and returns the result
static T _withLockResult<T>(Object lock, T Function() action) {
return synchronizedWithResult(lock, action);
}
/// Checks whether the decoder is in a usable state
static bool _isDecoderReady(int textureId) {
bool isReleasing = false;
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
return _decoders.containsKey(textureId) && !isReleasing;
}
/// Sets the releasing flag for a decoder
static void _setDecoderReleasing(int textureId, bool isReleasing) {
_withLock(_decoderStateLock, () {
if (isReleasing) {
_isDecoderReleasing[textureId] = true;
} else {
_isDecoderReleasing.remove(textureId);
}
});
}
/// Gets the platform version
static Future<String?> getPlatformVersion() {
return VideoDecodePluginPlatform.instance.getPlatformVersion();
}
/// Whether the current platform is supported
static bool get isPlatformSupported {
return Platform.isAndroid || Platform.isIOS;
}
/// Sets the frame callback for the default decoder
static void setFrameCallback(FrameAvailableCallback callback) {
if (_defaultTextureId != null) {
setFrameCallbackForTexture(_defaultTextureId!, callback);
}
}
/// Sets the frame callback for a specific texture ID
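///
/// A minimal wiring sketch; `textureId` is assumed to be a value previously
/// returned by [createDecoder]:
///
/// ```dart
/// VideoDecodePlugin.setFrameCallbackForTexture(textureId, (id) {
///   debugPrint('frame available on texture $id');
/// });
/// ```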
static void setFrameCallbackForTexture(
int textureId, FrameAvailableCallback callback) {
_initializeMethodCallHandler();
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.frameCallback = callback;
}
}
/// Sets the decoder state callback for the default decoder
static void setStateCallback(DecoderStateCallback callback) {
if (_defaultTextureId != null) {
setStateCallbackForTexture(_defaultTextureId!, callback);
}
}
/// Sets the decoder state callback for a specific texture ID
static void setStateCallbackForTexture(
int textureId, DecoderStateCallback callback) {
_initializeMethodCallHandler();
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.stateCallback = callback;
}
}
/// Initializes the default decoder, releasing any existing one first
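///
/// A minimal end-to-end sketch (the surrounding async function and widget
/// code are assumed context); the returned ID can be shown with Flutter's
/// `Texture` widget:
///
/// ```dart
/// final textureId = await VideoDecodePlugin.initDecoder(
///   VideoDecoderConfig(width: 640, height: 360),
/// );
/// if (textureId != null) {
///   // e.g. in a widget tree: child: Texture(textureId: textureId)
/// }
/// ```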
static Future<int?> initDecoder(VideoDecoderConfig config) async {
// Apply the debug mode setting
_isDebugMode = config.isDebug;
// Reset the error counter
_uninitializedErrorCount = 0;
// Release the previous default decoder first
if (_defaultTextureId != null) {
await releaseDecoder();
}
return await createDecoder(config);
}
/// Creates a new decoder instance (multiple instances supported)
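///
/// A sketch of running two decoders side by side (the two stream sources
/// are assumptions):
///
/// ```dart
/// final frontId = await VideoDecodePlugin.createDecoder(VideoDecoderConfig());
/// final rearId = await VideoDecodePlugin.createDecoder(VideoDecoderConfig());
/// // Feed each stream via decodeFrameForTexture(frontId!, ...) and
/// // decodeFrameForTexture(rearId!, ...), and release each one separately.
/// ```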
static Future<int?> createDecoder(VideoDecoderConfig config) async {
// Update the debug mode setting
_isDebugMode = config.isDebug;
// Reset the error counter
_uninitializedErrorCount = 0;
if (!isPlatformSupported) {
_logError('The video decode plugin is not supported on this platform');
return null;
}
// Make sure the method-call handler is installed
_initializeMethodCallHandler();
try {
_logDebug(
'Creating decoder: ${config.width}x${config.height}, codec: ${config.codecType}');
final textureId =
await _channel.invokeMethod<int>('initDecoder', config.toMap());
if (textureId != null) {
// Create and store the new decoder instance
final decoder = _DecoderInstance(textureId);
_decoders[textureId] = decoder;
// Initialize the decoder release state
_setDecoderReleasing(textureId, false);
// Set as the default decoder
_defaultTextureId = textureId;
_logDebug('Decoder created successfully, texture ID: $textureId');
}
return _defaultTextureId;
} catch (e) {
_logError('Failed to initialize decoder: $e');
return null;
}
}
/// The default texture ID
static int? get textureId => _defaultTextureId;
/// All active texture IDs
static List<int> get allTextureIds => _decoders.keys.toList();
/// Decodes a video frame using the default decoder
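///
/// A feeding-loop sketch; `nalUnits()` and `isKeyFrame()` are hypothetical
/// helpers producing Annex-B NAL units from some transport:
///
/// ```dart
/// await for (final Uint8List nal in nalUnits()) {
///   final type = isKeyFrame(nal) ? FrameType.iFrame : FrameType.pFrame;
///   await VideoDecodePlugin.decodeFrame(nal, type);
/// }
/// ```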
static Future<bool> decodeFrame(
Uint8List frameData, FrameType frameType) async {
// Cache the ID in a local variable to guard against concurrent changes
final int? decoderId = _defaultTextureId;
if (decoderId == null) {
// Report the error with throttled logging to avoid log spam
_logError('Decoder not initialized', throttle: true);
return false;
}
// Check whether the decoder is being released
if (!_isDecoderReady(decoderId)) {
_logDebug('Decoder is being released, ignoring decode request');
return false;
}
return decodeFrameForTexture(decoderId, frameData, frameType);
}
/// Decodes a video frame for a specific texture ID
static Future<bool> decodeFrameForTexture(
int textureId, Uint8List frameData, FrameType frameType) async {
// Check that the decoder exists and is not being released
if (!_isDecoderReady(textureId)) {
_logDebug('Decoder unavailable or being released, ignoring decode request');
return false;
}
try {
final bool isIFrame = frameType == FrameType.iFrame;
_logDebug(
'Decoding frame: textureId=$textureId, size=${frameData.length} bytes, type=${isIFrame ? "I-frame" : "P-frame"}');
final result = await _channel.invokeMethod<bool>('decodeFrame', {
'textureId': textureId,
'frameData': frameData,
'frameType': frameType.index,
}) ??
false;
if (!result) {
_logDebug('Frame decode failed');
}
return result;
} catch (e) {
// Check whether the error was caused by the decoder having been released
if (!_decoders.containsKey(textureId)) {
_logDebug('Decoder already released, ignoring decode error');
return false;
}
_logError('Failed to decode frame: $e');
return false;
}
}
/// Releases the default decoder's resources
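///
/// A typical teardown sketch inside a `State.dispose` override (the widget
/// itself is assumed context):
///
/// ```dart
/// @override
/// void dispose() {
///   VideoDecodePlugin.releaseDecoder();
///   super.dispose();
/// }
/// ```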
static Future<bool> releaseDecoder() async {
final int? decoderId = _defaultTextureId;
if (decoderId == null) {
return true;
}
final result = await releaseDecoderForTexture(decoderId);
if (result) {
_defaultTextureId = null;
}
return result;
}
/// Releases the decoder resources for a specific texture ID
static Future<bool> releaseDecoderForTexture(int textureId) async {
// Check whether the decoder exists
if (!_decoders.containsKey(textureId)) {
return true;
}
// Mark the decoder as releasing to block new decode requests
_setDecoderReleasing(textureId, true);
try {
_logDebug('Releasing decoder: textureId=$textureId');
// Clear the callbacks so frame callbacks are no longer invoked
clearCallbackForTexture(textureId);
final result = await _channel.invokeMethod<bool>('releaseDecoder', {
'textureId': textureId,
}) ??
false;
if (result) {
// Remove from the decoder map
_decoders.remove(textureId);
// If the default decoder was released, reset the default ID
if (_defaultTextureId == textureId) {
_defaultTextureId = null;
}
// Clear the releasing flag
_setDecoderReleasing(textureId, false);
// Reset the error counter
_uninitializedErrorCount = 0;
_logDebug('Decoder released successfully: textureId=$textureId');
} else {
// Release failed, restore the state
_setDecoderReleasing(textureId, false);
_logError('Failed to release decoder: textureId=$textureId');
}
return result;
} catch (e) {
// An exception occurred, but still remove the decoder to avoid leaking resources
_decoders.remove(textureId);
if (_defaultTextureId == textureId) {
_defaultTextureId = null;
}
_setDecoderReleasing(textureId, false);
_logError('Failed to release decoder: $e');
return false;
}
}
/// Releases all decoders
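///
/// Typically called when all video views are being torn down, for example
/// on app shutdown (the call site is an assumption):
///
/// ```dart
/// await VideoDecodePlugin.releaseAllDecoders();
/// ```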
static Future<bool> releaseAllDecoders() async {
bool allSuccess = true;
// Copy the key list because the map is modified while iterating
final textureIds = List<int>.from(_decoders.keys);
_logDebug('Releasing all decoders: ${textureIds.length} in total');
// Release each decoder
for (final textureId in textureIds) {
final success = await releaseDecoderForTexture(textureId);
if (!success) {
allSuccess = false;
}
}
// Clear the shared state
_decoders.clear();
_defaultTextureId = null;
// Clear all releasing flags
_withLock(_decoderStateLock, () {
_isDecoderReleasing.clear();
});
// Reset the error counter
_uninitializedErrorCount = 0;
return allSuccess;
}
/// Clears the callbacks for a specific texture ID
static void clearCallbackForTexture(int textureId) {
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.frameCallback = null;
decoder.stateCallback = null;
_logDebug('Cleared all callbacks for texture ID $textureId');
}
}
/// Clears all callbacks
static void clearAllCallbacks() {
for (final decoder in _decoders.values) {
decoder.frameCallback = null;
decoder.stateCallback = null;
}
_logDebug('Cleared all callbacks');
}
/// Registers the plugin (no manual call required)
static void registerWith() {
// Used only for plugin registration
}
/// Gets decoder statistics.
///
/// [textureId] is the texture ID.
/// Returns a map of statistics, including:
/// - totalFrames: total number of frames received
/// - renderedFrames: number of frames rendered successfully
/// - droppedFrames: number of frames dropped
/// - fps: current rendering FPS
/// - hasSentSPS: whether an SPS has been sent
/// - hasSentPPS: whether a PPS has been sent
/// - hasSentIDR: whether an IDR (I-frame) has been sent
/// - consecutivePFrames: current number of consecutive P-frames
/// - targetWidth: target width
/// - targetHeight: target height
/// - frameRate: target frame rate
/// - decoderCount: number of currently active decoders
/// - textureId: the texture ID
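///
/// A polling sketch (the one-second interval is an assumption; `textureId`
/// comes from [createDecoder]):
///
/// ```dart
/// Timer.periodic(const Duration(seconds: 1), (_) async {
///   final stats = await VideoDecodePlugin.getDecoderStats(textureId);
///   debugPrint('fps=${stats['fps']}, dropped=${stats['droppedFrames']}');
/// });
/// ```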
static Future<Map<String, dynamic>> getDecoderStats(int textureId) async {
// Check whether the decoder is being released
if (!_isDecoderReady(textureId)) {
_logDebug('Decoder unavailable or being released, cannot fetch statistics');
return {};
}
try {
_logDebug('Fetching decoder statistics: textureId=$textureId');
final params = {
'textureId': textureId,
};
final result = await _channel.invokeMethod<Map<Object?, Object?>>(
'getDecoderStats', params);
if (result == null) {
return {};
}
// Convert the Object? entries to a strongly typed map
final Map<String, dynamic> typedResult = {};
result.forEach((key, value) {
if (key is String) {
typedResult[key] = value;
}
});
_logDebug('Decoder statistics fetched successfully: $typedResult');
return typedResult;
} catch (e) {
_logError('Failed to fetch decoder statistics: $e');
return {};
}
}
}
/// A simple synchronous "lock" implementation in Dart
void synchronized(Object lock, Function() action) {
// In single-threaded Dart we do not need a real lock,
// but the structure is kept for possible future improvements
action();
}
/// Variant that runs the action under the lock and returns its result
T synchronizedWithResult<T>(Object lock, T Function() action) {
return action();
}
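// Note: because these helpers invoke the action synchronously on a single
// isolate, they only document intent; they do not provide mutual exclusion
// across await points. If that were ever needed, one option (an assumption,
// not something this plugin currently uses) is the `synchronized` package:
//
//   import 'package:synchronized/synchronized.dart';
//
//   final lock = Lock();
//   Future<void> guarded() => lock.synchronized(() async {
//         // critical section that may await
//       });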