feat: implement v1

This commit is contained in:
liyi 2025-04-21 16:08:23 +08:00
parent 4c010b6319
commit f9038b39c4
3 changed files with 390 additions and 88 deletions

View File

@@ -35,6 +35,38 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
// Main-thread Handler
private val mainHandler = Handler(Looper.getMainLooper())
// Whether debug mode is enabled
private var isDebugMode = false
/**
* Debug log output - only emitted in debug mode
*/
private fun logDebug(message: String) {
if (isDebugMode) {
Log.d(TAG, message)
}
}
/**
* Warning log output - only emitted in debug mode
*/
private fun logWarning(message: String) {
if (isDebugMode) {
Log.w(TAG, message)
}
}
/**
* Error log output - always emitted
*/
private fun logError(message: String, e: Exception? = null) {
if (e != null) {
Log.e(TAG, message, e)
} else {
Log.e(TAG, message)
}
}
/**
* Called when the plugin is attached to the Flutter engine
@@ -75,7 +107,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
}
}
} catch (e: Exception) {
Log.e(TAG, "处理方法调用失败", e)
logError("处理方法调用失败", e)
result.error("NATIVE_ERROR", "处理方法调用失败: ${e.message}", null)
}
}
@@ -102,6 +134,9 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
val isDebug = call.argument<Boolean>("isDebug") ?: false
val enableHardwareDecoder = call.argument<Boolean>("enableHardwareDecoder") ?: true
// Update the plugin's debug-mode flag
this.isDebugMode = isDebug
// Create the texture
val textureEntry = textureRegistry.createSurfaceTexture()
val textureId = textureEntry.id()
@@ -109,7 +144,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
// Check whether this texture ID has already been used
if (releasedTextureIds.contains(textureId)) {
// If it has, the Flutter engine is reusing texture IDs, which may cause problems
Log.w(TAG, "Warning: texture ID $textureId has been used before; this may cause problems")
logWarning("Warning: texture ID $textureId has been used before; this may cause problems")
// Record that this texture ID is active again
releasedTextureIds.remove(textureId)
@@ -136,16 +171,28 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
// Notify Flutter to refresh the texture
runOnMainThread {
try {
Log.d(TAG, "发送帧可用通知给Flutter纹理ID: $textureId")
// 根据当前帧数判断是否是预通知
val decoder = decoders[textureId]
val stats = decoder?.getStatistics() ?: mapOf()
val renderedFrames = stats["renderedFrames"] as? Int ?: 0
if (renderedFrames == 0) {
// 这是初始化预通知
logDebug("[预通知] 发送初始帧可用通知给Flutter纹理ID: $textureId(无实际视频数据)")
} else {
// 这是实际帧通知
logDebug("发送帧可用通知给Flutter纹理ID: $textureId,已渲染帧数: $renderedFrames")
}
channel.invokeMethod("onFrameAvailable", mapOf("textureId" to textureId))
} catch (e: Exception) {
Log.e(TAG, "通知Flutter更新纹理失败", e)
logError("通知Flutter更新纹理失败", e)
}
}
}
override fun onError(error: String) {
Log.e(TAG, "解码器错误: $error")
logError("解码器错误: $error")
}
}
@@ -156,7 +203,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
result.success(textureId)
} catch (e: Exception) {
Log.e(TAG, "初始化解码器失败", e)
logError("初始化解码器失败", e)
result.error("INIT_FAILED", "初始化解码器失败: ${e.message}", null)
}
}
@@ -167,13 +214,18 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
private fun handleDecodeFrame(call: MethodCall, result: Result) {
try {
// Read arguments
val textureId = call.argument<Number>("textureId")?.toLong() ?: return result.error("INVALID_ARGS", "Invalid texture ID", null)
val frameData = call.argument<ByteArray>("frameData") ?: return result.error("INVALID_ARGS", "Invalid frame data", null)
val textureId = call.argument<Number>("textureId")?.toLong() ?:
return result.error("INVALID_ARGS", "Invalid texture ID", null)
val frameData = call.argument<ByteArray>("frameData") ?:
return result.error("INVALID_ARGS", "Invalid frame data", null)
val frameType = call.argument<Int>("frameType") ?: 0
val isIFrame = frameType == 0 // 0 = I-frame, 1 = P-frame
// Get the decoder
val decoder = decoders[textureId] ?: return result.error("DECODER_NOT_FOUND", "No decoder found for this texture ID", null)
val decoder = decoders[textureId] ?:
return result.error("DECODER_NOT_FOUND", "No decoder found for this texture ID", null)
// Decode the frame
val success = decoder.decodeFrame(frameData, isIFrame)
@@ -182,7 +234,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
result.success(success)
} catch (e: Exception) {
Log.e(TAG, "Failed to decode frame", e)
logError("Failed to decode frame", e)
result.error("DECODE_FAILED", "Failed to decode frame: ${e.message}", null)
}
}
@@ -193,7 +245,8 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
private fun handleReleaseDecoder(call: MethodCall, result: Result) {
try {
// Read arguments
val textureId = call.argument<Number>("textureId")?.toLong() ?: return result.error("INVALID_ARGS", "Invalid texture ID", null)
val textureId = call.argument<Number>("textureId")?.toLong() ?:
return result.error("INVALID_ARGS", "Invalid texture ID", null)
// Get the decoder
val decoder = decoders[textureId]
@@ -216,7 +269,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
result.success(true)
} catch (e: Exception) {
Log.e(TAG, "Failed to release decoder", e)
logError("Failed to release decoder", e)
result.error("RELEASE_FAILED", "Failed to release decoder: ${e.message}", null)
}
}
@@ -227,10 +280,12 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
private fun handleGetDecoderStats(call: MethodCall, result: Result) {
try {
// Get the texture ID
val textureId = call.argument<Number>("textureId")?.toLong() ?: return result.error("INVALID_ARGS", "Invalid texture ID", null)
val textureId = call.argument<Number>("textureId")?.toLong() ?:
return result.error("INVALID_ARGS", "Invalid texture ID", null)
// Get the decoder
val decoder = decoders[textureId] ?: return result.error("DECODER_NOT_FOUND", "No decoder found for this texture ID", null)
val decoder = decoders[textureId] ?:
return result.error("DECODER_NOT_FOUND", "No decoder found for this texture ID", null)
// Get statistics
val stats = decoder.getStatistics()
@@ -244,7 +299,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
result.success(enhancedStats)
} catch (e: Exception) {
Log.e(TAG, "Failed to get decoder statistics", e)
logError("Failed to get decoder statistics", e)
result.error("STATS_FAILED", "Failed to get decoder statistics: ${e.message}", null)
}
}
@@ -269,7 +324,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
try {
decoder.release()
} catch (e: Exception) {
Log.e(TAG, "Failed to release decoder when the plugin detached", e)
logError("Failed to release decoder when the plugin detached", e)
}
}

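Note: the handlers above read isDebug and enableHardwareDecoder from the method-call arguments, and the decoder file below uses config.width, config.height, config.codecType and config.isDebug, but the VideoDecoderConfig class itself is not part of this commit. A minimal Dart sketch consistent with those references might look as follows; the defaults, the 'width'/'height'/'codecType' map keys, and the example codec value are assumptions:

/// Hypothetical sketch of VideoDecoderConfig (not included in this diff).
/// Only width, height, codecType, isDebug and enableHardwareDecoder are
/// confirmed by the code in this commit; everything else is assumed.
class VideoDecoderConfig {
  final int width;
  final int height;
  final String codecType; // e.g. "h264" (assumed value)
  final bool isDebug;
  final bool enableHardwareDecoder;

  const VideoDecoderConfig({
    required this.width,
    required this.height,
    this.codecType = 'h264',
    this.isDebug = false,
    this.enableHardwareDecoder = true,
  });

  /// Map sent over the MethodChannel; 'isDebug' and 'enableHardwareDecoder'
  /// match the keys read in handleInitDecoder above.
  Map<String, dynamic> toMap() => {
        'width': width,
        'height': height,
        'codecType': codecType,
        'isDebug': isDebug,
        'enableHardwareDecoder': enableHardwareDecoder,
      };
}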
View File

@@ -68,10 +68,10 @@ class VideoDecoder(
private var renderedFrameCount = 0
private var droppedFrameCount = 0
// Track I-frame state
private var hasSentSPS = false
private var hasSentPPS = false
private var hasSentIDR = true
// Track I-frame state - use AtomicBoolean to avoid concurrency issues
private val hasSentSPS = AtomicBoolean(false)
private val hasSentPPS = AtomicBoolean(false)
private val hasSentIDR = AtomicBoolean(false)
// Track the time of the last key frame
private var lastIFrameTimeMs = 0L
@@ -89,41 +89,99 @@ class VideoDecoder(
// Main-thread Handler, used to update the texture on the main thread
private val mainHandler = Handler(Looper.getMainLooper())
// Log control
private var logVerbose = false
private var frameLogThreshold = 30 // Emit detailed logs every 30 frames
// Whether debug mode is enabled
private val isDebugMode: Boolean = config.isDebug
/**
* Debug log output - only emitted in debug mode
*/
private fun logDebug(message: String) {
if (isDebugMode) {
Log.d(TAG, message)
}
}
/**
* Warning log output - only emitted in debug mode
*/
private fun logWarning(message: String) {
if (isDebugMode) {
Log.w(TAG, message)
}
}
/**
* Error log output - always emitted
*/
private fun logError(message: String, e: Exception? = null) {
if (e != null) {
Log.e(TAG, message, e)
} else {
Log.e(TAG, message)
}
}
// Decoder initialization
init {
try {
logVerbose = config.isDebug
// Capture all logs
Log.d(TAG, "Initializing decoder: ${config.width}x${config.height}, codec: ${config.codecType}")
// Set the SurfaceTexture's default buffer size
surfaceTexture.setDefaultBufferSize(config.width, config.height)
logDebug("Initializing decoder: ${config.width}x${config.height}, codec: ${config.codecType}")
// Initialize the decoder
if (setupDecoder()) {
isRunning.set(true)
// Notify that an initial frame is available so Flutter creates the Texture view
Log.d(TAG, "Decoder initialized successfully, sending initial frame notification")
logDebug("[Pre-notification] Decoder initialized successfully, sending initial frame notification (no actual video data)")
mainHandler.post {
notifyFrameAvailable()
notifyFrameAvailableInitial()
}
} else {
Log.e(TAG, "解码器初始化失败")
logError("解码器初始化失败")
callback?.onError("解码器初始化失败")
}
} catch (e: Exception) {
Log.e(TAG, "创建解码器实例失败", e)
logError("创建解码器实例失败", e)
callback?.onError("创建解码器实例失败: ${e.message}")
release()
}
}
/**
* Notify that the initial frame is available - called only during initialization to indicate the decoder is ready but no actual frame has been decoded yet
*/
private fun notifyFrameAvailableInitial() {
if (!isRunning.get()) {
logDebug("[预通知] 解码器已停止,跳过初始帧可用通知")
return
}
try {
logDebug("[预通知] 发送初始帧可用通知目的是让Flutter创建纹理视图")
callback?.onFrameAvailable()
} catch (e: Exception) {
logError("通知初始帧可用时出错: ${e.message}", e)
}
}
/**
* Notify frame available - called after an actual frame has been decoded
*/
private fun notifyFrameAvailable() {
if (!isRunning.get()) {
logDebug("解码器已停止,跳过帧可用通知")
return
}
try {
logDebug("发送帧可用通知,当前渲染帧数: $renderedFrameCount")
callback?.onFrameAvailable()
} catch (e: Exception) {
logError("通知帧可用时出错: ${e.message}", e)
}
}
/**
* Set up the decoder
*/
@@ -158,33 +216,16 @@ class VideoDecoder(
mediaCodec = decoder
isDecoderConfigured.set(true)
Log.d(TAG, "解码器设置完成: ${decoder.codecInfo.name}")
logDebug("解码器设置完成: ${decoder.codecInfo.name}")
return true
} catch (e: Exception) {
Log.e(TAG, "设置解码器失败", e)
logError("设置解码器失败", e)
isDecoderConfigured.set(false)
callback?.onError("设置解码器失败: ${e.message}")
return false
}
}
/**
* Notify frame available
*/
private fun notifyFrameAvailable() {
if (!isRunning.get()) {
Log.d(TAG, "解码器已停止,跳过帧可用通知")
return
}
try {
Log.d(TAG, "发送帧可用通知,当前渲染帧数: $renderedFrameCount")
callback?.onFrameAvailable()
} catch (e: Exception) {
Log.e(TAG, "通知帧可用时出错: ${e.message}", e)
}
}
/**
* 快速检查NAL类型
*/
@@ -215,7 +256,7 @@ class VideoDecoder(
}
}
} catch (e: Exception) {
Log.e(TAG, "Error checking NAL type", e)
logError("Error checking NAL type", e)
}
// Unrecognized; fall back to the passed-in parameter
@@ -227,7 +268,7 @@ class VideoDecoder(
*/
fun decodeFrame(frameData: ByteArray, isIFrame: Boolean): Boolean {
if (!isRunning.get() || !isDecoderConfigured.get() || frameData.isEmpty()) {
Log.w(TAG, "Decoder not running, not configured, or frame data is empty")
logWarning("Decoder not running, not configured, or frame data is empty")
return false
}
@@ -245,20 +286,21 @@ class VideoDecoder(
val hash = frameData.hashCode()
if (lastSPSHash == hash) return true
lastSPSHash = hash
hasSentSPS = true
hasSentSPS.set(true)
} else if (effectiveType == NAL_UNIT_TYPE_PPS) {
val hash = frameData.hashCode()
if (lastPPSHash == hash) return true
lastPPSHash = hash
hasSentPPS = true
hasSentPPS.set(true)
} else if (effectiveType == NAL_UNIT_TYPE_IDR) {
hasSentIDR = true
hasSentIDR.set(true)
lastIFrameTimeMs = System.currentTimeMillis()
consecutivePFrameCount = 0
} else {
// P-frame handling
if (!hasSentIDR) {
Log.w(TAG, "Dropping P-frame because no I-frame has been received yet")
if (!hasSentIDR.get() && renderedFrameCount == 0) {
logWarning("Dropping P-frame because no I-frame has been received yet")
droppedFrameCount++
return false
}
@@ -271,14 +313,14 @@ class VideoDecoder(
// Decode the frame
val inputBufferIndex = codec.dequeueInputBuffer(TIMEOUT_US)
if (inputBufferIndex < 0) {
Log.w(TAG, "Could not obtain an input buffer; may need to wait")
logWarning("Could not obtain an input buffer; may need to wait")
return false
}
// Get the input buffer
val inputBuffer = codec.getInputBuffer(inputBufferIndex)
if (inputBuffer == null) {
Log.e(TAG, "Failed to get the input buffer")
logError("Failed to get the input buffer")
return false
}
@@ -301,7 +343,7 @@ class VideoDecoder(
return true
} catch (e: Exception) {
Log.e(TAG, "Failed to decode frame", e)
logError("Failed to decode frame", e)
return false
}
}
@@ -325,14 +367,14 @@ class VideoDecoder(
if (render) {
renderedFrameCount++
lastOutputTimeMs = System.currentTimeMillis()
Log.d(TAG, "Successfully rendered frame #$renderedFrameCount")
logDebug("Successfully rendered frame #$renderedFrameCount")
// Notify Flutter to refresh the texture
notifyFrameAvailable()
}
}
outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
Log.d(TAG, "Output format changed: ${codec.outputFormat}")
logDebug("Output format changed: ${codec.outputFormat}")
}
outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER -> {
outputDone = true
@@ -345,7 +387,7 @@ class VideoDecoder(
* Release all resources
*/
fun release() {
Log.d(TAG, "Releasing decoder resources")
logDebug("Releasing decoder resources")
isRunning.set(false)
isDecoderConfigured.set(false)
@@ -356,7 +398,7 @@ class VideoDecoder(
codec.stop()
codec.release()
} catch (e: Exception) {
Log.e(TAG, "Failed to release MediaCodec", e)
logError("Failed to release MediaCodec", e)
}
}
mediaCodec = null
@@ -364,20 +406,20 @@ class VideoDecoder(
try {
surface.release()
} catch (e: Exception) {
Log.e(TAG, "Failed to release Surface", e)
logError("Failed to release Surface", e)
}
try {
textureEntry.release()
} catch (e: Exception) {
Log.e(TAG, "Failed to release TextureEntry", e)
logError("Failed to release TextureEntry", e)
}
callback = null
Log.d(TAG, "All resources released")
logDebug("All resources released")
} catch (e: Exception) {
Log.e(TAG, "Error while releasing resources", e)
logError("Error while releasing resources", e)
}
}
@@ -389,9 +431,9 @@ class VideoDecoder(
"totalFrames" to frameCount,
"renderedFrames" to renderedFrameCount,
"droppedFrames" to droppedFrameCount,
"hasSentSPS" to hasSentSPS,
"hasSentPPS" to hasSentPPS,
"hasSentIDR" to hasSentIDR,
"hasSentSPS" to hasSentSPS.get(),
"hasSentPPS" to hasSentPPS.get(),
"hasSentIDR" to hasSentIDR.get(),
"consecutivePFrames" to consecutivePFrameCount,
"targetWidth" to config.width,
"targetHeight" to config.height,

View File

@@ -102,6 +102,47 @@ class VideoDecodePlugin {
// Whether the method-call handler has been initialized
static bool _listenerInitialized = false;
// Whether debug mode is enabled
static bool _isDebugMode = false;
// Releasing state per decoder - used to avoid operating on a decoder while it is being released
static final Map<int, bool> _isDecoderReleasing = {};
// Lock object - guards access to the decoder state
static final _decoderStateLock = Object();
// Error-log throttling - avoids flooding the log with repeated errors
static int _uninitializedErrorCount = 0;
static int _lastErrorLogTime = 0;
static const int _ERROR_LOG_THRESHOLD = 5; // 5
/// Debug log - only printed in debug mode
static void _logDebug(String message) {
if (_isDebugMode) {
debugPrint('[VideoDecodePlugin] $message');
}
}
/// Error log - always printed, optionally throttled
static void _logError(String message, {bool throttle = false}) {
if (throttle) {
// Count the occurrence
_uninitializedErrorCount++;
// Only log periodically or after many occurrences
final now = DateTime.now().millisecondsSinceEpoch;
if (now - _lastErrorLogTime > 5000 || _uninitializedErrorCount >= 50) {
debugPrint(
'[VideoDecodePlugin] ERROR: $message (occurred $_uninitializedErrorCount times)');
_lastErrorLogTime = now;
_uninitializedErrorCount = 0;
}
} else {
// Log immediately
debugPrint('[VideoDecodePlugin] ERROR: $message');
}
}
/// Initialize the MethodChannel call handler
static void _initializeMethodCallHandler() {
if (!_listenerInitialized) {
@@ -111,10 +152,38 @@ class VideoDecodePlugin {
final Map<dynamic, dynamic> args = call.arguments;
final int textureId = args['textureId'];
// Check whether this decoder is currently being released
bool isReleasing = false;
// Read the releasing state under the lock
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
if (isReleasing) {
_logDebug('Frame notification received but decoder $textureId is being released; ignoring');
return null;
}
// Invoke the frame callback registered for this texture ID
final decoder = _decoders[textureId];
if (decoder != null && decoder.frameCallback != null) {
decoder.frameCallback!(textureId);
// Distinguish the initial pre-notification from real frame notifications
getDecoderStats(textureId).then((stats) {
final renderedFrames = stats['renderedFrames'] ?? 0;
if (renderedFrames == 0) {
_logDebug('[Pre-notification] Initial frame-available notification received (no actual video data), texture ID: $textureId');
} else {
_logDebug('Frame-available notification received, texture ID: $textureId, rendered frames: $renderedFrames');
}
// Invoke the frame callback
decoder.frameCallback!(textureId);
}).catchError((error) {
// If stats could not be fetched, still invoke the callback
_logDebug('Frame-available notification received, texture ID: $textureId');
decoder.frameCallback!(textureId);
});
}
return null;
@@ -130,6 +199,39 @@ class VideoDecodePlugin {
}
}
/// Run an action while holding the given lock
static void _withLock(Object lock, Function() action) {
// In Dart any Object instance can be used as the lock token
synchronized(lock, action);
}
/// Run an action under the lock and return its result
static T _withLockResult<T>(Object lock, T Function() action) {
return synchronizedWithResult(lock, action);
}
/// Whether the decoder for this texture ID exists and is not being released
static bool _isDecoderReady(int textureId) {
bool isReleasing = false;
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
return _decoders.containsKey(textureId) && !isReleasing;
}
/// Mark or clear a decoder's releasing state
static void _setDecoderReleasing(int textureId, bool isReleasing) {
_withLock(_decoderStateLock, () {
if (isReleasing) {
_isDecoderReleasing[textureId] = true;
} else {
_isDecoderReleasing.remove(textureId);
}
});
}
/// Get the platform version
static Future<String?> getPlatformVersion() {
return VideoDecodePluginPlatform.instance.getPlatformVersion();
@@ -160,6 +262,12 @@ class VideoDecodePlugin {
/// Initialize the default decoder
static Future<int?> initDecoder(VideoDecoderConfig config) async {
// Update the debug-mode flag from the config
_isDebugMode = config.isDebug;
// Reset the error throttle counter
_uninitializedErrorCount = 0;
// Release any existing default decoder first
if (_defaultTextureId != null) {
await releaseDecoder();
@@ -170,8 +278,14 @@ class VideoDecodePlugin {
/// Create a decoder
static Future<int?> createDecoder(VideoDecoderConfig config) async {
// Update the debug-mode flag from the config
_isDebugMode = config.isDebug;
// Reset the error throttle counter
_uninitializedErrorCount = 0;
if (!isPlatformSupported) {
debugPrint('The video decode plugin is not supported on this platform');
_logError('The video decode plugin is not supported on this platform');
return null;
}
@@ -179,6 +293,8 @@ class VideoDecodePlugin {
_initializeMethodCallHandler();
try {
_logDebug(
'Creating decoder: ${config.width}x${config.height}, codec: ${config.codecType}');
final textureId =
await _channel.invokeMethod<int>('initDecoder', config.toMap());
@@ -187,13 +303,17 @@ class VideoDecodePlugin {
final decoder = _DecoderInstance(textureId);
_decoders[textureId] = decoder;
// Clear any releasing flag for this texture
_setDecoderReleasing(textureId, false);
// Remember it as the default texture ID
_defaultTextureId = textureId;
_logDebug('Decoder created successfully, texture ID: $textureId');
}
return _defaultTextureId;
} catch (e) {
debugPrint('Failed to initialize decoder: $e');
_logError('Failed to initialize decoder: $e');
return null;
}
}
@@ -207,42 +327,69 @@ class VideoDecodePlugin {
/// Decode a video frame with the default decoder
static Future<bool> decodeFrame(
Uint8List frameData, FrameType frameType) async {
if (_defaultTextureId == null) {
debugPrint('Decoder not initialized');
// Capture the current texture ID locally
final int? decoderId = _defaultTextureId;
if (decoderId == null) {
// Use throttled error logging
_logError('Decoder not initialized', throttle: true);
return false;
}
return decodeFrameForTexture(_defaultTextureId!, frameData, frameType);
// Skip if the decoder is not ready
if (!_isDecoderReady(decoderId)) {
_logDebug('Decoder is being released; ignoring decode request');
return false;
}
return decodeFrameForTexture(decoderId, frameData, frameType);
}
/// Decode a video frame for the given texture ID
static Future<bool> decodeFrameForTexture(
int textureId, Uint8List frameData, FrameType frameType) async {
if (!_decoders.containsKey(textureId)) {
debugPrint('Texture ID not found: $textureId');
// Check the decoder exists and is not being released
if (!_isDecoderReady(textureId)) {
_logDebug('Decoder unavailable or being released; ignoring decode request');
return false;
}
try {
return await _channel.invokeMethod<bool>('decodeFrame', {
final bool isIFrame = frameType == FrameType.iFrame;
_logDebug(
'Decoding frame: textureId=$textureId, size=${frameData.length} bytes, type=${isIFrame ? "I-frame" : "P-frame"}');
final result = await _channel.invokeMethod<bool>('decodeFrame', {
'textureId': textureId,
'frameData': frameData,
'frameType': frameType.index,
}) ??
false;
if (!result) {
_logDebug('Frame decode failed');
}
return result;
} catch (e) {
debugPrint('解码帧失败: $e');
// If the decoder was released in the meantime, ignore the error
if (!_decoders.containsKey(textureId)) {
_logDebug('Decoder already released; ignoring decode error');
return false;
}
_logError('Failed to decode frame: $e');
return false;
}
}
/// Release the default decoder
static Future<bool> releaseDecoder() async {
if (_defaultTextureId == null) {
final int? decoderId = _defaultTextureId;
if (decoderId == null) {
return true;
}
final result = await releaseDecoderForTexture(_defaultTextureId!);
final result = await releaseDecoderForTexture(decoderId);
if (result) {
_defaultTextureId = null;
}
@@ -252,11 +399,20 @@ class VideoDecodePlugin {
/// Release the decoder resources for the given texture ID
static Future<bool> releaseDecoderForTexture(int textureId) async {
// Nothing to do if the decoder does not exist
if (!_decoders.containsKey(textureId)) {
return true;
}
// Mark the decoder as releasing
_setDecoderReleasing(textureId, true);
try {
_logDebug('Releasing decoder: textureId=$textureId');
// Clear the frame callback first
clearCallbackForTexture(textureId);
final result = await _channel.invokeMethod<bool>('releaseDecoder', {
'textureId': textureId,
}) ??
@@ -270,11 +426,30 @@ class VideoDecodePlugin {
if (_defaultTextureId == textureId) {
_defaultTextureId = null;
}
// Clear the releasing flag
_setDecoderReleasing(textureId, false);
// Reset the error throttle counter
_uninitializedErrorCount = 0;
_logDebug('Decoder released successfully: textureId=$textureId');
} else {
// Clear the releasing flag even on failure
_setDecoderReleasing(textureId, false);
_logError('Failed to release decoder: textureId=$textureId');
}
return result;
} catch (e) {
debugPrint('Failed to release decoder: $e');
// Clean up local state even if the platform call failed
_decoders.remove(textureId);
if (_defaultTextureId == textureId) {
_defaultTextureId = null;
}
_setDecoderReleasing(textureId, false);
_logError('Failed to release decoder: $e');
return false;
}
}
@@ -286,6 +461,8 @@ class VideoDecodePlugin {
// Take a snapshot of the current texture IDs
final textureIds = List<int>.from(_decoders.keys);
_logDebug('Releasing all decoders: ${textureIds.length} in total');
// Release each decoder in turn
for (final textureId in textureIds) {
final success = await releaseDecoderForTexture(textureId);
@@ -298,6 +475,14 @@ class VideoDecodePlugin {
_decoders.clear();
_defaultTextureId = null;
// Clear all releasing flags
_withLock(_decoderStateLock, () {
_isDecoderReleasing.clear();
});
// Reset the error throttle counter
_uninitializedErrorCount = 0;
return allSuccess;
}
@@ -306,6 +491,7 @@ class VideoDecodePlugin {
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.frameCallback = null;
_logDebug('Callback cleared for texture ID $textureId');
}
}
@@ -314,6 +500,7 @@ class VideoDecodePlugin {
for (final decoder in _decoders.values) {
decoder.frameCallback = null;
}
_logDebug('All callbacks cleared');
}
/// Get decoder statistics; the returned map includes fields such as:
@@ -332,7 +519,14 @@ class VideoDecodePlugin {
/// - averageProcessingTimeMs: average processing time (ms)
/// - decoderCount: number of active decoders
static Future<Map<String, dynamic>> getDecoderStats(int textureId) async {
// Check the decoder is available
if (!_isDecoderReady(textureId)) {
_logDebug('Decoder unavailable or being released; cannot get statistics');
return {};
}
try {
_logDebug('Getting decoder statistics: textureId=$textureId');
final params = {
'textureId': textureId,
};
@@ -351,12 +545,23 @@ class VideoDecodePlugin {
}
});
_logDebug('Decoder statistics fetched successfully: $typedResult');
return typedResult;
} catch (e) {
if (kDebugMode) {
print('Failed to get decoder statistics: $e');
}
_logError('Failed to get decoder statistics: $e');
return {};
}
}
}
/// A simple synchronization helper for Dart
void synchronized(Object lock, Function() action) {
// Dart's UI isolate is single-threaded, so no real mutex is needed here;
// the wrapper is kept so that locking call sites stay explicit
action();
}
/// Run an action under the lock and return its result
T synchronizedWithResult<T>(Object lock, T Function() action) {
return action();
}