feat:简化版提交

This commit is contained in:
liyi 2025-04-29 17:11:38 +08:00
parent 390978e5b1
commit d837a1206b
4 changed files with 333 additions and 2260 deletions

View File

@ -27,46 +27,11 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
// 纹理注册表
private lateinit var textureRegistry: TextureRegistry
// 解码器映射表 (纹理ID -> 解码器)
private val decoders = ConcurrentHashMap<Long, VideoDecoder>()
// 解码器
private var decoder: VideoDecoder? = null
// 已释放的纹理ID集合用于跟踪防止重用
private val releasedTextureIds = HashSet<Long>()
// 主线程Handler
private val mainHandler = Handler(Looper.getMainLooper())
// 是否是调试模式
private var isDebugMode = false
/**
* 输出调试日志 - 仅在调试模式下输出
*/
private fun logDebug(message: String) {
if (isDebugMode) {
Log.d(TAG, message)
}
}
/**
* 输出警告日志 - 仅在调试模式下输出
*/
private fun logWarning(message: String) {
if (isDebugMode) {
Log.w(TAG, message)
}
}
/**
* 输出错误日志 - 始终输出
*/
private fun logError(message: String, e: Exception? = null) {
if (e != null) {
Log.e(TAG, message, e)
} else {
Log.e(TAG, message)
}
}
// 纹理ID
private var textureId: Long? = null
/**
* 插件绑定到Flutter引擎时调用
@ -85,143 +50,30 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
* 处理Flutter方法调用
*/
override fun onMethodCall(@NonNull call: MethodCall, @NonNull result: Result) {
try {
when (call.method) {
"getPlatformVersion" -> {
handleGetPlatformVersion(result)
}
"initDecoder" -> {
handleInitDecoder(call, result)
}
"decodeFrame" -> {
handleDecodeFrame(call, result)
}
"releaseDecoder" -> {
handleReleaseDecoder(call, result)
}
"getDecoderStats" -> {
handleGetDecoderStats(call, result)
}
else -> {
result.notImplemented()
}
}
} catch (e: Exception) {
logError("处理方法调用失败", e)
result.error("NATIVE_ERROR", "处理方法调用失败: ${e.message}", null)
when (call.method) {
"initDecoder" -> handleInitDecoder(call, result)
"decodeFrame" -> handleDecodeFrame(call, result)
"releaseDecoder" -> handleReleaseDecoder(call, result)
else -> result.notImplemented()
}
}
/**
* 获取平台版本
*/
private fun handleGetPlatformVersion(result: Result) {
result.success("Android ${android.os.Build.VERSION.RELEASE}")
}
/**
* 初始化解码器
*/
private fun handleInitDecoder(call: MethodCall, result: Result) {
try {
// 读取参数
val width = call.argument<Int>("width") ?: 640
val height = call.argument<Int>("height") ?: 360
val frameRate = call.argument<Int?>("frameRate")
val codecType = call.argument<String>("codecType") ?: "h264"
val isDebug = call.argument<Boolean>("isDebug") ?: false
// 更新插件的调试模式标志
this.isDebugMode = isDebug
// 创建纹理
val textureEntry = textureRegistry.createSurfaceTexture()
val textureId = textureEntry.id()
// 检查这个纹理ID是否已经被使用过
if (releasedTextureIds.contains(textureId)) {
// 如果已经被使用过说明Flutter引擎在重用纹理ID这可能导致问题
logWarning("警告: 纹理ID $textureId 已被使用过,这可能导致问题")
// 记录这个纹理ID现在是活跃的
releasedTextureIds.remove(textureId)
textureId = textureEntry.id()
decoder = VideoDecoder(context, textureEntry, width, height, codecType) {
// onFrameAvailable callback
channel.invokeMethod("onFrameAvailable", mapOf("textureId" to textureId))
}
// 创建解码器配置
val config = VideoDecoderConfig(
width = width,
height = height,
codecType = codecType,
frameRate = frameRate,
isDebug = isDebug,
isAsync = call.argument<Boolean>("isAsync") ?: true
)
// 创建解码器
val decoder = VideoDecoder(context, textureEntry, config)
// 设置回调
decoder.callback = object : VideoDecoder.DecoderCallback {
override fun onFrameAvailable() {
// 通知Flutter刷新纹理
runOnMainThread {
try {
// 根据当前帧数判断是否是预通知
val decoder = decoders[textureId]
val stats = decoder?.getStatistics() ?: mapOf()
val renderedFrames = stats["renderedFrames"] as? Int ?: 0
if (renderedFrames == 0) {
// 这是初始化预通知
logDebug("[预通知] 发送初始帧可用通知给Flutter纹理ID: $textureId(无实际视频数据)")
// 发送帧可用通知(带预通知标志)
channel.invokeMethod("onFrameAvailable", mapOf(
"textureId" to textureId,
"isPrenotification" to true
))
// 发送解码器状态通知(准备就绪)
sendDecoderState(textureId, "ready", stats)
} else {
// 这是实际帧通知
logDebug("发送帧可用通知给Flutter纹理ID: $textureId,已渲染帧数: $renderedFrames")
// 发送帧可用通知(实际帧)
channel.invokeMethod("onFrameAvailable", mapOf(
"textureId" to textureId,
"isPrenotification" to false
))
// 发送解码器状态通知(渲染中)
sendDecoderState(textureId, "rendering", stats)
}
} catch (e: Exception) {
logError("通知Flutter更新纹理失败", e)
}
}
}
override fun onError(error: String) {
logError("解码器错误: $error")
// 发送错误状态通知
val stats = decoders[textureId]?.getStatistics() ?: mapOf()
sendDecoderState(textureId, "error", stats + mapOf("errorMessage" to error))
}
}
// 保存解码器
decoders[textureId] = decoder
// 发送初始化状态
sendDecoderState(textureId, "initializing", decoder.getStatistics())
// 返回纹理ID
result.success(textureId)
} catch (e: Exception) {
logError("初始化解码器失败", e)
result.error("INIT_FAILED", "初始化解码器失败: ${e.message}", null)
}
}
@ -231,38 +83,14 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
*/
private fun handleDecodeFrame(call: MethodCall, result: Result) {
try {
// 读取参数
val textureId = call.argument<Number>("textureId")?.toLong() ?:
return result.error("INVALID_ARGS", "无效的纹理ID", null)
val frameData = call.argument<ByteArray>("frameData") ?:
return result.error("INVALID_ARGS", "无效的帧数据", null)
val frameData = call.argument<ByteArray>("frameData") ?: return result.error("INVALID_ARGS", "无效的帧数据", null)
val frameType = call.argument<Int>("frameType") ?: 0
val isIFrame = frameType == 0 // 0表示I帧1表示P帧
// 获取解码器
val decoder = decoders[textureId] ?:
return result.error("DECODER_NOT_FOUND", "找不到纹理ID对应的解码器", null)
// 解码帧
val success = decoder.decodeFrame(frameData, isIFrame)
// 发送更新后的解码器状态(在帧解码后,无论成功与否)
val stats = decoder.getStatistics()
// 根据是否有渲染帧确定状态
val renderedFrames = stats["renderedFrames"] as? Int ?: 0
val state = if (renderedFrames > 0) "rendering" else "ready"
// 发送状态更新
sendDecoderState(textureId, state, stats)
// 返回结果
val timestamp = call.argument<Long>("timestamp") ?: 0L
val frameSeq = call.argument<Int>("frameSeq") ?: 0
val refIFrameSeq = call.argument<Int>("refIFrameSeq")
val success = decoder?.decodeFrame(frameData, frameType, timestamp, frameSeq, refIFrameSeq) ?: false
result.success(success)
} catch (e: Exception) {
logError("解码帧失败", e)
result.error("DECODE_FAILED", "解码帧失败: ${e.message}", null)
}
}
@ -272,123 +100,22 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
*/
private fun handleReleaseDecoder(call: MethodCall, result: Result) {
try {
// 读取参数
val textureId = call.argument<Number>("textureId")?.toLong() ?:
return result.error("INVALID_ARGS", "无效的纹理ID", null)
// 获取解码器
val decoder = decoders[textureId]
if (decoder == null) {
// 如果找不到解码器,可能已经释放,直接返回成功
result.success(true)
return
}
// 发送释放状态
sendDecoderState(textureId, "released", decoder.getStatistics())
// 释放解码器
decoder.release()
// 从映射中移除
decoders.remove(textureId)
// 记录已释放的纹理ID以便检测重用
releasedTextureIds.add(textureId)
// 返回成功
decoder?.release()
decoder = null
textureId = null
result.success(true)
} catch (e: Exception) {
logError("释放解码器失败", e)
result.error("RELEASE_FAILED", "释放解码器失败: ${e.message}", null)
}
}
/**
* 获取解码器统计信息
*/
private fun handleGetDecoderStats(call: MethodCall, result: Result) {
try {
// 获取纹理ID
val textureId = call.argument<Number>("textureId")?.toLong() ?:
return result.error("INVALID_ARGS", "无效的纹理ID", null)
// 获取解码器
val decoder = decoders[textureId] ?:
return result.error("DECODER_NOT_FOUND", "找不到纹理ID对应的解码器", null)
// 获取统计信息
val stats = decoder.getStatistics()
// 添加插件级别的信息
val enhancedStats = HashMap<String, Any>(stats)
enhancedStats["decoderCount"] = decoders.size
enhancedStats["textureId"] = textureId
// 返回统计信息
result.success(enhancedStats)
} catch (e: Exception) {
logError("获取解码器统计信息失败", e)
result.error("STATS_FAILED", "获取解码器统计信息失败: ${e.message}", null)
}
}
/**
* 发送解码器状态更新
*/
private fun sendDecoderState(textureId: Long, state: String, stats: Map<String, Any>) {
runOnMainThread {
try {
logDebug("发送解码器状态更新: 纹理ID=$textureId, 状态=$state")
// 构造参数
val params = HashMap<String, Any>()
params["textureId"] = textureId
params["state"] = state
params["stats"] = stats
// 发送状态更新
channel.invokeMethod("onDecoderState", params)
} catch (e: Exception) {
logError("发送解码器状态更新失败", e)
}
}
}
/**
* 在主线程上执行任务
*/
private fun runOnMainThread(task: () -> Unit) {
if (Looper.myLooper() == Looper.getMainLooper()) {
task()
} else {
mainHandler.post(task)
}
}
/**
* 插件从Flutter引擎解绑时调用
*/
override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
// 释放所有解码器
for ((textureId, decoder) in decoders) {
try {
// 发送释放状态
sendDecoderState(textureId, "released", decoder.getStatistics())
// 释放解码器
decoder.release()
} catch (e: Exception) {
logError("插件分离时释放解码器失败", e)
}
}
// 清除映射
decoders.clear()
// 移除方法调用处理器
decoder?.release()
decoder = null
textureId = null
channel.setMethodCallHandler(null)
}
}

View File

@ -11,9 +11,9 @@ import 'package:video_decode_plugin/video_decode_plugin.dart';
// H264文件中解析出的帧
class H264Frame {
final Uint8List data;
final FrameType type;
H264Frame(this.data, this.type);
final int frameType; // 0=I帧, 1=P帧
final int? refIFrameSeq;
H264Frame(this.data, this.frameType, [this.refIFrameSeq]);
}
// H264 NAL
@ -131,6 +131,26 @@ class VideoView extends StatefulWidget {
State<VideoView> createState() => _VideoViewState();
}
///
///
/// 1. H264文件
/// 2.
/// 3.
/// 4. UI刷新与日志显示
/// 5. ID
///
///
/// - _textureId: ID
/// - _isInitialized:
/// - _isPlaying:
/// - _statusText:
/// - _error:
/// - _h264FileData: H264文件数据
/// - _h264Frames:
/// - _currentFrameIndex:
/// - _frameTimer:
/// - _logs:
/// - _logScrollController:
class _VideoViewState extends State<VideoView> {
//
int? _textureId;
@ -139,11 +159,6 @@ class _VideoViewState extends State<VideoView> {
String _statusText = "未初始化";
String _error = "";
//
DecoderState _decoderState = DecoderState.initializing;
String _decoderStateText = "初始化中";
bool _isActuallyRendering = false; //
//
int _renderedFrameCount = 0;
DateTime? _lastFrameTime;
@ -234,75 +249,39 @@ class _VideoViewState extends State<VideoView> {
// H264文件NAL单元
void _parseH264File() {
if (_h264FileData == null) return;
_log("开始解析H264文件...");
List<H264Frame> frames = [];
// 0x00000001 0x000001
int startIndex = 0;
bool hasSps = false;
bool hasPps = false;
while (startIndex < _h264FileData!.length - 4) {
//
int nextStartIndex = _findStartCode(_h264FileData!, startIndex + 3);
if (nextStartIndex == -1) {
nextStartIndex = _h264FileData!.length;
}
// NAL单元34
int skipBytes = (_h264FileData![startIndex] == 0x00 &&
_h264FileData![startIndex + 1] == 0x00 &&
_h264FileData![startIndex + 2] == 0x00 &&
_h264FileData![startIndex + 3] == 0x01)
? 4
: 3;
if (nextStartIndex > startIndex + skipBytes) {
// NAL类型
int nalType = _h264FileData![startIndex + skipBytes] & 0x1F;
// NAL单元数据
var nalData = Uint8List(nextStartIndex - startIndex);
for (int i = 0; i < nalData.length; i++) {
nalData[i] = _h264FileData![startIndex + i];
}
// NAL类型分类
switch (nalType) {
case NalUnitType.SPS:
_log("找到SPS: 位置=${startIndex}, 长度=${nalData.length}");
hasSps = true;
frames.add(H264Frame(nalData, FrameType.iFrame));
break;
case NalUnitType.PPS:
_log("找到PPS: 位置=${startIndex}, 长度=${nalData.length}");
hasPps = true;
frames.add(H264Frame(nalData, FrameType.iFrame));
break;
case NalUnitType.CODED_SLICE_IDR:
_log("找到I帧: 位置=${startIndex}, 长度=${nalData.length}");
frames.add(H264Frame(nalData, FrameType.iFrame));
break;
case NalUnitType.CODED_SLICE_NON_IDR:
frames.add(H264Frame(nalData, FrameType.pFrame));
break;
default:
// NAL单元也添加进去
frames.add(H264Frame(nalData, FrameType.pFrame));
break;
// 0=I帧, 1=P帧
if (nalType == 7 || nalType == 8 || nalType == 5) {
frames.add(H264Frame(nalData, 0));
} else {
frames.add(H264Frame(nalData, 1));
}
}
startIndex = nextStartIndex;
}
setState(() {
_h264Frames = frames;
});
_log("H264文件解析完成找到 ${frames.length} 个帧包含SPS=${hasSps}, PPS=${hasPps}");
_log("H264文件解析完成找到 "+frames.length.toString()+" 个帧");
}
//
@ -374,45 +353,19 @@ class _VideoViewState extends State<VideoView> {
if (_isInitialized) {
await _releaseDecoder();
}
_log("正在初始化解码器");
try {
final config = VideoDecoderConfig(
width: 640,
height: 480,
codecType: CodecType.h264,
frameRate: 24, // 24fps (23.976)
isDebug: true, //
);
final config = VideoDecoderConfig(width: 640, height: 480);
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
_textureId = textureId;
//
VideoDecodePlugin.setFrameCallbackForTexture(
textureId, _onFrameAvailable);
//
VideoDecodePlugin.setStateCallbackForTexture(
textureId, _onDecoderStateChanged);
setState(() {
_isInitialized = true;
_error = "";
_statusText = "就绪";
_renderedFrameCount = 0; //
_decoderState = DecoderState.initializing;
_decoderStateText = "初始化中";
_isActuallyRendering = false;
_renderedFrameCount = 0;
});
_log("解码器初始化成功纹理ID: $_textureId");
//
await _sendTestIFrame();
} else {
setState(() {
_error = "获取纹理ID失败";
@ -429,120 +382,40 @@ class _VideoViewState extends State<VideoView> {
}
}
//
void _onDecoderStateChanged(
int textureId, DecoderState state, Map<String, dynamic> stats) {
if (!mounted) return;
String stateText;
switch (state) {
case DecoderState.initializing:
stateText = "初始化中";
break;
case DecoderState.ready:
stateText = "准备就绪";
break;
case DecoderState.rendering:
stateText = "渲染中";
//
_isActuallyRendering = true;
break;
case DecoderState.error:
stateText = "出错";
//
final errorMessage = stats['errorMessage'] as String?;
if (errorMessage != null) {
_log("解码器错误: $errorMessage");
}
break;
case DecoderState.released:
stateText = "已释放";
break;
default:
stateText = "未知状态";
//
Future<bool> _decodeNextFrame(H264Frame frame, int frameSeq) async {
if (_textureId == null || !_isInitialized || !_isPlaying) {
return false;
}
// UI
setState(() {
_decoderState = state;
_decoderStateText = stateText;
//
if (stats.isNotEmpty) {
_decoderFps = (stats['fps'] as num?)?.toDouble() ?? 0.0;
_renderedFrameCount = (stats['renderedFrames'] as int?) ?? 0;
//
_totalFrames = (stats['totalFrames'] as int?) ?? 0;
_droppedFrames = (stats['droppedFrames'] as int?) ?? 0;
_hasSentIDR = (stats['hasSentIDR'] as bool?) ?? false;
_hasSentSPS = (stats['hasSentSPS'] as bool?) ?? false;
_hasSentPPS = (stats['hasSentPPS'] as bool?) ?? false;
//
if (state == DecoderState.rendering) {
_statusText = _isPlaying
? "播放中 (解码总帧: $_totalFrames, 丢弃: $_droppedFrames)"
: "已停止";
}
}
});
String decoderInfo = "解码器状态更新: $_decoderStateText, " +
"帧数据: 渲染=$_renderedFrameCount, 总计=$_totalFrames, 丢弃=$_droppedFrames, " +
"FPS=${_decoderFps.toStringAsFixed(1)}, " +
"参数集: SPS=${_hasSentSPS}, PPS=${_hasSentPPS}, IDR=${_hasSentIDR}";
_log(decoderInfo);
}
// I帧来触发渲染
Future<void> _sendTestIFrame() async {
if (_textureId == null || !_isInitialized) {
_log("解码器未准备好,无法发送测试帧");
return;
}
_log("生成并发送测试I帧");
// NAL单元 (IDR帧)
// 5 + NAL类型5(I帧) +
List<int> testFrameData = [
0x00, 0x00, 0x00, 0x01, 0x65, // + NAL类型 (0x65 = 101|0101 -> 5)
0x88, 0x84, 0x21, 0x43, 0x14, 0x56, 0x32, 0x80 //
];
Uint8List testFrame = Uint8List.fromList(testFrameData);
try {
_log("发送测试I帧: ${testFrame.length} 字节");
bool success = await VideoDecodePlugin.decodeFrameForTexture(
_textureId!, testFrame, FrameType.iFrame);
_log("测试I帧发送结果: ${success ? '成功' : '失败'}");
final timestamp = DateTime.now().microsecondsSinceEpoch;
final success = await VideoDecodePlugin.decodeFrame(
frameData: frame.data,
frameType: frame.frameType,
timestamp: timestamp,
frameSeq: frameSeq,
refIFrameSeq: frame.refIFrameSeq,
);
if (!success) {
_log("解码帧失败,索引 $frameSeq (type=${frame.frameType})");
}
return success;
} catch (e) {
_log("发送测试帧错误: $e");
_log("解码帧错误: $e");
return false;
}
}
Future<void> _releaseDecoder() async {
_statsTimer?.cancel(); //
if (_textureId != null) {
_log("正在释放解码器资源");
try {
await VideoDecodePlugin.releaseDecoderForTexture(_textureId!);
await VideoDecodePlugin.releaseDecoder();
setState(() {
_textureId = null;
_isInitialized = false;
_statusText = "已释放";
_isActuallyRendering = false;
_decoderState = DecoderState.released;
_decoderStateText = "已释放";
});
_log("解码器资源释放成功");
} catch (e) {
_log("释放解码器错误: $e");
@ -602,7 +475,7 @@ class _VideoViewState extends State<VideoView> {
if (nalType == NalUnitType.SPS || nalType == NalUnitType.PPS) {
_log("发送${nalType == NalUnitType.SPS ? 'SPS' : 'PPS'}数据");
await _decodeNextFrame(frame);
await _decodeNextFrame(frame, i);
//
await Future.delayed(Duration(milliseconds: 30));
}
@ -644,7 +517,7 @@ class _VideoViewState extends State<VideoView> {
}
final frame = _h264Frames[_currentFrameIndex];
bool decodeSuccess = await _decodeNextFrame(frame);
bool decodeSuccess = await _decodeNextFrame(frame, _currentFrameIndex);
//
if (!decodeSuccess && _enablePacketLoss) {
@ -656,91 +529,6 @@ class _VideoViewState extends State<VideoView> {
});
}
Future<bool> _decodeNextFrame(H264Frame frame) async {
if (_textureId == null || !_isInitialized || !_isPlaying) {
return false;
}
try {
// NAL类型
int nalType = _getNalType(frame.data);
//
if (_enablePacketLoss) {
bool shouldDrop = false;
//
if (_burstPacketLossMode && _burstPacketLossCounter > 0) {
shouldDrop = true;
_burstPacketLossCounter--;
}
//
else if (math.Random().nextDouble() < _packetLossRate) {
shouldDrop = true;
//
if (_burstPacketLossMode) {
_burstPacketLossCounter = math.Random().nextInt(5) + 1; // 1-5
}
}
// NAL的丢包策略
if (nalType == NalUnitType.CODED_SLICE_IDR && _dropIFrames) {
shouldDrop = true;
} else if ((nalType == NalUnitType.CODED_SLICE_NON_IDR ||
nalType == NalUnitType.CODED_SLICE_EXTENSION) &&
_dropPFrames) {
shouldDrop = true;
} else if ((nalType == NalUnitType.SPS || nalType == NalUnitType.PPS) &&
_dropSPSPPS) {
shouldDrop = true;
}
if (shouldDrop) {
_droppedFramesCount++;
String nalTypeName = NalUnitType.getName(nalType);
_log("丢弃帧NAL类型 = $nalTypeName");
//
setState(() {
_showingErrorFrame = true;
});
// 1
_errorFrameResetTimer?.cancel();
_errorFrameResetTimer = Timer(Duration(milliseconds: 1000), () {
if (mounted) {
setState(() {
_showingErrorFrame = false;
});
}
});
return false; // false
}
}
//
final success = await VideoDecodePlugin.decodeFrameForTexture(
_textureId!,
frame.data,
frame.type,
);
if (!success) {
_log("解码帧失败,索引 $_currentFrameIndex (${frame.type})");
} else {
String nalTypeName = NalUnitType.getName(nalType);
_log(
"解码帧成功,索引 $_currentFrameIndex (${frame.type}), NAL类型: $nalTypeName");
}
return success;
} catch (e) {
_log("解码帧错误: $e");
return false;
}
}
void _log(String message) {
final timestamp = DateTime.now().toString().split('.').first;
final logMessage = "[$timestamp] $message";
@ -793,7 +581,7 @@ class _VideoViewState extends State<VideoView> {
Container(color: Colors.black),
//
if (_renderedFrameCount == 0 || !_isActuallyRendering)
if (_renderedFrameCount == 0)
Center(
child: Column(
mainAxisSize: MainAxisSize.min,
@ -803,11 +591,7 @@ class _VideoViewState extends State<VideoView> {
),
SizedBox(height: 16),
Text(
_decoderState == DecoderState.initializing
? '初始化中...'
: _decoderState == DecoderState.ready
? '准备就绪,等待首帧...'
: '加载中...',
'初始化中...',
style: TextStyle(color: Colors.white70, fontSize: 14),
),
],
@ -835,63 +619,41 @@ class _VideoViewState extends State<VideoView> {
),
),
//
if (_decoderState == DecoderState.error)
Container(
color: Colors.red.withOpacity(0.3),
child: Center(
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
Icon(Icons.error_outline, size: 48, color: Colors.white),
SizedBox(height: 16),
Text(
'解码器错误',
style: TextStyle(
color: Colors.white,
fontSize: 16,
fontWeight: FontWeight.bold),
),
],
),
),
),
// -
Positioned(
right: 10,
top: 10,
child: Container(
padding: EdgeInsets.all(5),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.5),
borderRadius: BorderRadius.circular(4),
),
constraints: BoxConstraints(
maxWidth: 150, //
),
child: Column(
crossAxisAlignment: CrossAxisAlignment.end,
mainAxisSize: MainAxisSize.min, // column只占用所需空间
children: [
Text(
'帧: $_renderedFrameCount',
style: TextStyle(color: Colors.white, fontSize: 12),
),
if (_enablePacketLoss)
Text(
'丢帧: $_droppedFramesCount',
style: TextStyle(
color: _droppedFramesCount > 0
? Colors.orange
: Colors.white70,
fontSize: 12,
),
),
],
),
),
),
// // -
// Positioned(
// right: 10,
// top: 10,
// child: Container(
// padding: EdgeInsets.all(5),
// decoration: BoxDecoration(
// color: Colors.black.withOpacity(0.5),
// borderRadius: BorderRadius.circular(4),
// ),
// constraints: BoxConstraints(
// maxWidth: 150, //
// ),
// child: Column(
// crossAxisAlignment: CrossAxisAlignment.end,
// mainAxisSize: MainAxisSize.min, // column只占用所需空间
// children: [
// Text(
// '帧: $_renderedFrameCount',
// style: TextStyle(color: Colors.white, fontSize: 12),
// ),
// if (_enablePacketLoss)
// Text(
// '丢帧: $_droppedFramesCount',
// style: TextStyle(
// color: _droppedFramesCount > 0
// ? Colors.orange
// : Colors.white70,
// fontSize: 12,
// ),
// ),
// ],
// ),
// ),
// ),
],
);
}
@ -947,106 +709,82 @@ class _VideoViewState extends State<VideoView> {
style:
TextStyle(fontWeight: FontWeight.bold)),
//
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'解码器状态: $_decoderStateText',
style: TextStyle(
color: _getStateColor(),
fontWeight: FontWeight.bold),
),
),
//
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'实际渲染: ${_isActuallyRendering ? "" : ""}',
style: TextStyle(
color: _isActuallyRendering
? Colors.green
: Colors.orange,
),
),
),
// FPS和帧数信息
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'解码器FPS: ${_decoderFps.toStringAsFixed(1)}',
style: TextStyle(color: Colors.green),
),
),
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text('已渲染帧数: $_renderedFrameCount'),
),
//
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'已丢弃帧数: $_droppedFramesCount',
style: TextStyle(
color: _droppedFramesCount > 0
? Colors.orange
: Colors.black),
),
),
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text('当前帧索引: $_currentFrameIndex'),
),
//
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('参数集状态:'),
Padding(
padding: const EdgeInsets.only(
left: 8.0, top: 2.0),
child: Text(
'SPS: ${_hasSentSPS ? "已发送" : "未发送"}'),
),
Padding(
padding: const EdgeInsets.only(
left: 8.0, top: 2.0),
child: Text(
'PPS: ${_hasSentPPS ? "已发送" : "未发送"}'),
),
Padding(
padding: const EdgeInsets.only(
left: 8.0, top: 2.0),
child: Text(
'IDR: ${_hasSentIDR ? "已发送" : "未发送"}'),
),
],
),
),
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text('总帧数: $_totalFrames'),
),
//
if (_error.isNotEmpty)
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'错误: $_error',
style: TextStyle(
color: Colors.red,
fontWeight: FontWeight.bold),
),
),
// Padding(
// padding: const EdgeInsets.only(top: 4.0),
// child: Text(
// '解码器FPS: ${_decoderFps.toStringAsFixed(1)}',
// style: TextStyle(color: Colors.green),
// ),
// ),
//
// Padding(
// padding: const EdgeInsets.only(top: 4.0),
// child: Text('已渲染帧数: $_renderedFrameCount'),
// ),
//
// //
// Padding(
// padding: const EdgeInsets.only(top: 4.0),
// child: Text(
// '已丢弃帧数: $_droppedFramesCount',
// style: TextStyle(
// color: _droppedFramesCount > 0
// ? Colors.orange
// : Colors.black),
// ),
// ),
//
// Padding(
// padding: const EdgeInsets.only(top: 4.0),
// child: Text('当前帧索引: $_currentFrameIndex'),
// ),
//
// //
// Padding(
// padding: const EdgeInsets.only(top: 4.0),
// child: Column(
// crossAxisAlignment: CrossAxisAlignment.start,
// children: [
// Text('参数集状态:'),
// Padding(
// padding: const EdgeInsets.only(
// left: 8.0, top: 2.0),
// child: Text(
// 'SPS: ${_hasSentSPS ? "已发送" : "未发送"}'),
// ),
// Padding(
// padding: const EdgeInsets.only(
// left: 8.0, top: 2.0),
// child: Text(
// 'PPS: ${_hasSentPPS ? "已发送" : "未发送"}'),
// ),
// Padding(
// padding: const EdgeInsets.only(
// left: 8.0, top: 2.0),
// child: Text(
// 'IDR: ${_hasSentIDR ? "已发送" : "未发送"}'),
// ),
// ],
// ),
// ),
//
// Padding(
// padding: const EdgeInsets.only(top: 4.0),
// child: Text('总帧数: $_totalFrames'),
// ),
//
// //
// if (_error.isNotEmpty)
// Padding(
// padding: const EdgeInsets.only(top: 4.0),
// child: Text(
// '错误: $_error',
// style: TextStyle(
// color: Colors.red,
// fontWeight: FontWeight.bold),
// ),
// ),
// H264文件信息
Padding(
@ -1104,6 +842,7 @@ class _VideoViewState extends State<VideoView> {
},
child: Text('刷新'),
),
],
),
),
@ -1284,39 +1023,6 @@ class _VideoViewState extends State<VideoView> {
),
);
}
//
Future<void> _updateDecoderStats() async {
if (_textureId == null || !_isInitialized) return;
try {
//
final stats = await VideoDecodePlugin.getDecoderStats(_textureId!);
if (mounted) {
_log("手动更新解码器统计信息: $stats");
}
} catch (e) {
_log("获取解码器统计信息失败: $e");
}
}
Color _getStateColor() {
switch (_decoderState) {
case DecoderState.initializing:
return Colors.orange;
case DecoderState.ready:
return Colors.green;
case DecoderState.rendering:
return Colors.blue;
case DecoderState.error:
return Colors.red;
case DecoderState.released:
return Colors.grey;
default:
return Colors.black;
}
}
}
//

View File

@ -7,86 +7,22 @@ import 'package:flutter/services.dart';
import 'video_decode_plugin_platform_interface.dart';
///
enum FrameType {
/// I帧
iFrame,
/// P帧
pFrame,
}
///
enum CodecType {
/// H.264
h264,
/// H.265
h265,
}
///
enum DecoderState {
///
initializing,
///
ready,
///
rendering,
///
error,
///
released,
}
///
typedef FrameAvailableCallback = void Function(int textureId);
///
typedef DecoderStateCallback = void Function(
int textureId, DecoderState state, Map<String, dynamic> stats);
///
class _DecoderInstance {
final int textureId;
FrameAvailableCallback? frameCallback;
DecoderStateCallback? stateCallback;
_DecoderInstance(this.textureId);
}
///
class VideoDecoderConfig {
/// 640
///
final int width;
/// 360
///
final int height;
///
final int? frameRate;
/// h264
final CodecType codecType;
/// false
final bool isDebug;
/// 使true
final bool isAsync;
final String codecType;
///
VideoDecoderConfig({
this.width = 640,
this.height = 360,
this.frameRate,
this.codecType = CodecType.h264,
this.isDebug = false,
this.isAsync = true,
required this.width,
required this.height,
this.codecType = 'h264',
});
/// Map
@ -94,10 +30,7 @@ class VideoDecoderConfig {
return {
'width': width,
'height': height,
'frameRate': frameRate,
'codecType': codecType.toString().split('.').last,
'isDebug': isDebug,
'isAsync': isAsync,
'codecType': codecType,
};
}
}
@ -106,196 +39,44 @@ class VideoDecoderConfig {
class VideoDecodePlugin {
static const MethodChannel _channel = MethodChannel('video_decode_plugin');
//
static final Map<int, _DecoderInstance> _decoders = {};
static int? _textureId;
// ID
static int? _defaultTextureId;
//
static bool _listenerInitialized = false;
//
static bool _isDebugMode = false;
// - 使
static final Map<int, bool> _isDecoderReleasing = {};
// - 访
static final _decoderStateLock = Object();
// -
static int _uninitializedErrorCount = 0;
static int _lastErrorLogTime = 0;
/// -
static void _logDebug(String message) {
if (_isDebugMode) {
debugPrint('[VideoDecodePlugin] $message');
}
///
static Future<int?> initDecoder(VideoDecoderConfig config) async {
final textureId = await _channel.invokeMethod<int>('initDecoder', config.toMap());
_textureId = textureId;
return textureId;
}
/// -
static void _logError(String message, {bool throttle = false}) {
if (throttle) {
//
_uninitializedErrorCount++;
//
final now = DateTime.now().millisecondsSinceEpoch;
if (now - _lastErrorLogTime > 5000 || _uninitializedErrorCount >= 50) {
debugPrint(
'[VideoDecodePlugin] ERROR: $message (发生 $_uninitializedErrorCount 次)');
_lastErrorLogTime = now;
_uninitializedErrorCount = 0;
}
} else {
//
debugPrint('[VideoDecodePlugin] ERROR: $message');
}
///
static Future<bool> decodeFrame({
required Uint8List frameData,
required int frameType, // 0=I帧, 1=P帧
required int timestamp, //
required int frameSeq, //
int? refIFrameSeq, // P帧时可选
}) async {
if (_textureId == null) return false;
final params = {
'textureId': _textureId,
'frameData': frameData,
'frameType': frameType,
'timestamp': timestamp,
'frameSeq': frameSeq,
if (refIFrameSeq != null) 'refIFrameSeq': refIFrameSeq,
};
final result = await _channel.invokeMethod<bool>('decodeFrame', params);
return result ?? false;
}
///
static void _initializeMethodCallHandler() {
if (!_listenerInitialized) {
_channel.setMethodCallHandler((call) async {
switch (call.method) {
case 'onFrameAvailable':
final Map<dynamic, dynamic> args = call.arguments;
final int textureId = args['textureId'];
//
bool isReleasing = false;
// 访
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
if (isReleasing) {
_logDebug('收到帧通知但解码器 $textureId 正在释放,忽略');
return null;
}
// ID的帧回调
final decoder = _decoders[textureId];
if (decoder != null && decoder.frameCallback != null) {
//
final bool isPrenotification = args['isPrenotification'] ?? false;
if (isPrenotification) {
_logDebug('[预通知] 收到初始帧可用通知无实际视频数据纹理ID: $textureId');
} else {
_logDebug('收到帧可用通知纹理ID: $textureId');
}
//
decoder.frameCallback!(textureId);
}
return null;
case 'onDecoderState':
final Map<dynamic, dynamic> args = call.arguments;
final int textureId = args['textureId'];
final String stateStr = args['state'];
final Map<dynamic, dynamic> statsMap = args['stats'];
//
bool isReleasing = false;
// 访
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
if (isReleasing && stateStr != 'released') {
_logDebug('收到状态回调但解码器 $textureId 正在释放,忽略');
return null;
}
//
DecoderState state;
switch (stateStr) {
case 'initializing':
state = DecoderState.initializing;
break;
case 'ready':
state = DecoderState.ready;
break;
case 'rendering':
state = DecoderState.rendering;
break;
case 'error':
state = DecoderState.error;
break;
case 'released':
state = DecoderState.released;
break;
default:
state = DecoderState.initializing;
}
// statsMap转换为强类型Map<String, dynamic>
final Map<String, dynamic> stats = {};
statsMap.forEach((key, value) {
if (key is String) {
stats[key] = value;
}
});
//
final decoder = _decoders[textureId];
if (decoder != null && decoder.stateCallback != null) {
_logDebug('调用解码器状态回调纹理ID=$textureId, 状态=$stateStr');
decoder.stateCallback!(textureId, state, stats);
}
return null;
default:
throw PlatformException(
code: 'Unimplemented',
details: 'The method ${call.method} is not implemented',
);
}
});
_listenerInitialized = true;
}
}
///
static void _withLock(Object lock, Function() action) {
// Dart中Object实例可以直接用作锁对象
synchronized(lock, action);
}
///
static T _withLockResult<T>(Object lock, T Function() action) {
return synchronizedWithResult(lock, action);
}
///
static bool _isDecoderReady(int textureId) {
bool isReleasing = false;
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
return _decoders.containsKey(textureId) && !isReleasing;
}
///
static void _setDecoderReleasing(int textureId, bool isReleasing) {
_withLock(_decoderStateLock, () {
if (isReleasing) {
_isDecoderReleasing[textureId] = true;
} else {
_isDecoderReleasing.remove(textureId);
}
///
static Future<bool> releaseDecoder() async {
if (_textureId == null) return true;
final result = await _channel.invokeMethod<bool>('releaseDecoder', {
'textureId': _textureId,
});
_textureId = null;
return result ?? false;
}
///
@ -308,343 +89,13 @@ class VideoDecodePlugin {
return Platform.isAndroid || Platform.isIOS;
}
///
static void setFrameCallback(FrameAvailableCallback callback) {
if (_defaultTextureId != null) {
setFrameCallbackForTexture(_defaultTextureId!, callback);
}
}
/// ID设置帧回调
static void setFrameCallbackForTexture(
int textureId, FrameAvailableCallback callback) {
_initializeMethodCallHandler();
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.frameCallback = callback;
}
}
///
static void setStateCallback(DecoderStateCallback callback) {
if (_defaultTextureId != null) {
setStateCallbackForTexture(_defaultTextureId!, callback);
}
}
/// ID设置状态回调
static void setStateCallbackForTexture(
int textureId, DecoderStateCallback callback) {
_initializeMethodCallHandler();
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.stateCallback = callback;
}
}
/// (Re)initializes the default decoder from [config].
///
/// Any existing default decoder is released first, then a fresh one is
/// created via [createDecoder]. Returns the new texture ID or null on
/// failure.
static Future<int?> initDecoder(VideoDecoderConfig config) async {
  _isDebugMode = config.isDebug;
  _uninitializedErrorCount = 0;
  // Tear down the previous default decoder before creating a new one.
  if (_defaultTextureId != null) {
    await releaseDecoder();
  }
  return await createDecoder(config);
}
/// Creates a native decoder from [config], registers it in `_decoders`,
/// and makes it the default decoder.
///
/// Returns the new texture ID, or null when the platform is unsupported
/// or native initialization fails.
static Future<int?> createDecoder(VideoDecoderConfig config) async {
  _isDebugMode = config.isDebug;
  _uninitializedErrorCount = 0;
  if (!isPlatformSupported) {
    _logError('当前平台不支持视频解码插件');
    return null;
  }
  // Install the Dart-side method-call handler before the native side can
  // start emitting frame/state events.
  _initializeMethodCallHandler();
  try {
    _logDebug(
        '创建解码器: ${config.width}x${config.height}, 编码: ${config.codecType}');
    final textureId =
        await _channel.invokeMethod<int>('initDecoder', config.toMap());
    if (textureId != null) {
      final decoder = _DecoderInstance(textureId);
      _decoders[textureId] = decoder;
      _setDecoderReleasing(textureId, false);
      _defaultTextureId = textureId;
      _logDebug('解码器创建成功纹理ID: $textureId');
    }
    // BUG FIX: return the id obtained from THIS call. The original
    // returned `_defaultTextureId`, which — when the native call yielded
    // null — could be a stale id from a previous decoder and falsely
    // signal success to the caller.
    return textureId;
  } catch (e) {
    _logError('初始化解码器失败: $e');
    return null;
  }
}
/// The default decoder's texture ID, or null when no decoder is active.
///
/// NOTE(review): duplicate member — a second `textureId` getter
/// (returning `_textureId`) appears later in this class; Dart does not
/// allow two members with the same name, so one must be removed.
static int? get textureId => _defaultTextureId;
/// A snapshot list of every registered decoder's texture ID.
static List<int> get allTextureIds => List<int>.from(_decoders.keys);
/// Decodes one video frame on the default decoder.
///
/// Returns false when no decoder is initialized, when the decoder is
/// being released, or when the native decode fails.
static Future<bool> decodeFrame(
    Uint8List frameData, FrameType frameType) async {
  final id = _defaultTextureId;
  if (id == null) {
    // Throttled: this can fire once per frame while uninitialized.
    _logError('解码器未初始化', throttle: true);
    return false;
  }
  if (!_isDecoderReady(id)) {
    _logDebug('解码器正在释放,忽略解码请求');
    return false;
  }
  return await decodeFrameForTexture(id, frameData, frameType);
}
/// Decodes one video frame on the decoder bound to [textureId].
///
/// [frameType] distinguishes I-frames from P-frames; its index is passed
/// straight through to the native side. Returns false when the decoder is
/// unavailable, releasing, or the native decode fails.
static Future<bool> decodeFrameForTexture(
    int textureId, Uint8List frameData, FrameType frameType) async {
  if (!_isDecoderReady(textureId)) {
    _logDebug('解码器不可用或正在释放,忽略解码请求');
    return false;
  }
  try {
    final bool isIFrame = frameType == FrameType.iFrame;
    _logDebug(
        '解码帧: textureId=$textureId, 大小=${frameData.length}字节, 类型=${isIFrame ? "I帧" : "P帧"}');
    final decoded = await _channel.invokeMethod<bool>('decodeFrame', {
          'textureId': textureId,
          'frameData': frameData,
          'frameType': frameType.index,
        }) ??
        false;
    if (!decoded) {
      _logDebug('解码帧失败');
    }
    return decoded;
  } catch (e) {
    // A channel error racing with release is expected; downgrade to debug.
    if (!_decoders.containsKey(textureId)) {
      _logDebug('解码器已释放,忽略解码错误');
      return false;
    }
    _logError('解码帧失败: $e');
    return false;
  }
}
/// Releases the default decoder (multi-decoder variant operating on
/// `_defaultTextureId`/`_decoders`).
///
/// NOTE(review): duplicate member — another `releaseDecoder` (operating
/// on the simplified `_textureId` field) appears earlier in this class;
/// Dart does not allow two members with the same name, so one of the two
/// must be removed. Confirm which API the commit intends to keep.
static Future<bool> releaseDecoder() async {
final int? decoderId = _defaultTextureId;
// No default decoder: treat as already released.
if (decoderId == null) {
return true;
}
final result = await releaseDecoderForTexture(decoderId);
if (result) {
_defaultTextureId = null;
}
return result;
}
/// Releases the decoder bound to [textureId] and cleans up all of its
/// Dart-side bookkeeping (callbacks, releasing flag, default-id pointer).
///
/// Returns true when the decoder is unknown (already released) or the
/// native release succeeds; false when the native side reports failure.
static Future<bool> releaseDecoderForTexture(int textureId) async {
// Unknown decoder: treat as already released.
if (!_decoders.containsKey(textureId)) {
return true;
}
// Mark as releasing FIRST so concurrent decode calls are rejected
// while the native teardown is in flight.
_setDecoderReleasing(textureId, true);
try {
_logDebug('释放解码器: textureId=$textureId');
// Drop Dart-side callbacks before tearing down the native decoder so
// no late frame/state events reach stale listeners.
clearCallbackForTexture(textureId);
final result = await _channel.invokeMethod<bool>('releaseDecoder', {
'textureId': textureId,
}) ??
false;
if (result) {
// Native release succeeded: remove all local state.
_decoders.remove(textureId);
// Clear the default pointer if it referred to this decoder.
if (_defaultTextureId == textureId) {
_defaultTextureId = null;
}
// Release complete; clear the releasing flag.
_setDecoderReleasing(textureId, false);
// Reset the throttled "uninitialized" error counter.
_uninitializedErrorCount = 0;
_logDebug('解码器释放成功: textureId=$textureId');
} else {
// Native release failed: keep the decoder entry but clear the flag
// so callers may retry.
_setDecoderReleasing(textureId, false);
_logError('解码器释放失败: textureId=$textureId');
}
return result;
} catch (e) {
// On a channel error assume the native decoder is gone: drop all
// local state so the id cannot be reused.
_decoders.remove(textureId);
if (_defaultTextureId == textureId) {
_defaultTextureId = null;
}
_setDecoderReleasing(textureId, false);
_logError('释放解码器失败: $e');
return false;
}
}
/// Releases every registered decoder, then resets all shared state.
///
/// Returns true only if every individual release succeeded.
static Future<bool> releaseAllDecoders() async {
  var allSuccess = true;
  // Snapshot the ids: releasing mutates `_decoders` while we iterate.
  final ids = List<int>.from(_decoders.keys);
  _logDebug('释放所有解码器: 共${ids.length}');
  for (final id in ids) {
    if (!await releaseDecoderForTexture(id)) {
      allSuccess = false;
    }
  }
  // Force-clear any leftovers even if some releases failed.
  _decoders.clear();
  _defaultTextureId = null;
  _withLock(_decoderStateLock, () {
    _isDecoderReleasing.clear();
  });
  _uninitializedErrorCount = 0;
  return allSuccess;
}
/// Removes both the frame and state callbacks of the decoder bound to
/// [textureId]. No-op when no such decoder exists.
static void clearCallbackForTexture(int textureId) {
  final decoder = _decoders[textureId];
  if (decoder == null) return;
  decoder
    ..frameCallback = null
    ..stateCallback = null;
  _logDebug('已清除纹理ID为$textureId的所有回调');
}
/// Removes the frame and state callbacks of every registered decoder.
static void clearAllCallbacks() {
  for (final decoder in _decoders.values) {
    decoder
      ..frameCallback = null
      ..stateCallback = null;
  }
  _logDebug('已清除所有回调');
}
static int? get textureId => _textureId;
/// Intentionally empty registration hook.
///
/// NOTE(review): presumably kept so older tooling that calls
/// `registerWith()` still links — confirm it is safe to remove once the
/// plugin targets only the current Flutter embedding.
static void registerWith() {
// No-op by design.
}
/// Fetches decoder statistics for [textureId] from the native side.
///
/// Returns an empty map when the decoder is unavailable, releasing, or
/// the channel call fails. On success the map contains keys such as:
/// `totalFrames`, `renderedFrames`, `droppedFrames`, `fps`, `hasSentSPS`,
/// `hasSentPPS`, `hasSentIDR`, `consecutivePFrames`, `targetWidth`,
/// `targetHeight`, `frameRate`, `decoderCount`, `textureId`.
static Future<Map<String, dynamic>> getDecoderStats(int textureId) async {
  if (!_isDecoderReady(textureId)) {
    _logDebug('解码器不可用或正在释放,无法获取统计信息');
    return {};
  }
  try {
    _logDebug('获取解码器统计信息: textureId=$textureId');
    final params = {
      'textureId': textureId,
    };
    final raw = await _channel.invokeMethod<Map<Object?, Object?>>(
        'getDecoderStats', params);
    if (raw == null) {
      return {};
    }
    // Narrow the platform-channel Map<Object?, Object?> to string keys,
    // silently dropping any non-string key.
    final typedResult = <String, dynamic>{
      for (final entry in raw.entries)
        if (entry.key is String) entry.key as String: entry.value,
    };
    _logDebug('获取解码器统计信息成功: $typedResult');
    return typedResult;
  } catch (e) {
    _logError('获取解码器统计信息失败: $e');
    return {};
  }
}
}
/// Dart中实现简单的同步锁