feat: v1 implementation

liyi 2025-04-23 16:36:55 +08:00
parent 9f97db8852
commit a9f96b8139
6 changed files with 509 additions and 600 deletions

View File

@ -35,7 +35,6 @@ final config = VideoDecoderConfig(
height: 480, // video height
codecType: CodecType.h264, // codec type: h264 or h265
frameRate: 30, // target frame rate (optional)
bufferSize: 30, // buffer size
isDebug: true, // enable verbose logging
);
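For orientation, a minimal usage sketch of the simplified v1 Dart API. It only uses calls that appear elsewhere in this commit (initDecoder, setFrameCallbackForTexture, setStateCallbackForTexture); the widget wiring around the returned texture id is left to the app and appears here only as comments.

import 'package:video_decode_plugin/video_decode_plugin.dart';

Future<void> startDecoder() async {
  final config = VideoDecoderConfig(
    width: 640,
    height: 480,
    codecType: CodecType.h264,
    frameRate: 30,
    isDebug: true,
  );

  // Initialize the platform decoder; the returned texture id typically feeds a Texture widget.
  final textureId = await VideoDecodePlugin.initDecoder(config);
  if (textureId == null) return;

  // Fired whenever a new frame has been rendered to the texture.
  VideoDecodePlugin.setFrameCallbackForTexture(textureId, (id) {
    // e.g. trigger a repaint of the Texture widget
  });

  // Fired on decoder lifecycle changes: initializing / ready / rendering / error / released.
  VideoDecodePlugin.setStateCallbackForTexture(textureId, (id, state, stats) {
    print('decoder $id -> $state, rendered=${stats['renderedFrames']}');
  });
}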

View File

@ -129,10 +129,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
val height = call.argument<Int>("height") ?: 360
val frameRate = call.argument<Int?>("frameRate")
val codecType = call.argument<String>("codecType") ?: "h264"
val bufferSize = call.argument<Int>("bufferSize") ?: 25
val threadCount = call.argument<Int>("threadCount") ?: 1
val isDebug = call.argument<Boolean>("isDebug") ?: false
val enableHardwareDecoder = call.argument<Boolean>("enableHardwareDecoder") ?: true
// Update the plugin's debug mode flag
this.isDebugMode = isDebug
@ -156,15 +153,7 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
height = height,
codecType = codecType,
frameRate = frameRate,
enableHardwareDecoder = enableHardwareDecoder,
threadCount = threadCount,
bufferSize = bufferSize,
isDebug = isDebug,
enableDynamicThresholds = call.argument<Boolean>("enableDynamicThresholds") ?: true,
initialMaxPFrames = call.argument<Int>("initialMaxPFrames") ?: 10,
initialIFrameTimeoutMs = call.argument<Int>("initialIFrameTimeoutMs") ?: 500,
minMaxPFrames = call.argument<Int>("minMaxPFrames") ?: 5,
maxMaxPFrames = call.argument<Int>("maxMaxPFrames") ?: 30
isDebug = isDebug
)
// Create the decoder
@ -184,12 +173,28 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
if (renderedFrames == 0) {
// This is the initial pre-notification
logDebug("[pre-notification] Sending initial frame-available notification to Flutter, texture ID: $textureId (no actual video data)")
// Send the frame-available notification (with the pre-notification flag)
channel.invokeMethod("onFrameAvailable", mapOf(
"textureId" to textureId,
"isPrenotification" to true
))
// Send a decoder state notification (ready)
sendDecoderState(textureId, "ready", stats)
} else {
// This is an actual frame notification
logDebug("Sending frame-available notification to Flutter, texture ID: $textureId, rendered frames: $renderedFrames")
// Send the frame-available notification (actual frame)
channel.invokeMethod("onFrameAvailable", mapOf(
"textureId" to textureId,
"isPrenotification" to false
))
// Send a decoder state notification (rendering)
sendDecoderState(textureId, "rendering", stats)
}
channel.invokeMethod("onFrameAvailable", mapOf("textureId" to textureId))
} catch (e: Exception) {
logError("Failed to notify Flutter to update the texture", e)
}
@ -198,12 +203,19 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
override fun onError(error: String) {
logError("Decoder error: $error")
// Send an error state notification
val stats = decoders[textureId]?.getStatistics() ?: mapOf()
sendDecoderState(textureId, "error", stats + mapOf("errorMessage" to error))
}
}
// Store the decoder
decoders[textureId] = decoder
// Send the initializing state
sendDecoderState(textureId, "initializing", decoder.getStatistics())
// Return the texture ID
result.success(textureId)
@ -235,6 +247,16 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
// Decode the frame
val success = decoder.decodeFrame(frameData, isIFrame)
// Send the updated decoder state (after each frame, regardless of success)
val stats = decoder.getStatistics()
// Determine the state based on whether any frame has been rendered
val renderedFrames = stats["renderedFrames"] as? Int ?: 0
val state = if (renderedFrames > 0) "rendering" else "ready"
// Send the state update
sendDecoderState(textureId, state, stats)
// Return the result
result.success(success)
@ -261,6 +283,9 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
return
}
// Send the released state
sendDecoderState(textureId, "released", decoder.getStatistics())
// Release the decoder
decoder.release()
@ -309,6 +334,28 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
}
}
/**
* Send a decoder state update
*/
private fun sendDecoderState(textureId: Long, state: String, stats: Map<String, Any>) {
runOnMainThread {
try {
logDebug("Sending decoder state update: textureId=$textureId, state=$state")
// Build the arguments
val params = HashMap<String, Any>()
params["textureId"] = textureId
params["state"] = state
params["stats"] = stats
// Send the state update
channel.invokeMethod("onDecoderState", params)
} catch (e: Exception) {
logError("Failed to send decoder state update", e)
}
}
}
/**
* Run a task on the main thread
*/
@ -325,8 +372,12 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
*/
override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
// Release all decoders
for (decoder in decoders.values) {
for ((textureId, decoder) in decoders) {
try {
// Send the released state
sendDecoderState(textureId, "released", decoder.getStatistics())
// Release the decoder
decoder.release()
} catch (e: Exception) {
logError("Failed to release decoder when the plugin detached", e)

View File

@ -34,16 +34,6 @@ class VideoDecoder(
private const val NAL_UNIT_TYPE_PPS = 8
private const val NAL_UNIT_TYPE_IDR = 5
private const val NAL_UNIT_TYPE_NON_IDR = 1 // P-frame
// Maximum allowed number of consecutive P-frames
private const val MAX_CONSECUTIVE_P_FRAMES = 10
// I-frame timeout in milliseconds - drop P-frames if no I-frame has arrived within this time
private const val MAX_IFRAME_TIMEOUT_MS = 500
// Async mode parameters
private const val LOW_LATENCY_MODE = true
private const val OPERATING_RATE = 90 // raise the decode operating rate to 90 FPS
}
// Callback interface
@ -94,10 +84,6 @@ class VideoDecoder(
private var iFrameIntervals = mutableListOf<Long>()
private val GOP_HISTORY_SIZE = 5 // keep the last 5 GOP intervals
// Dynamic threshold parameters
private var dynamicMaxConsecutivePFrames = config.initialMaxPFrames
private var dynamicIFrameTimeout = config.initialIFrameTimeoutMs
// Cache used to avoid reprocessing identical SPS/PPS
private var lastSPSHash: Int? = null
private var lastPPSHash: Int? = null
@ -332,39 +318,6 @@ class VideoDecoder(
} else if (effectiveType == NAL_UNIT_TYPE_IDR) {
hasSentIDR.set(true)
val currentTime = System.currentTimeMillis()
// Compute the I-frame interval and update the dynamic parameters
if (config.enableDynamicThresholds && lastDetectedIFrameTime > 0) {
val iFrameInterval = currentTime - lastDetectedIFrameTime
// Add to the history
iFrameIntervals.add(iFrameInterval)
if (iFrameIntervals.size > GOP_HISTORY_SIZE) {
iFrameIntervals.removeAt(0)
}
// Compute the average GOP size
if (iFrameIntervals.isNotEmpty()) {
val avgIFrameInterval = iFrameIntervals.average().toLong()
val frameRate = config.frameRate ?: 30
detectedGopSize = (avgIFrameInterval * frameRate / 1000).toInt()
if (detectedGopSize > 0) {
// Dynamically adjust the max-consecutive-P-frames threshold - 1.5x the GOP size, clamped by the config limits
val newMaxPFrames = (detectedGopSize * 1.5).toInt()
dynamicMaxConsecutivePFrames = newMaxPFrames.coerceIn(
config.minMaxPFrames,
config.maxMaxPFrames
)
// Dynamically adjust the I-frame timeout - 2x the average I-frame interval, but at least 200 ms
dynamicIFrameTimeout = Math.max(200, avgIFrameInterval.toInt() * 2)
logDebug("Dynamic parameters updated: GOP=$detectedGopSize, max P-frames=$dynamicMaxConsecutivePFrames, I-frame timeout=${dynamicIFrameTimeout}ms")
}
}
}
lastDetectedIFrameTime = currentTime
lastIFrameTimeMs = currentTime
consecutivePFrameCount = 0
@ -377,41 +330,6 @@ class VideoDecoder(
}
consecutivePFrameCount++
// Check whether the consecutive P-frame count exceeds the threshold - dynamic or fixed
val maxPFrames = if (config.enableDynamicThresholds)
dynamicMaxConsecutivePFrames
else
MAX_CONSECUTIVE_P_FRAMES
if (consecutivePFrameCount >= maxPFrames) {
logWarning("Dropping P-frame: too many consecutive P-frames ($consecutivePFrameCount > $maxPFrames)")
droppedFrameCount++
return false
}
// Check whether too much time has passed since the last I-frame - dynamic or fixed threshold
if (lastIFrameTimeMs > 0) {
val timeSinceLastIFrame = System.currentTimeMillis() - lastIFrameTimeMs
val iFrameTimeout = if (config.enableDynamicThresholds)
dynamicIFrameTimeout
else
MAX_IFRAME_TIMEOUT_MS
if (timeSinceLastIFrame > iFrameTimeout) {
logWarning("Dropping P-frame: too long since the last I-frame (${timeSinceLastIFrame}ms > ${iFrameTimeout}ms)")
droppedFrameCount++
return false
}
}
// Frame-size sanity check - a frame that is too small or too large is likely corrupted
val expectedFrameSize = config.width * config.height / 8 // rough estimate
if (frameData.size < 10 || frameData.size > expectedFrameSize * 2) {
logWarning("Dropping frame: abnormal frame size (${frameData.size} bytes)")
droppedFrameCount++
return false
}
}
// Record frame info
@ -570,24 +488,13 @@ class VideoDecoder(
"renderedFrames" to renderedFrameCount,
"droppedFrames" to droppedFrameCount,
"fps" to currentFps,
"detectedGopSize" to detectedGopSize,
"dynamicMaxConsecutivePFrames" to dynamicMaxConsecutivePFrames,
"dynamicIFrameTimeoutMs" to dynamicIFrameTimeout,
"hasSentSPS" to hasSentSPS.get(),
"hasSentPPS" to hasSentPPS.get(),
"hasSentIDR" to hasSentIDR.get(),
"consecutivePFrames" to consecutivePFrameCount,
"targetWidth" to config.width,
"targetHeight" to config.height,
"frameRate" to (config.frameRate ?: 0),
"enableDynamicThresholds" to config.enableDynamicThresholds
"frameRate" to (config.frameRate ?: 0)
)
}
/**
* Get the current rendering FPS
*/
fun getCurrentFps(): Float {
return currentFps
}
}

View File

@ -7,28 +7,12 @@ package top.skychip.video_decode_plugin
* @param height video height
* @param codecType codec type, defaults to h264
* @param frameRate frame rate, nullable
* @param enableHardwareDecoder whether to enable hardware decoding
* @param threadCount number of decoder threads
* @param bufferSize input buffer size
* @param isDebug whether to enable debug logging
* @param enableDynamicThresholds whether to enable dynamic thresholds
* @param initialMaxPFrames initial maximum number of consecutive P-frames
* @param initialIFrameTimeoutMs initial I-frame timeout (ms)
* @param minMaxPFrames lower bound for the max-consecutive-P-frames threshold
* @param maxMaxPFrames upper bound for the max-consecutive-P-frames threshold
*/
data class VideoDecoderConfig(
val width: Int,
val height: Int,
val codecType: String = "h264",
val frameRate: Int? = null,
val enableHardwareDecoder: Boolean = true,
val threadCount: Int = 1,
val bufferSize: Int = 30,
val isDebug: Boolean = false,
val enableDynamicThresholds: Boolean = true,
val initialMaxPFrames: Int = 10,
val initialIFrameTimeoutMs: Int = 500,
val minMaxPFrames: Int = 5,
val maxMaxPFrames: Int = 30
val isDebug: Boolean = false
)

View File

@ -8,54 +8,6 @@ import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
// Test pattern painter
class TestPatternPainter extends CustomPainter {
@override
void paint(Canvas canvas, Size size) {
final colors = [
Colors.red,
Colors.green,
Colors.blue,
Colors.yellow,
Colors.purple,
];
const int gridSize = 4;
final double cellWidth = size.width / gridSize;
final double cellHeight = size.height / gridSize;
for (int x = 0; x < gridSize; x++) {
for (int y = 0; y < gridSize; y++) {
final paint = Paint()
..color = colors[(x + y) % colors.length]
..style = PaintingStyle.fill;
final rect =
Rect.fromLTWH(x * cellWidth, y * cellHeight, cellWidth, cellHeight);
canvas.drawRect(rect, paint);
}
}
// Draw a white cross in the center
final paint = Paint()
..color = Colors.white
..style = PaintingStyle.stroke
..strokeWidth = 5.0;
canvas.drawLine(Offset(size.width / 2 - 50, size.height / 2),
Offset(size.width / 2 + 50, size.height / 2), paint);
canvas.drawLine(Offset(size.width / 2, size.height / 2 - 50),
Offset(size.width / 2, size.height / 2 + 50), paint);
}
@override
bool shouldRepaint(covariant CustomPainter oldDelegate) {
return false;
}
}
// A frame parsed from the H264 file
class H264Frame {
final Uint8List data;
@ -187,18 +139,17 @@ class _VideoViewState extends State<VideoView> {
String _statusText = "Not initialized";
String _error = "";
// Decoder state
DecoderState _decoderState = DecoderState.initializing;
String _decoderStateText = "Initializing";
bool _isActuallyRendering = false; // whether frames are actually being rendered
// Frame statistics
int _renderedFrameCount = 0;
DateTime? _lastFrameTime;
double _fps = 0;
double _decoderFps = 0; // decoder-reported FPS
// Dynamic threshold info
int _detectedGopSize = 0;
int _dynamicMaxPFrames = 0;
int _dynamicIFrameTimeoutMs = 0;
bool _enableDynamicThresholds = true;
// Stats refresh timer
Timer? _statsTimer;
@ -228,15 +179,25 @@ class _VideoViewState extends State<VideoView> {
bool _showingErrorFrame = false;
Timer? _errorFrameResetTimer;
// Decoder statistics reported via state callbacks
int _totalFrames = 0;
int _droppedFrames = 0;
bool _hasSentIDR = false;
bool _hasSentSPS = false;
bool _hasSentPPS = false;
@override
void initState() {
super.initState();
_loadH264File();
// Start the stats refresh timer
// Periodically refresh the UI elements with the latest statistics
_statsTimer = Timer.periodic(Duration(milliseconds: 1000), (timer) {
if (_isInitialized && _textureId != null) {
_updateDecoderStats();
if (mounted) {
setState(() {
// Trigger a rebuild so the UI picks up the refreshed stats
});
}
});
}
@ -246,7 +207,7 @@ class _VideoViewState extends State<VideoView> {
_stopPlaying();
_releaseDecoder();
_frameTimer?.cancel();
_statsTimer?.cancel(); // cancel the stats refresh timer
_statsTimer?.cancel();
super.dispose();
}
@ -421,14 +382,8 @@ class _VideoViewState extends State<VideoView> {
width: 640,
height: 480,
codecType: CodecType.h264,
frameRate: 30,
bufferSize: 30,
frameRate: 24, // 24fps (23.976)
isDebug: true, // enable verbose logging
enableDynamicThresholds: _enableDynamicThresholds, // use the toggle value
initialMaxPFrames: 60, // initial max consecutive P-frames
initialIFrameTimeoutMs: 5000, // initial I-frame timeout (ms)
minMaxPFrames: 5, // lower bound for the P-frame threshold
maxMaxPFrames: 60, // upper bound for the P-frame threshold
);
final textureId = await VideoDecodePlugin.initDecoder(config);
@ -440,11 +395,18 @@ class _VideoViewState extends State<VideoView> {
VideoDecodePlugin.setFrameCallbackForTexture(
textureId, _onFrameAvailable);
// Register the decoder state callback
VideoDecodePlugin.setStateCallbackForTexture(
textureId, _onDecoderStateChanged);
setState(() {
_isInitialized = true;
_error = "";
_statusText = "Ready";
_renderedFrameCount = 0; // reset the frame counter
_decoderState = DecoderState.initializing;
_decoderStateText = "Initializing";
_isActuallyRendering = false;
});
_log("Decoder initialized successfully, texture ID: $_textureId");
@ -467,6 +429,73 @@ class _VideoViewState extends State<VideoView> {
}
}
// Decoder state change handler
void _onDecoderStateChanged(
int textureId, DecoderState state, Map<String, dynamic> stats) {
if (!mounted) return;
String stateText;
switch (state) {
case DecoderState.initializing:
stateText = "Initializing";
break;
case DecoderState.ready:
stateText = "Ready";
break;
case DecoderState.rendering:
stateText = "Rendering";
// Mark that frames are actually being rendered
_isActuallyRendering = true;
break;
case DecoderState.error:
stateText = "Error";
// Extract the error message from the stats payload
final errorMessage = stats['errorMessage'] as String?;
if (errorMessage != null) {
_log("Decoder error: $errorMessage");
}
break;
case DecoderState.released:
stateText = "Released";
break;
default:
stateText = "Unknown state";
}
// Update the UI
setState(() {
_decoderState = state;
_decoderStateText = stateText;
// Update statistics from the state payload
if (stats.isNotEmpty) {
_decoderFps = (stats['fps'] as num?)?.toDouble() ?? 0.0;
_renderedFrameCount = (stats['renderedFrames'] as int?) ?? 0;
// Detailed frame counters and parameter-set flags
_totalFrames = (stats['totalFrames'] as int?) ?? 0;
_droppedFrames = (stats['droppedFrames'] as int?) ?? 0;
_hasSentIDR = (stats['hasSentIDR'] as bool?) ?? false;
_hasSentSPS = (stats['hasSentSPS'] as bool?) ?? false;
_hasSentPPS = (stats['hasSentPPS'] as bool?) ?? false;
// Update the status line while rendering
if (state == DecoderState.rendering) {
_statusText = _isPlaying
? "Playing (decoded: $_totalFrames, dropped: $_droppedFrames)"
: "Stopped";
}
}
});
String decoderInfo = "Decoder state update: $_decoderStateText, " +
"frames: rendered=$_renderedFrameCount, total=$_totalFrames, dropped=$_droppedFrames, " +
"FPS=${_decoderFps.toStringAsFixed(1)}, " +
"parameter sets: SPS=${_hasSentSPS}, PPS=${_hasSentPPS}, IDR=${_hasSentIDR}";
_log(decoderInfo);
}
// Send a test I-frame to trigger rendering
Future<void> _sendTestIFrame() async {
if (_textureId == null || !_isInitialized) {
@ -498,6 +527,7 @@ class _VideoViewState extends State<VideoView> {
}
Future<void> _releaseDecoder() async {
_statsTimer?.cancel(); // stop the stats refresh timer
if (_textureId != null) {
_log("Releasing decoder resources");
@ -508,6 +538,9 @@ class _VideoViewState extends State<VideoView> {
_textureId = null;
_isInitialized = false;
_statusText = "Released";
_isActuallyRendering = false;
_decoderState = DecoderState.released;
_decoderStateText = "Released";
});
_log("Decoder resources released successfully");
@ -597,8 +630,8 @@ class _VideoViewState extends State<VideoView> {
void _startDecodingFrames() {
_log("Starting to decode video frames");
// Feed frames on a fixed-interval timer
const int frameIntervalMs = 50; // 20 fps
// Feed frames on a fixed-interval timer
const int frameIntervalMs = 42; // ~23.8 fps (close to the 23.976 fps source)
_frameTimer =
Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) async {
@ -733,19 +766,21 @@ class _VideoViewState extends State<VideoView> {
Widget _buildVideoDisplay() {
if (_textureId == null) {
return Center(
child: Container(
width: 640,
height: 480,
color: Colors.black,
child: CustomPaint(
painter: TestPatternPainter(),
child: Center(
child: Text(
return Container(
width: 640,
height: 480,
color: Colors.black54,
child: Center(
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
Icon(Icons.videocam_off, size: 48, color: Colors.white70),
SizedBox(height: 16),
Text(
'No texture available',
style: TextStyle(color: Colors.white),
style: TextStyle(color: Colors.white, fontSize: 16),
),
),
],
),
),
);
@ -757,9 +792,27 @@ class _VideoViewState extends State<VideoView> {
// Black background
Container(color: Colors.black),
// Test pattern - shown before any frame has been rendered
if (_renderedFrameCount == 0)
CustomPaint(painter: TestPatternPainter()),
// Loading overlay - shown until actual rendering starts
if (_renderedFrameCount == 0 || !_isActuallyRendering)
Center(
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
CircularProgressIndicator(
valueColor: AlwaysStoppedAnimation<Color>(Colors.white70),
),
SizedBox(height: 16),
Text(
_decoderState == DecoderState.initializing
? 'Initializing...'
: _decoderState == DecoderState.ready
? 'Ready, waiting for the first frame...'
: 'Loading...',
style: TextStyle(color: Colors.white70, fontSize: 14),
),
],
),
),
// Texture - use RepaintBoundary and ValueKey to ensure correct updates
RepaintBoundary(
@ -782,16 +835,60 @@ class _VideoViewState extends State<VideoView> {
),
),
// Error overlay
if (_decoderState == DecoderState.error)
Container(
color: Colors.red.withOpacity(0.3),
child: Center(
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
Icon(Icons.error_outline, size: 48, color: Colors.white),
SizedBox(height: 16),
Text(
'Decoder error',
style: TextStyle(
color: Colors.white,
fontSize: 16,
fontWeight: FontWeight.bold),
),
],
),
),
),
// Stats overlay - top right corner
Positioned(
right: 10,
top: 10,
child: Container(
padding: EdgeInsets.all(5),
color: Colors.black.withOpacity(0.5),
child: Text(
'Frames: $_renderedFrameCount${_enablePacketLoss ? ' (dropped: $_droppedFramesCount)' : ''}',
style: TextStyle(color: Colors.white, fontSize: 12),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.5),
borderRadius: BorderRadius.circular(4),
),
constraints: BoxConstraints(
maxWidth: 150, // cap the overlay width
),
child: Column(
crossAxisAlignment: CrossAxisAlignment.end,
mainAxisSize: MainAxisSize.min, // the column only takes the space it needs
children: [
Text(
'Frames: $_renderedFrameCount',
style: TextStyle(color: Colors.white, fontSize: 12),
),
if (_enablePacketLoss)
Text(
'Dropped: $_droppedFramesCount',
style: TextStyle(
color: _droppedFramesCount > 0
? Colors.orange
: Colors.white70,
fontSize: 12,
),
),
],
),
),
),
@ -845,60 +942,123 @@ class _VideoViewState extends State<VideoView> {
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
mainAxisAlignment:
MainAxisAlignment.spaceBetween,
children: [
Text('Status: $_statusText',
style: TextStyle(
fontWeight: FontWeight.bold)),
// Text('Computed FPS: ${_fps.toStringAsFixed(1)}'),
],
),
Row(
mainAxisAlignment:
MainAxisAlignment.spaceBetween,
children: [
Text(
'Decoder FPS: ${_decoderFps.toStringAsFixed(1)}', style: TextStyle(
color: Colors.green
),),
Text('Rendered frames: $_renderedFrameCount'),
],
),
if (_error.isNotEmpty)
Text('Error: $_error',
style: TextStyle(
color: Colors.red,
fontWeight: FontWeight.bold)),
Row(
mainAxisAlignment:
MainAxisAlignment.spaceBetween,
children: [
Text('Detected GOP: $_detectedGopSize'),
Text('Parsed frames: ${_h264Frames.length}'),
],
),
Text(
'H264 file size: ${(_h264FileData?.length ?? 0) / 1024} KB'),
// Status
Text('Status: $_statusText',
style:
TextStyle(fontWeight: FontWeight.bold)),
// Dynamic threshold parameters
if (_enableDynamicThresholds)
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Column(
crossAxisAlignment:
CrossAxisAlignment.start,
children: [
Text('Dynamic threshold parameters:',
style: TextStyle(
fontWeight: FontWeight.bold)),
Text('Max consecutive P-frames: $_dynamicMaxPFrames'),
Text(
'I-frame timeout: ${_dynamicIFrameTimeoutMs}ms'),
],
// Decoder state
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'Decoder state: $_decoderStateText',
style: TextStyle(
color: _getStateColor(),
fontWeight: FontWeight.bold),
),
),
// Whether frames are actually being rendered
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'Actually rendering: ${_isActuallyRendering ? "yes" : "no"}',
style: TextStyle(
color: _isActuallyRendering
? Colors.green
: Colors.orange,
),
),
),
// FPS and frame-count info
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'Decoder FPS: ${_decoderFps.toStringAsFixed(1)}',
style: TextStyle(color: Colors.green),
),
),
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text('Rendered frames: $_renderedFrameCount'),
),
// Dropped frames
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'Dropped frames: $_droppedFramesCount',
style: TextStyle(
color: _droppedFramesCount > 0
? Colors.orange
: Colors.black),
),
),
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text('Current frame index: $_currentFrameIndex'),
),
// Parameter set status
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('Parameter sets:'),
Padding(
padding: const EdgeInsets.only(
left: 8.0, top: 2.0),
child: Text(
'SPS: ${_hasSentSPS ? "sent" : "not sent"}'),
),
Padding(
padding: const EdgeInsets.only(
left: 8.0, top: 2.0),
child: Text(
'PPS: ${_hasSentPPS ? "sent" : "not sent"}'),
),
Padding(
padding: const EdgeInsets.only(
left: 8.0, top: 2.0),
child: Text(
'IDR: ${_hasSentIDR ? "sent" : "not sent"}'),
),
],
),
),
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text('总帧数: $_totalFrames'),
),
// Error message
if (_error.isNotEmpty)
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'Error: $_error',
style: TextStyle(
color: Colors.red,
fontWeight: FontWeight.bold),
),
),
// H264 file info
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text('Parsed frames: ${_h264Frames.length}'),
),
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: Text(
'H264 file size: ${((_h264FileData?.length ?? 0) ~/ 1024)} KB'),
),
],
),
),
@ -978,40 +1138,6 @@ class _VideoViewState extends State<VideoView> {
],
),
// Dynamic-thresholds toggle
Row(
mainAxisAlignment:
MainAxisAlignment.spaceBetween,
children: [
Text('Dynamic thresholds',
style: TextStyle(
fontWeight: FontWeight.bold)),
Switch(
value: _enableDynamicThresholds,
onChanged: (value) {
setState(() {
_enableDynamicThresholds = value;
// Re-initialize the decoder when the setting changes
if (_isInitialized) {
_log("Dynamic-threshold setting changed; the decoder needs to be re-initialized");
// Stop playback first
if (_isPlaying) {
_stopPlaying();
}
// Re-initialize after a short delay
Future.delayed(
Duration(milliseconds: 100), () {
_initializeDecoder();
});
}
});
},
),
],
),
Divider(),
//
Row(
children: [
@ -1159,34 +1285,38 @@ class _VideoViewState extends State<VideoView> {
);
}
// Refresh the decoder statistics
Future<void> _updateDecoderStats() async {
if (_textureId == null || !_isInitialized) return;
try {
// Get the current FPS
final fps = await VideoDecodePlugin.getCurrentFps(_textureId);
// Get the dynamic threshold parameters
final thresholdParams =
await VideoDecodePlugin.getDynamicThresholdParams(_textureId);
// Fetch the full decoder statistics
final stats = await VideoDecodePlugin.getDecoderStats(_textureId!);
if (mounted) {
setState(() {
_decoderFps = fps;
_detectedGopSize = thresholdParams['detectedGopSize'] ?? 0;
_dynamicMaxPFrames =
thresholdParams['dynamicMaxConsecutivePFrames'] ?? 0;
_dynamicIFrameTimeoutMs =
thresholdParams['dynamicIFrameTimeoutMs'] ?? 0;
_enableDynamicThresholds =
thresholdParams['enableDynamicThresholds'] ?? true;
});
_log("Manually refreshed decoder statistics: $stats");
}
} catch (e) {
_log("Failed to get decoder statistics: $e");
}
}
Color _getStateColor() {
switch (_decoderState) {
case DecoderState.initializing:
return Colors.orange;
case DecoderState.ready:
return Colors.green;
case DecoderState.rendering:
return Colors.blue;
case DecoderState.error:
return Colors.red;
case DecoderState.released:
return Colors.grey;
default:
return Colors.black;
}
}
}
//

View File

@ -7,181 +7,6 @@ import 'package:flutter/services.dart';
import 'video_decode_plugin_platform_interface.dart';
/// H.265/HEVC NAL unit type definitions
class HevcNalUnitType {
static const int TRAIL_N = 0; // Trailing picture - non-reference
static const int TRAIL_R = 1; // Trailing picture - reference
static const int TSA_N = 2; // Temporal sub-layer access - non-reference
static const int TSA_R = 3; // Temporal sub-layer access - reference
static const int STSA_N = 4; // Step-wise temporal sub-layer access - non-reference
static const int STSA_R = 5; // Step-wise temporal sub-layer access - reference
static const int RADL_N = 6; // Random access decodable leading picture - non-reference
static const int RADL_R = 7; // Random access decodable leading picture - reference
static const int RASL_N = 8; // Random access skipped leading picture - non-reference
static const int RASL_R = 9; // Random access skipped leading picture - reference
static const int RSV_VCL_N10 = 10; // Reserved non-IRAP VCL NAL unit type
static const int RSV_VCL_R11 = 11; // Reserved non-IRAP VCL NAL unit type
static const int RSV_VCL_N12 = 12; // Reserved non-IRAP VCL NAL unit type
static const int RSV_VCL_R13 = 13; // Reserved non-IRAP VCL NAL unit type
static const int RSV_VCL_N14 = 14; // Reserved non-IRAP VCL NAL unit type
static const int RSV_VCL_R15 = 15; // Reserved non-IRAP VCL NAL unit type
static const int BLA_W_LP = 16; // Broken link access with leading pictures
static const int BLA_W_RADL = 17; // Broken link access with RADL pictures
static const int BLA_N_LP = 18; // Broken link access without leading pictures
static const int IDR_W_RADL = 19; // Instantaneous decoding refresh (IDR) with RADL pictures
static const int IDR_N_LP = 20; // Instantaneous decoding refresh (IDR) without leading pictures
static const int CRA_NUT = 21; // Clean random access
static const int RSV_IRAP_VCL22 = 22; // Reserved IRAP VCL NAL unit type
static const int RSV_IRAP_VCL23 = 23; // Reserved IRAP VCL NAL unit type
static const int RSV_VCL24 = 24; // Reserved VCL NAL unit type
static const int RSV_VCL25 = 25; // Reserved VCL NAL unit type
static const int RSV_VCL26 = 26; // Reserved VCL NAL unit type
static const int RSV_VCL27 = 27; // Reserved VCL NAL unit type
static const int RSV_VCL28 = 28; // Reserved VCL NAL unit type
static const int RSV_VCL29 = 29; // Reserved VCL NAL unit type
static const int RSV_VCL30 = 30; // Reserved VCL NAL unit type
static const int RSV_VCL31 = 31; // Reserved VCL NAL unit type
// Non-VCL NAL unit types
static const int VPS = 32; // Video parameter set
static const int SPS = 33; // Sequence parameter set
static const int PPS = 34; // Picture parameter set
static const int AUD = 35; // Access unit delimiter
static const int EOS = 36; // End of sequence
static const int EOB = 37; // End of bitstream
static const int FD = 38; // Filler data
static const int PREFIX_SEI = 39; // Prefix SEI message
static const int SUFFIX_SEI = 40; // Suffix SEI message
static const int RSV_NVCL41 = 41; // Reserved non-VCL NAL unit type
static const int RSV_NVCL42 = 42; // Reserved non-VCL NAL unit type
static const int RSV_NVCL43 = 43; // Reserved non-VCL NAL unit type
static const int RSV_NVCL44 = 44; // Reserved non-VCL NAL unit type
static const int RSV_NVCL45 = 45; // Reserved non-VCL NAL unit type
static const int RSV_NVCL46 = 46; // Reserved non-VCL NAL unit type
static const int RSV_NVCL47 = 47; // Reserved non-VCL NAL unit type
static const int UNSPEC48 = 48; // Unspecified
static const int UNSPEC49 = 49; // Unspecified
static const int UNSPEC50 = 50; // Unspecified
static const int UNSPEC51 = 51; // Unspecified
static const int UNSPEC52 = 52; // Unspecified
static const int UNSPEC53 = 53; // Unspecified
static const int UNSPEC54 = 54; // Unspecified
static const int UNSPEC55 = 55; // Unspecified
static const int UNSPEC56 = 56; // Unspecified
static const int UNSPEC57 = 57; // Unspecified
static const int UNSPEC58 = 58; // Unspecified
static const int UNSPEC59 = 59; // Unspecified
static const int UNSPEC60 = 60; // Unspecified
static const int UNSPEC61 = 61; // Unspecified
static const int UNSPEC62 = 62; // Unspecified
static const int UNSPEC63 = 63; // Unspecified
// Convenience groupings
// Key-frame (I-frame) types: IDR_W_RADL, IDR_N_LP, BLA_W_LP, BLA_W_RADL, BLA_N_LP, CRA_NUT
static const List<int> KEY_FRAMES = [
IDR_W_RADL,
IDR_N_LP,
BLA_W_LP,
BLA_W_RADL,
BLA_N_LP,
CRA_NUT
];
// Parameter sets: VPS, SPS, PPS
static const List<int> PARAMETER_SETS = [VPS, SPS, PPS];
/// Whether the NAL unit type is a key frame
static bool isKeyFrame(int nalUnitType) {
return KEY_FRAMES.contains(nalUnitType);
}
/// Whether the NAL unit type is a parameter set
static bool isParameterSet(int nalUnitType) {
return PARAMETER_SETS.contains(nalUnitType);
}
/// Whether the NAL unit type is an IDR frame
static bool isIdrFrame(int nalUnitType) {
return nalUnitType == IDR_W_RADL || nalUnitType == IDR_N_LP;
}
/// Human-readable name of a NAL unit type
static String getName(int type) {
switch (type) {
case TRAIL_N:
return "TRAIL_N";
case TRAIL_R:
return "TRAIL_R";
case TSA_N:
return "TSA_N";
case TSA_R:
return "TSA_R";
case STSA_N:
return "STSA_N";
case STSA_R:
return "STSA_R";
case RADL_N:
return "RADL_N";
case RADL_R:
return "RADL_R";
case RASL_N:
return "RASL_N";
case RASL_R:
return "RASL_R";
case BLA_W_LP:
return "BLA_W_LP";
case BLA_W_RADL:
return "BLA_W_RADL";
case BLA_N_LP:
return "BLA_N_LP";
case IDR_W_RADL:
return "IDR_W_RADL";
case IDR_N_LP:
return "IDR_N_LP";
case CRA_NUT:
return "CRA_NUT";
case VPS:
return "VPS";
case SPS:
return "SPS";
case PPS:
return "PPS";
case AUD:
return "AUD";
case EOS:
return "EOS";
case EOB:
return "EOB";
case FD:
return "FD";
case PREFIX_SEI:
return "PREFIX_SEI";
case SUFFIX_SEI:
return "SUFFIX_SEI";
default:
if (type >= 10 && type <= 15) return "RSV_VCL_${type}";
if (type >= 22 && type <= 23) return "RSV_IRAP_VCL${type}";
if (type >= 24 && type <= 31) return "RSV_VCL${type}";
if (type >= 41 && type <= 47) return "RSV_NVCL${type}";
if (type >= 48 && type <= 63) return "UNSPEC${type}";
return "Unknown(${type})";
}
}
}
/// Frame type
enum FrameType {
/// I-frame (key frame)
@ -200,13 +25,36 @@ enum CodecType {
h265,
}
/// Decoder state
enum DecoderState {
/// Initializing
initializing,
/// Initialized and ready, waiting for the first frame
ready,
/// Actively rendering frames
rendering,
/// An error occurred
error,
/// Released
released,
}
/// Frame-available callback
typedef FrameAvailableCallback = void Function(int textureId);
/// Decoder state callback
typedef DecoderStateCallback = void Function(
int textureId, DecoderState state, Map<String, dynamic> stats);
/// Per-texture decoder instance record
class _DecoderInstance {
final int textureId;
FrameAvailableCallback? frameCallback;
DecoderStateCallback? stateCallback;
_DecoderInstance(this.textureId);
}
@ -225,48 +73,16 @@ class VideoDecoderConfig {
/// Codec type, defaults to h264
final CodecType codecType;
/// Buffer size, defaults to 25
final int bufferSize;
/// Number of decoder threads, defaults to 1
final int threadCount;
/// Whether to enable debug logging, defaults to false
final bool isDebug;
/// Whether to enable hardware decoding, defaults to true
final bool enableHardwareDecoder;
/// Whether to enable dynamic thresholds, defaults to true
final bool enableDynamicThresholds;
/// Initial maximum consecutive P-frames, defaults to 10
final int initialMaxPFrames;
/// Initial I-frame timeout (ms), defaults to 500
final int initialIFrameTimeoutMs;
/// Lower bound for the max-consecutive-P-frames threshold, defaults to 5
final int minMaxPFrames;
/// Upper bound for the max-consecutive-P-frames threshold, defaults to 30
final int maxMaxPFrames;
/// Constructor
VideoDecoderConfig({
this.width = 640,
this.height = 360,
this.frameRate,
this.codecType = CodecType.h264,
this.bufferSize = 25,
this.threadCount = 1,
this.isDebug = false,
this.enableHardwareDecoder = true,
this.enableDynamicThresholds = true,
this.initialMaxPFrames = 10,
this.initialIFrameTimeoutMs = 500,
this.minMaxPFrames = 5,
this.maxMaxPFrames = 30,
});
/// Convert to a Map for the platform channel
@ -276,15 +92,7 @@ class VideoDecoderConfig {
'height': height,
'frameRate': frameRate,
'codecType': codecType.toString().split('.').last,
'bufferSize': bufferSize,
'threadCount': threadCount,
'isDebug': isDebug,
'enableHardwareDecoder': enableHardwareDecoder,
'enableDynamicThresholds': enableDynamicThresholds,
'initialMaxPFrames': initialMaxPFrames,
'initialIFrameTimeoutMs': initialIFrameTimeoutMs,
'minMaxPFrames': minMaxPFrames,
'maxMaxPFrames': maxMaxPFrames,
};
}
}
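For illustration, a small sketch of what toMap() produces for a near-default config; the values follow directly from the defaults above, and the codec name comes from codecType.toString().split('.').last:

void main() {
  final config = VideoDecoderConfig(width: 640, height: 480, frameRate: 30);
  // Expected output:
  // {width: 640, height: 480, frameRate: 30, codecType: h264, isDebug: false}
  print(config.toMap());
}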
@ -314,7 +122,6 @@ class VideoDecodePlugin {
// Error-log throttling for uninitialized-decoder errors
static int _uninitializedErrorCount = 0;
static int _lastErrorLogTime = 0;
static const int _ERROR_LOG_THRESHOLD = 5; // 5
/// Debug log helper
static void _logDebug(String message) {
@ -368,25 +175,79 @@ class VideoDecodePlugin {
// Look up the frame callback for this texture ID
final decoder = _decoders[textureId];
if (decoder != null && decoder.frameCallback != null) {
// Query decoder stats to distinguish the initial pre-notification
getDecoderStats(textureId).then((stats) {
final renderedFrames = stats['renderedFrames'] ?? 0;
if (renderedFrames == 0) {
_logDebug('[pre-notification] Received initial frame-available notification (no actual video data), texture ID: $textureId');
} else {
_logDebug('Received frame-available notification, texture ID: $textureId, rendered frames: $renderedFrames');
}
// Check the pre-notification flag passed from the platform side
final bool isPrenotification = args['isPrenotification'] ?? false;
// Invoke the frame callback
decoder.frameCallback!(textureId);
}).catchError((error) {
// Log according to the notification type
if (isPrenotification) {
_logDebug('[pre-notification] Received initial frame-available notification (no actual video data), texture ID: $textureId');
} else {
_logDebug('Received frame-available notification, texture ID: $textureId');
decoder.frameCallback!(textureId);
});
}
// Invoke the frame callback
decoder.frameCallback!(textureId);
}
return null;
case 'onDecoderState':
final Map<dynamic, dynamic> args = call.arguments;
final int textureId = args['textureId'];
final String stateStr = args['state'];
final Map<dynamic, dynamic> statsMap = args['stats'];
// Whether this decoder is currently being released
bool isReleasing = false;
// Access the flag under the lock
_withLock(_decoderStateLock, () {
isReleasing = _isDecoderReleasing[textureId] ?? false;
});
if (isReleasing && stateStr != 'released') {
_logDebug('Received a state callback but decoder $textureId is being released, ignoring');
return null;
}
// Convert the state string to a DecoderState
DecoderState state;
switch (stateStr) {
case 'initializing':
state = DecoderState.initializing;
break;
case 'ready':
state = DecoderState.ready;
break;
case 'rendering':
state = DecoderState.rendering;
break;
case 'error':
state = DecoderState.error;
break;
case 'released':
state = DecoderState.released;
break;
default:
state = DecoderState.initializing;
}
// Convert statsMap to a strongly typed Map<String, dynamic>
final Map<String, dynamic> stats = {};
statsMap.forEach((key, value) {
if (key is String) {
stats[key] = value;
}
});
// Invoke the registered state callback
final decoder = _decoders[textureId];
if (decoder != null && decoder.stateCallback != null) {
_logDebug('Invoking decoder state callback, texture ID=$textureId, state=$stateStr');
decoder.stateCallback!(textureId, state, stats);
}
return null;
default:
throw PlatformException(
code: 'Unimplemented',
@ -460,6 +321,24 @@ class VideoDecodePlugin {
}
}
/// Set the decoder state callback (for the default texture)
static void setStateCallback(DecoderStateCallback callback) {
if (_defaultTextureId != null) {
setStateCallbackForTexture(_defaultTextureId!, callback);
}
}
/// Set the state callback for the given texture ID
static void setStateCallbackForTexture(
int textureId, DecoderStateCallback callback) {
_initializeMethodCallHandler();
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.stateCallback = callback;
}
}
/// Initialize a decoder and return its texture ID
static Future<int?> initDecoder(VideoDecoderConfig config) async {
//
@ -691,7 +570,8 @@ class VideoDecodePlugin {
final decoder = _decoders[textureId];
if (decoder != null) {
decoder.frameCallback = null;
_logDebug('Cleared the callback for texture ID $textureId');
decoder.stateCallback = null;
_logDebug('Cleared all callbacks for texture ID $textureId');
}
}
@ -699,6 +579,7 @@ class VideoDecodePlugin {
static void clearAllCallbacks() {
for (final decoder in _decoders.values) {
decoder.frameCallback = null;
decoder.stateCallback = null;
}
_logDebug('Cleared all callbacks');
}
@ -712,12 +593,19 @@ class VideoDecodePlugin {
/// Get decoder statistics
/// [textureId] texture ID
/// Returns a Map containing:
/// - totalFramesReceived: total frames received
/// - framesRendered: frames rendered
/// - framesDropped: frames dropped
/// - lastFrameTimestamp: timestamp of the last frame
/// - averageProcessingTimeMs: average processing time (ms)
/// - totalFrames: total frames received
/// - renderedFrames: frames rendered
/// - droppedFrames: frames dropped
/// - fps: current rendering FPS
/// - hasSentSPS: whether an SPS has been sent
/// - hasSentPPS: whether a PPS has been sent
/// - hasSentIDR: whether an IDR (I-frame) has been sent
/// - consecutivePFrames: current consecutive P-frame count
/// - targetWidth: target width
/// - targetHeight: target height
/// - frameRate: configured frame rate
/// - decoderCount: number of active decoders
/// - textureId: texture ID
static Future<Map<String, dynamic>> getDecoderStats(int textureId) async {
// Make sure the decoder is ready first
if (!_isDecoderReady(textureId)) {
@ -752,52 +640,6 @@ class VideoDecodePlugin {
return {};
}
}
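As a usage sketch, a caller can poll these statistics the way the example app's stats timer does; only getDecoderStats from this file is assumed, and the field names match the list documented above:

Future<void> logDecoderStats(int textureId) async {
  final stats = await VideoDecodePlugin.getDecoderStats(textureId);
  if (stats.isEmpty) return; // decoder not ready or the call failed

  final rendered = (stats['renderedFrames'] as int?) ?? 0;
  final dropped = (stats['droppedFrames'] as int?) ?? 0;
  final fps = (stats['fps'] as num?)?.toDouble() ?? 0.0;
  print('rendered=$rendered dropped=$dropped fps=${fps.toStringAsFixed(1)}');
}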
/// Get the current rendering FPS
///
/// [textureId] optional texture ID (defaults to the default texture)
/// Returns 0.0 on failure
static Future<double> getCurrentFps([int? textureId]) async {
final targetTextureId = textureId ?? _defaultTextureId;
if (targetTextureId == null) {
return 0.0;
}
try {
final stats = await getDecoderStats(targetTextureId);
return stats['fps'] as double? ?? 0.0;
} catch (e) {
_logError('Failed to get FPS: $e');
return 0.0;
}
}
/// Get the dynamic threshold parameters
///
/// Only meaningful when dynamic thresholds are enabled
/// Includes the detected GOP size, the P-frame limit, the I-frame timeout, etc.
static Future<Map<String, dynamic>> getDynamicThresholdParams(
[int? textureId]) async {
final targetTextureId = textureId ?? _defaultTextureId;
if (targetTextureId == null) {
return {};
}
try {
final stats = await getDecoderStats(targetTextureId);
return {
'detectedGopSize': stats['detectedGopSize'] as int? ?? 0,
'dynamicMaxConsecutivePFrames':
stats['dynamicMaxConsecutivePFrames'] as int? ?? 0,
'dynamicIFrameTimeoutMs': stats['dynamicIFrameTimeoutMs'] as int? ?? 0,
'enableDynamicThresholds':
stats['enableDynamicThresholds'] as bool? ?? false,
};
} catch (e) {
_logError('Failed to get dynamic threshold parameters: $e');
return {};
}
}
}
/// A simple synchronization-lock helper implemented in Dart
@ -811,7 +653,3 @@ void synchronized(Object lock, Function() action) {
T synchronizedWithResult<T>(Object lock, T Function() action) {
return action();
}