feat: v1 version complete

liyi 2025-04-30 16:12:22 +08:00
parent 369d35cd2e
commit 023fe0a0f3
6 changed files with 304 additions and 93 deletions

View File

@@ -69,8 +69,8 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler {
val textureEntry = textureRegistry.createSurfaceTexture()
textureId = textureEntry.id()
decoder = VideoDecoder(context, textureEntry, width, height, codecType) {
// onFrameAvailable callback
channel.invokeMethod("onFrameAvailable", mapOf("textureId" to textureId))
// onFrameRendered callback
channel.invokeMethod("onFrameRendered", mapOf("textureId" to textureId))
}
result.success(textureId)
} catch (e: Exception) {

View File

@@ -24,9 +24,9 @@ import android.os.SystemClock
* Main responsibilities:
* 1. Initialize and configure the Android MediaCodec decoder (supports H264/H265 stream decoding)
* 2. Manage the decode input frame queue and render decoded frames to the Surface
* 3. Support synchronous and asynchronous decode modes with automatic input/output buffer handling
* 3. Support multi-threaded decoding with a decoupled render data flow
* 4. Manage the decoder lifecycle (start, release, etc.)
* 5. Notify the Flutter side via callback when a new frame is available
* 5. Notify the Flutter side via callback when frames have been rendered
*
* Constructor parameters:
* - context: Android context
@@ -34,23 +34,7 @@ import android.os.SystemClock
* - width: video width
* - height: video height
* - codecType: codec type ("h264" or "h265")
* - onFrameAvailable: new-frame-rendered callback
*
* Key member variables:
* - surfaceTexture/surface: video render target
* - mediaCodec: Android MediaCodec decoder instance
* - inputFrameQueue: input frame queue (concurrency-safe)
* - running: decoder running state
* - frameSeqSet: thread-safe Set used for de-duplication, prevents duplicate frames from being enqueued
* - outputFrameQueue: decoded output frame buffer
* - renderThreadRunning: render thread control flag
* - renderThread: render thread
* - mainHandler: main-thread Handler used to safely dispatch onFrameAvailable to the main thread
* - renderFps: render frame rate (fps), externally controllable
*
* Key methods:
* - decodeFrame: feed one frame of data into the decoder
* - release: release the decoder and related resources
* - onFrameRendered: callback fired once decoding has started and frames render successfully
*/
class VideoDecoder(
context: Context,
@@ -58,40 +42,40 @@ class VideoDecoder(
width: Int,
height: Int,
codecType: String,
private val onFrameAvailable: () -> Unit
private val onFrameRendered: () -> Unit
) {
companion object {
private const val TAG = "VideoDecoder"
private const val TIMEOUT_US = 10000L
private const val INPUT_BUFFER_QUEUE_CAPACITY = 50
private const val INPUT_BUFFER_QUEUE_CAPACITY = 250 // input buffer queue capacity
}
// region Member variables
// SurfaceTexture and Surface used as the video render target
private val surfaceTexture: SurfaceTexture = textureEntry.surfaceTexture()
private val surface: Surface = Surface(surfaceTexture)
private var mediaCodec: MediaCodec? = null
private val inputFrameQueue = LinkedBlockingQueue<FrameData>(INPUT_BUFFER_QUEUE_CAPACITY)
private var running = true
private val frameSeqSet = Collections.newSetFromMap(ConcurrentHashMap<Int, Boolean>())
// Decoded output frame buffer
private val outputFrameQueue = LinkedBlockingQueue<DecodedFrame>(50)
// Input frame queue; concurrency-safe, sized generously to avoid dropping frames
private val inputFrameQueue = LinkedBlockingQueue<FrameData>(INPUT_BUFFER_QUEUE_CAPACITY)
private var running = true // decoder running state
private val frameSeqSet = Collections.newSetFromMap(ConcurrentHashMap<Int, Boolean>()) // prevents duplicate frames from being enqueued
// Decoded output buffer, capacity 100 frames
private val outputFrameQueue = LinkedBlockingQueue<DecodedFrame>(100)
// Render thread control
@Volatile private var renderThreadRunning = true
private var renderThread: Thread? = null
// Main-thread Handler, used to safely dispatch onFrameAvailable to the main thread
// Main-thread Handler, used to safely dispatch onFrameRendered to the main thread
private val mainHandler = Handler(Looper.getMainLooper())
// Raw timestamp of the first frame (microseconds), used for zero-basing
private var firstTimestampUs: Long? = null
// Normalized timestamp of the previous frame (microseconds), used for jitter tolerance
private var lastTimestampUs: Long = 0L
// Tolerance window: one frame duration at 15 fps, in microseconds
private val toleranceUs = 66000L
// Render frame rate (fps), externally controllable
@Volatile var renderFps: Int = 20
// Render frame rate (fps), externally controllable, default 18
@Volatile var renderFps: Int = 18
// Input frame structure
private data class FrameData(
val data: ByteArray,
val frameType: Int,
@@ -100,7 +84,7 @@ class VideoDecoder(
val refIFrameSeq: Int?
)
// Decoded frame structure, explicitly carrying the timestamp (microseconds)
private data class DecodedFrame(
val codec: MediaCodec,
val bufferIndex: Int,
@@ -108,19 +92,28 @@ class VideoDecoder(
val timestampUs: Long // frame timestamp, in microseconds
)
// endregion
// region Initialization and decoder configuration
init {
// Configure the Surface buffer size
surfaceTexture.setDefaultBufferSize(width, height)
// Select the MIME type
val mime = when (codecType) {
"h264" -> "video/avc"
"h265" -> "video/hevc"
else -> "video/avc"
}
// Create and configure the MediaFormat
val format = MediaFormat.createVideoFormat(mime, width, height)
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height)
// Create the decoder
val decoder = MediaCodec.createDecoderByType(mime)
// Register the async decode callback
decoder.setCallback(object : MediaCodec.Callback() {
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
if (!running) return
// Take one frame from the input queue
val frame = inputFrameQueue.poll()
if (frame != null) {
frameSeqSet.remove(frame.frameSeq)
@@ -130,6 +123,7 @@ class VideoDecoder(
inputBuffer.put(frame.data)
val start = System.nanoTime()
val ptsUs = frame.timestamp * 1000L
// Queue into the decoder
codec.queueInputBuffer(
index,
0,
@@ -137,8 +131,6 @@ class VideoDecoder(
ptsUs,
if (frame.frameType == 0) MediaCodec.BUFFER_FLAG_KEY_FRAME else 0
)
val end = System.nanoTime()
Log.d(TAG, "queueInputBuffer cost: "+ (end - start) + " ns, frameSeq="+frame.frameSeq+", type="+frame.frameType+", ptsUs="+ptsUs)
} else {
codec.queueInputBuffer(index, 0, 0, 0, 0)
}
@@ -148,7 +140,7 @@ class VideoDecoder(
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, info: MediaCodec.BufferInfo) {
if (!running) return
// Decoded frames go into the output buffer, handled by the render thread
val frame = DecodedFrame(codec, index, MediaCodec.BufferInfo().apply {
set(0, info.size, info.presentationTimeUs, info.flags)
}, info.presentationTimeUs)
@@ -156,7 +148,6 @@ class VideoDecoder(
// Buffer full: drop the oldest frame, then insert
outputFrameQueue.poll()
outputFrameQueue.offer(frame)
Log.w(TAG, "outputFrameQueue full, drop oldest frame")
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
@@ -171,32 +162,32 @@ class VideoDecoder(
// Start the render thread
renderThreadRunning = true
renderThread = Thread {
var renderedFrameCount = 0 // rendered-frame counter
while (renderThreadRunning) {
// Compute the per-frame render interval
val frameIntervalMs = if (renderFps > 0) 1000L / renderFps else 66L
val loopStart = SystemClock.elapsedRealtime()
try {
val frame = outputFrameQueue.poll()
if (frame != null) {
Log.i(TAG, "[RenderThread] timed render: frame.timestampUs=${frame.timestampUs}")
val start = System.nanoTime()
frame.codec.releaseOutputBuffer(frame.bufferIndex, true)
val end = System.nanoTime()
Log.d(TAG, "[RenderThread] releaseOutputBuffer cost: "+ (end - start) + " ns, frame.timestampUs=${frame.timestampUs}")
// Ensure onFrameAvailable runs on the main thread to avoid FlutterJNI threading exceptions
mainHandler.post { onFrameAvailable() }
} else {
Log.d(TAG, "[RenderThread] timed render: no frame available")
// Block waiting for a new frame, to avoid busy-spinning
val frame = outputFrameQueue.take()
frame.codec.releaseOutputBuffer(frame.bufferIndex, true)
renderedFrameCount++
// Invoke onFrameRendered once every renderFps rendered frames
if (renderedFrameCount >= renderFps) {
// Notify the Flutter side that another batch of frames has been decoded and rendered
mainHandler.post { onFrameRendered() }
renderedFrameCount = 0 // reset the counter so the callback fires once per batch, not on every frame
}
} catch (e: Exception) {
Log.e(TAG, "[RenderThread] Exception", e)
}
// Pace the render loop
val loopCost = SystemClock.elapsedRealtime() - loopStart
val sleepMs = frameIntervalMs - loopCost
if (sleepMs > 0) {
try { Thread.sleep(sleepMs) } catch (_: Exception) {}
}
}
// Drain remaining frames
// Drain remaining frames to avoid leaking codec buffers
while (true) {
val frame = outputFrameQueue.poll() ?: break
try {
@@ -206,7 +197,13 @@ class VideoDecoder(
}
renderThread?.start()
}
// endregion
// region Core methods
/**
* Feed one frame of data into the decoder; all frame types may be enqueued, with de-duplication
*/
fun decodeFrame(
frameData: ByteArray,
frameType: Int,
@@ -215,13 +212,13 @@ class VideoDecoder(
refIFrameSeq: Int?
): Boolean {
if (!running || mediaCodec == null) return false
if (!frameSeqSet.add(frameSeq)) {
return false
}
// Use the caller's monotonically increasing timestamp directly; no zero-basing or alignment needed
if (!frameSeqSet.add(frameSeq)) return false // reject duplicate frames
return inputFrameQueue.offer(FrameData(frameData, frameType, timestamp, frameSeq, refIFrameSeq), 50, TimeUnit.MILLISECONDS)
}
/**
* Release the decoder and related resources
*/
fun release() {
running = false
inputFrameQueue.clear()
@@ -239,4 +236,5 @@ class VideoDecoder(
surface.release()
} catch (_: Exception) {}
}
// endregion
}
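
A minimal standalone sketch of the pacing pattern the render thread above uses: block on take() for the next decoded frame, release it, then sleep away the remainder of the frame interval. PacedRenderer and renderOne are illustrative names, not part of the plugin.

```kotlin
import java.util.concurrent.LinkedBlockingQueue

// Sketch only: generic fixed-rate consumer over a blocking queue.
class PacedRenderer<T>(
    private val queue: LinkedBlockingQueue<T>,
    private val renderOne: (T) -> Unit // e.g. codec.releaseOutputBuffer(index, true)
) {
    @Volatile var fps: Int = 18
    @Volatile private var running = true

    fun loop() {
        while (running) {
            val intervalMs = if (fps > 0) 1000L / fps else 66L
            val start = System.currentTimeMillis()
            val frame = queue.take()        // block until a frame arrives; no busy-waiting
            renderOne(frame)
            val cost = System.currentTimeMillis() - start
            val sleepMs = intervalMs - cost // sleep only the remainder of the interval
            if (sleepMs > 0) try { Thread.sleep(sleepMs) } catch (_: InterruptedException) {}
        }
    }

    fun stop() { running = false }          // note: a blocked take() also needs an interrupt
}
```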

View File

@@ -382,27 +382,22 @@ class _VideoViewState extends State<VideoView> {
}
}
// Decode the next frame
Future<bool> _decodeNextFrame(H264Frame frame, int frameSeq) async {
// Use sendFrame with splitNalFromIFrame = false
Future<void> _decodeNextFrame(H264Frame frame, int frameSeq) async {
if (_textureId == null || !_isInitialized || !_isPlaying) {
return false;
return;
}
try {
final timestamp = DateTime.now().microsecondsSinceEpoch;
final success = await VideoDecodePlugin.decodeFrame(
await VideoDecodePlugin.sendFrame(
frameData: frame.data,
frameType: frame.frameType,
timestamp: timestamp,
frameSeq: frameSeq,
refIFrameSeq: frame.refIFrameSeq,
splitNalFromIFrame: false,
);
if (!success) {
_log("解码帧失败,索引 $frameSeq (type=${frame.frameType})");
}
return success;
} catch (e) {
_log("解码帧错误: $e");
return false;
}
}
@@ -517,12 +512,12 @@ class _VideoViewState extends State<VideoView> {
}
final frame = _h264Frames[_currentFrameIndex];
bool decodeSuccess = await _decodeNextFrame(frame, _currentFrameIndex);
await _decodeNextFrame(frame, _currentFrameIndex);
//
if (!decodeSuccess && _enablePacketLoss) {
_log("Skipping frame at index $_currentFrameIndex (packet-loss simulation)");
}
// if (!decodeSuccess && _enablePacketLoss) {
//   _log("Skipping frame at index $_currentFrameIndex (packet-loss simulation)");
// }
//
_currentFrameIndex++;
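
For context, the stepping above is driven by a pump along these lines; _pumpFrames and sendFps are illustrative names, while _h264Frames, _currentFrameIndex, _isPlaying and _decodeNextFrame are the widget-state members shown in this diff.

```dart
// Sketch only: paced playback pump over the preloaded frame list.
Future<void> _pumpFrames({int sendFps = 18}) async {
  final interval = Duration(milliseconds: 1000 ~/ sendFps);
  while (_isPlaying && _currentFrameIndex < _h264Frames.length) {
    final frame = _h264Frames[_currentFrameIndex];
    await _decodeNextFrame(frame, _currentFrameIndex);
    _currentFrameIndex++;
    await Future.delayed(interval); // pace sends at roughly sendFps
  }
}
```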

View File

@@ -0,0 +1,31 @@
import 'dart:typed_data';
/// Manages SPS/PPS/I-frame dependency relationships
class FrameDependencyManager {
Uint8List? _sps;
Uint8List? _pps;
int? _lastIFrameSeq;
/// Cache the SPS
void updateSps(Uint8List sps) {
_sps = sps;
}
/// Cache the PPS
void updatePps(Uint8List pps) {
_pps = pps;
}
Uint8List? get sps => _sps;
Uint8List? get pps => _pps;
/// Whether an I-frame has been recorded
bool get hasIFrame => _lastIFrameSeq != null;
int? get lastIFrameSeq => _lastIFrameSeq;
void updateIFrameSeq(int seq) {
_lastIFrameSeq = seq;
}
/// Whether [seq] matches the most recently decoded I-frame
bool isIFrameDecoded(int? seq) {
return seq != null && seq == _lastIFrameSeq;
}
}
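
A short usage sketch of the manager above; the byte values are dummies.

```dart
import 'dart:typed_data';
import 'frame_dependency_manager.dart';

void main() {
  final dep = FrameDependencyManager();
  dep.updateSps(Uint8List.fromList([0x67, 0x42])); // cache SPS (dummy bytes)
  dep.updatePps(Uint8List.fromList([0x68, 0xCE])); // cache PPS (dummy bytes)
  dep.updateIFrameSeq(10);                         // I-frame 10 decoded

  assert(dep.hasIFrame);
  assert(dep.isIFrameDecoded(10)); // a P-frame referencing seq 10 is valid
  assert(!dep.isIFrameDecoded(9)); // stale reference: such a P-frame gets dropped
}
```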

lib/nalu_utils.dart (new file, +70 lines)
View File

@@ -0,0 +1,70 @@
/// NALU utilities and structures
import 'dart:typed_data';
/// A single NALU unit
class NaluUnit {
final int type; // NALU type
final List<int> data;
NaluUnit(this.type, this.data);
}
class NaluUtils {
/// Split a byte stream into NALU units
static List<NaluUnit> splitNalus(List<int> data) {
final List<NaluUnit> nalus = [];
int i = 0;
List<int> startCodes = [];
// Scan for 3-byte and 4-byte Annex-B start codes
while (i < data.length - 3) {
if (i < data.length - 4 &&
data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1) {
startCodes.add(i);
i += 4;
} else if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
startCodes.add(i);
i += 3;
} else {
i++;
}
}
// Sentinel: treat the end of data as the final boundary
startCodes.add(data.length);
// Extract each NALU
int nalusTotalLen = 0;
for (int idx = 0; idx < startCodes.length - 1; idx++) {
int start = startCodes[idx];
int next = startCodes[idx + 1];
int skip = (data[start] == 0 && data[start + 1] == 0 && data[start + 2] == 0 && data[start + 3] == 1) ? 4 : 3;
int naluStart = start + skip;
if (naluStart < next) {
final nalu = data.sublist(start, next);
nalusTotalLen += nalu.length;
if (nalu.isNotEmpty) {
nalus.add(NaluUnit(getNaluType(nalu), nalu));
}
}
}
if (nalus.isEmpty && data.isNotEmpty) {
nalus.add(NaluUnit(getNaluType(data), data));
} else if (nalusTotalLen < data.length) {
nalus.add(NaluUnit(getNaluType(data.sublist(nalusTotalLen)), data.sublist(nalusTotalLen)));
}
return nalus;
}
/// Extract the NALU type (low 5 bits of the header byte)
static int getNaluType(List<int> nalu) {
if (nalu.isEmpty) return -1;
int offset = 0;
if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
if (nalu[2] == 0x01)
offset = 3;
else if (nalu[2] == 0x00 && nalu[3] == 0x01)
offset = 4;
}
if (nalu.length > offset) {
return nalu[offset] & 0x1F;
}
return -1;
}
}
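
A quick check of the splitter on a synthetic Annex-B buffer; the payload bytes are dummies, only the headers matter.

```dart
import 'nalu_utils.dart';

void main() {
  final data = <int>[
    0, 0, 0, 1, 0x67, 0x42, 0x00, 0x1F, // SPS (type 7)
    0, 0, 0, 1, 0x68, 0xCE, 0x38, 0x80, // PPS (type 8)
    0, 0, 0, 1, 0x65, 0x88, 0x84, 0x00, // IDR slice (type 5)
  ];
  for (final nalu in NaluUtils.splitNalus(data)) {
    print('type=${nalu.type} length=${nalu.data.length}');
  }
  // Prints type=7, type=8, type=5, each with length 8 (start code included).
}
```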

View File

@@ -6,6 +6,8 @@ import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'video_decode_plugin_platform_interface.dart';
import 'nalu_utils.dart';
import 'frame_dependency_manager.dart';
/// Decoder configuration
class VideoDecoderConfig {
@@ -41,20 +43,40 @@ class VideoDecodePlugin {
static int? _textureId;
/// onFrameRendered callback
static void Function(int textureId)? _onFrameRendered;
/// Register an onFrameRendered listener
static void setOnFrameRenderedListener(
void Function(int textureId) callback) {
_onFrameRendered = callback;
_channel.setMethodCallHandler(_handleMethodCall);
}
static Future<void> _handleMethodCall(MethodCall call) async {
if (call.method == 'onFrameRendered') {
final int? textureId = call.arguments['textureId'];
if (_onFrameRendered != null && textureId != null) {
_onFrameRendered!(textureId);
}
}
}
/// Initialize the decoder; returns the texture id
static Future<int?> initDecoder(VideoDecoderConfig config) async {
final textureId = await _channel.invokeMethod<int>('initDecoder', config.toMap());
final textureId =
await _channel.invokeMethod<int>('initDecoder', config.toMap());
_textureId = textureId;
return textureId;
}
/// Decode one frame
static Future<bool> decodeFrame({
/// Internal: send one frame to the platform decoder
static Future<bool> _decodeFrame({
required Uint8List frameData,
required int frameType, // 0 = I-frame, 1 = P-frame
required int timestamp, // frame timestamp
required int frameSeq, // frame sequence number
int? refIFrameSeq, // optional for P-frames: referenced I-frame sequence
required int frameSeq, // frame sequence number
int? refIFrameSeq, // optional for P-frames: referenced I-frame sequence
}) async {
if (_textureId == null) return false;
final params = {
@@ -92,20 +114,115 @@ class VideoDecodePlugin {
/// Texture id
static int? get textureId => _textureId;
/// Plugin registration hook
static void registerWith() {
// Frame dependency manager shared across sendFrame calls
static final _depManager = FrameDependencyManager();
/// Send one frame for decoding.
/// [frameData] raw frame bytes
/// [frameType] 0 = I-frame, 1 = P-frame
/// [timestamp] frame timestamp
/// [frameSeq] frame sequence number
/// [splitNalFromIFrame] when true, I-frames are automatically split into NALUs with SPS/PPS dependency management; when false, the raw data is sent directly (for streams that push SPS/PPS/I-frames separately)
///
static Future<void> sendFrame({
required List<int> frameData,
required int frameType,
required int timestamp,
required int frameSeq,
bool splitNalFromIFrame = false,
}) async {
if (splitNalFromIFrame && frameType == 0) {
// Use the cached SPS/PPS when available
if (_depManager.sps != null && _depManager.pps != null) {
await _decodeFrame(
frameData: _depManager.sps!,
frameType: 0,
timestamp: timestamp,
frameSeq: frameSeq - 2,
refIFrameSeq: frameSeq - 2,
);
await _decodeFrame(
frameData: _depManager.pps!,
frameType: 0,
timestamp: timestamp,
frameSeq: frameSeq - 1,
refIFrameSeq: frameSeq - 1,
);
await _decodeFrame(
frameData: Uint8List.fromList(frameData),
frameType: 0,
timestamp: timestamp,
frameSeq: frameSeq,
refIFrameSeq: frameSeq,
);
_depManager.updateIFrameSeq(frameSeq);
print('[VideoDecodePlugin] Sent I-frame with cached SPS/PPS, frameSeq=$frameSeq');
return;
}
// SPS/PPS not cached yet: split NALUs from this I-frame
final nalus = NaluUtils.splitNalus(frameData);
print('[debug] frameSeq=$frameSeq, split into ${nalus.length} NALUs');
for (final nalu in nalus) {
print('[debug] NALU type=${nalu.type}, length=${nalu.data.length}');
}
List<int>? sps, pps;
for (final nalu in nalus) {
if (nalu.type == 7) sps = nalu.data;
else if (nalu.type == 8) pps = nalu.data;
}
if (sps != null) {
print('[debug] SPS cached, length=${sps.length}');
_depManager.updateSps(Uint8List.fromList(sps));
}
if (pps != null) {
print('[debug] PPS cached, length=${pps.length}');
_depManager.updatePps(Uint8List.fromList(pps));
}
if (_depManager.sps == null || _depManager.pps == null) {
print('[VideoDecodePlugin] Dropping I-frame: SPS/PPS not cached');
return;
}
await _decodeFrame(
frameData: _depManager.sps!,
frameType: 0,
timestamp: timestamp,
frameSeq: frameSeq - 2,
refIFrameSeq: frameSeq - 2,
);
await _decodeFrame(
frameData: _depManager.pps!,
frameType: 0,
timestamp: timestamp,
frameSeq: frameSeq - 1,
refIFrameSeq: frameSeq - 1,
);
await _decodeFrame(
frameData: Uint8List.fromList(frameData),
frameType: 0,
timestamp: timestamp,
frameSeq: frameSeq,
refIFrameSeq: frameSeq,
);
_depManager.updateIFrameSeq(frameSeq);
print('[VideoDecodePlugin] Sent I-frame with SPS/PPS (first split), frameSeq=$frameSeq');
return;
}
// P-frame dependency check runs before sending: drop P-frames whose referenced I-frame was never decoded
if (frameType == 1 && !_depManager.isIFrameDecoded(refIFrameSeq)) {
print('[drop] P-frame depends on an undecoded I-frame, dropping frameSeq=$frameSeq, refIFrameSeq=$refIFrameSeq');
return;
}
// Default path: SPS/PPS/I-frames/P-frames are sent as-is
await _decodeFrame(
frameData: Uint8List.fromList(frameData),
frameType: frameType,
timestamp: timestamp,
frameSeq: frameSeq,
refIFrameSeq: frameType == 0 ? frameSeq : _depManager.lastIFrameSeq,
);
// Record the most recent I-frame
if (frameType == 0) _depManager.updateIFrameSeq(frameSeq);
}
}
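
Putting the pieces together, a sketch of how a caller might drive the new API. VideoDecoderConfig's constructor parameters are not shown in this diff, so the width/height/codecType names below (and the import path) are assumptions.

```dart
import 'dart:typed_data';
import 'package:video_decode_plugin/video_decode_plugin.dart'; // path assumed

// Sketch only; VideoDecoderConfig arguments are assumed names.
Future<void> playStream(Stream<Uint8List> frames) async {
  final textureId = await VideoDecodePlugin.initDecoder(
    VideoDecoderConfig(width: 1280, height: 720, codecType: 'h264'),
  );
  if (textureId == null) return;

  // Fires once per renderFps rendered frames on the Android side.
  VideoDecodePlugin.setOnFrameRenderedListener((id) {
    print('frame batch rendered on texture $id');
  });

  var seq = 0;
  await for (final data in frames) {
    // Crude I-frame check assuming a 4-byte start code; real code should
    // inspect the NALU type properly (see NaluUtils.getNaluType).
    final isIFrame = data.length > 4 && (data[4] & 0x1F) == 5;
    await VideoDecodePlugin.sendFrame(
      frameData: data,
      frameType: isIFrame ? 0 : 1,
      timestamp: DateTime.now().microsecondsSinceEpoch,
      frameSeq: seq++,
      splitNalFromIFrame: true,
    );
  }
}
```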
/// A simple synchronization helper for Dart
void synchronized(Object lock, Function() action) {
// Dart isolates are single-threaded, so no real locking is needed;
// this wrapper exists only for call-site convenience.
action();
}
/// Same as [synchronized], but returns the action's result
T synchronizedWithResult<T>(Object lock, T Function() action) {
return action();
}