diff --git a/README.md b/README.md index 5ce3cdf..087ae06 100644 --- a/README.md +++ b/README.md @@ -225,4 +225,15 @@ await VideoDecodePlugin.sendFrame(frameData: iFrame, frameType: 0, ...); await VideoDecodePlugin.sendFrame(frameData: pFrame, frameType: 1, refIFrameSeq: iFrameSeq, ...); ``` +## iOS端视频解码与渲染实现说明 + +- iOS端基于VideoToolbox实现H264/H265硬件解码,输出CVPixelBuffer。 +- 插件内部实现了解码输入缓冲区、输出缓冲区,解码与渲染完全解耦。 +- 独立渲染线程定时从输出缓冲区取帧,刷新Flutter纹理,支持EMA自适应帧率平滑调整,提升流畅度与健壮性。 +- P帧推送前会校验依赖的I帧是否已解码,若依赖链断裂则P帧直接丢弃,避免马赛克。 +- 仅推送标准NALU(type=1的P帧、type=5的I帧、type=7/8的SPS/PPS)进入解码器,SEI等异常NALU自动丢弃。 +- Flutter端建议用AspectRatio、FittedBox等包裹Texture,确保宽高比一致,避免白边。 +- 由于Flutter Texture机制无法直接控制原生UIView属性,建议Flutter端容器背景色设为透明,布局自适应。 +- 如需更强原生控制力,可考虑自定义PlatformView方案。 + diff --git a/ios/Classes/VideoDecoder.swift b/ios/Classes/VideoDecoder.swift index 2db6dfc..cb2fec0 100644 --- a/ios/Classes/VideoDecoder.swift +++ b/ios/Classes/VideoDecoder.swift @@ -15,50 +15,74 @@ class VideoDecoder { } } + // ====== 关键成员变量注释 ====== + /// 解码会话对象 private var decompressionSession: VTDecompressionSession? + /// 视频格式描述 private var formatDesc: CMVideoFormatDescription? + /// 视频宽度 private let width: Int + /// 视频高度 private let height: Int + /// 编码类型(H264/H265) private let codecType: CodecType + /// 解码线程队列 private let decodeQueue = DispatchQueue(label: "video_decode_plugin.decode.queue") + /// 解码会话是否已准备好 private var isSessionReady = false + /// 最近一次I帧序号 private var lastIFrameSeq: Int? + /// 已处理帧序号集合 private var frameSeqSet = Set<Int>() + /// 最大允许延迟(毫秒) private let maxAllowedDelayMs: Int = 350 + /// 时间戳基准 private var timestampBaseMs: Int64? + /// 首帧相对时间戳 private var firstFrameRelativeTimestamp: Int64? 
- // ===== 新增:缓冲区与自适应帧率相关成员 ===== + // ====== 新增:缓冲区与自适应帧率相关成员 ====== + /// 输入缓冲区(待解码帧队列),线程安全 private let inputQueue = DispatchQueue(label: "video_decode_plugin.input.queue", attributes: .concurrent) private var inputBuffer: [(frameData: Data, frameType: Int, timestamp: Int64, frameSeq: Int, refIFrameSeq: Int?, sps: Data?, pps: Data?)] = [] private let inputBufferSemaphore = DispatchSemaphore(value: 1) private let inputBufferMaxCount = 30 - + /// 输出缓冲区(解码后帧队列),线程安全 private let outputQueue = DispatchQueue(label: "video_decode_plugin.output.queue", attributes: .concurrent) private var outputBuffer: [(pixelBuffer: CVPixelBuffer, timestamp: Int64)] = [] private let outputBufferSemaphore = DispatchSemaphore(value: 1) private let outputBufferMaxCount = 20 - + /// 渲染线程 private var renderThread: Thread? + /// 渲染线程运行标志 private var renderThreadRunning = false + /// 首次渲染回调标志 private var hasNotifiedFlutter = false - - // 帧率自适应参数 + /// 当前渲染帧率 private var renderFps: Int = 15 + /// EMA平滑后的帧率 private var smoothedFps: Double = 15.0 + /// EMA平滑系数 private let alpha: Double = 0.2 + /// 最小帧率 private let minFps: Double = 8.0 + /// 最大帧率 private let maxFps: Double = 30.0 + /// 单次最大调整幅度 private let maxStep: Double = 2.0 + /// 渲染帧时间戳队列 private var renderedTimestamps: [Int64] = [] // ms + /// 渲染帧时间戳最大数量 private let renderedTimestampsMaxCount = 20 + /// 已渲染帧计数 private var renderedFrameCount = 0 + /// 每N帧调整一次帧率 private let fpsAdjustInterval = 10 - /// 解码回调 + /// 解码回调,输出CVPixelBuffer和时间戳 var onFrameDecoded: ((CVPixelBuffer, Int64) -> Void)? 
= { _, _ in } - /// 初始化解码器 + /// 初始化解码器,启动渲染线程 init(width: Int, height: Int, codecType: String) { self.width = width self.height = height @@ -66,7 +90,8 @@ class VideoDecoder { startRenderThread() } - // ===== 新增:输入缓冲区入队方法 ===== + // ====== 输入缓冲区操作 ====== + /// 入队待解码帧 private func enqueueInput(_ item: (Data, Int, Int64, Int, Int?, Data?, Data?)) { inputQueue.async(flags: .barrier) { if self.inputBuffer.count >= self.inputBufferMaxCount { @@ -75,7 +100,7 @@ class VideoDecoder { self.inputBuffer.append(item) } } - // ===== 新增:输入缓冲区出队方法 ===== + /// 出队待解码帧 private func dequeueInput() -> (Data, Int, Int64, Int, Int?, Data?, Data?)? { var item: (Data, Int, Int64, Int, Int?, Data?, Data?)? inputQueue.sync { @@ -85,7 +110,8 @@ class VideoDecoder { } return item } - // ===== 新增:输出缓冲区入队方法 ===== + // ====== 输出缓冲区操作 ====== + /// 入队解码后帧 private func enqueueOutput(_ item: (CVPixelBuffer, Int64)) { outputQueue.async(flags: .barrier) { if self.outputBuffer.count >= self.outputBufferMaxCount { @@ -94,7 +120,7 @@ class VideoDecoder { self.outputBuffer.append(item) } } - // ===== 新增:输出缓冲区出队方法 ===== + /// 出队解码后帧 private func dequeueOutput() -> (CVPixelBuffer, Int64)? { var item: (CVPixelBuffer, Int64)? outputQueue.sync { @@ -104,7 +130,8 @@ class VideoDecoder { } return item } - // ===== 新增:渲染线程启动与停止 ===== + // ====== 渲染线程相关 ====== + /// 启动渲染线程,定时从输出缓冲区取帧并刷新Flutter纹理,支持EMA自适应帧率 private func startRenderThread() { renderThreadRunning = true renderThread = Thread { [weak self] in @@ -144,12 +171,14 @@ class VideoDecoder { } renderThread?.start() } + /// 停止渲染线程 private func stopRenderThread() { renderThreadRunning = false renderThread?.cancel() renderThread = nil } - // ===== 新增:帧率统计与EMA算法 ===== + // ====== EMA帧率平滑算法 ====== + /// 计算最近N帧的平均解码帧率 private func calculateDecodeFps() -> Double { guard renderedTimestamps.count >= 2 else { return smoothedFps } let first = renderedTimestamps.first! 
@@ -158,6 +187,7 @@ class VideoDecoder { let durationMs = max(last - first, 1) return Double(frameCount) * 1000.0 / Double(durationMs) } + /// EMA平滑更新渲染帧率 private func updateSmoothedFps(_ measuredFps: Double) -> Int { let safeFps = min(max(measuredFps, minFps), maxFps) let targetFps = alpha * safeFps + (1 - alpha) * smoothedFps