fix: add iOS comment documentation

liyi 2025-05-07 15:09:53 +08:00
parent e375c0aeb4
commit 57b9b0ce20
2 changed files with 54 additions and 13 deletions

View File

@ -225,4 +225,15 @@ await VideoDecodePlugin.sendFrame(frameData: iFrame, frameType: 0, ...);
await VideoDecodePlugin.sendFrame(frameData: pFrame, frameType: 1, refIFrameSeq: iFrameSeq, ...);
```
## iOS Video Decoding and Rendering Implementation Notes
- On iOS, H.264/H.265 hardware decoding is implemented with VideoToolbox and outputs CVPixelBuffers (see the session-setup sketch below).
- The plugin maintains internal input and output buffers for decoding, so decoding and rendering are fully decoupled.
- A dedicated render thread periodically pulls frames from the output buffer to refresh the Flutter texture, with EMA-based adaptive frame-rate smoothing for better fluidity and robustness.
- Before a P-frame is pushed, the plugin checks that the I-frame it depends on has already been decoded; if the dependency chain is broken, the P-frame is dropped to avoid mosaic artifacts.
- Only standard NALUs (type 1 P-frames, type 5 I-frames, type 7/8 SPS/PPS) are pushed into the decoder; SEI and other unexpected NALUs are dropped automatically (see the filtering sketch below).
- On the Flutter side, wrap the Texture in AspectRatio, FittedBox, or similar widgets to keep the aspect ratio consistent and avoid white bars.
- Because the Flutter Texture mechanism cannot directly control native UIView properties, make the Flutter container's background transparent and let the layout adapt.
- If stronger native-side control is needed, consider a custom PlatformView approach.
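
For orientation, creating a VideoToolbox decompression session from SPS/PPS roughly follows the pattern below. This is a minimal sketch, not the plugin's actual implementation: `makeH264DecompressionSession` is a hypothetical helper, and it assumes H.264 with 4-byte AVCC length prefixes and non-empty parameter sets.

```swift
import CoreMedia
import CoreVideo
import VideoToolbox

// Minimal sketch: build a CMVideoFormatDescription from SPS/PPS, then create a
// VTDecompressionSession that outputs CVPixelBuffers. Error handling is reduced
// to returning nil; a real implementation would surface the OSStatus.
func makeH264DecompressionSession(sps: Data, pps: Data) -> VTDecompressionSession? {
    var formatDesc: CMVideoFormatDescription?
    let status: OSStatus = sps.withUnsafeBytes { spsRaw in
        pps.withUnsafeBytes { ppsRaw in
            let spsPtr = spsRaw.bindMemory(to: UInt8.self).baseAddress!
            let ppsPtr = ppsRaw.bindMemory(to: UInt8.self).baseAddress!
            let pointers: [UnsafePointer<UInt8>] = [spsPtr, ppsPtr]
            let sizes: [Int] = [sps.count, pps.count]
            return CMVideoFormatDescriptionCreateFromH264ParameterSets(
                allocator: kCFAllocatorDefault,
                parameterSetCount: 2,
                parameterSetPointers: pointers,
                parameterSetSizes: sizes,
                nalUnitHeaderLength: 4,        // 4-byte AVCC length prefix assumed
                formatDescriptionOut: &formatDesc)
        }
    }
    guard status == noErr, let desc = formatDesc else { return nil }

    // Request BGRA output so the pixel buffers can be handed to a Flutter texture directly.
    let attrs = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA] as CFDictionary
    var session: VTDecompressionSession?
    let created = VTDecompressionSessionCreate(
        allocator: kCFAllocatorDefault,
        formatDescription: desc,
        decoderSpecification: nil,
        imageBufferAttributes: attrs,
        outputCallback: nil,                   // decode with the outputHandler variant instead
        decompressionSessionOut: &session)
    return created == noErr ? session : nil
}
```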
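
The NALU filtering and I-frame dependency rules can be sketched as follows. This is an illustration only, not the plugin's actual code: the function name, parameters, and the assumption that `nalu` is an H.264 NALU starting at its header byte (start code already stripped) are all hypothetical.

```swift
import Foundation

// Decide whether a raw H.264 NALU should be forwarded to the decoder.
// `decodedIFrameSeqs` stands in for whatever bookkeeping tracks decoded I-frames.
func shouldForwardNalu(_ nalu: Data, refIFrameSeq: Int?, decodedIFrameSeqs: Set<Int>) -> Bool {
    guard let header = nalu.first else { return false }
    switch Int(header & 0x1F) {                // H.264 NALU type = low 5 bits of the first byte
    case 7, 8:                                 // SPS / PPS: always forward
        return true
    case 5:                                    // IDR (I-frame): always forward
        return true
    case 1:                                    // non-IDR (P-frame): require its reference I-frame to be decoded
        guard let ref = refIFrameSeq else { return false }
        return decodedIFrameSeqs.contains(ref)
    default:                                   // SEI and any other type is dropped
        return false
    }
}
```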

View File

@ -15,50 +15,74 @@ class VideoDecoder {
}
}
// ====== Decoder state ======
/// VideoToolbox decompression session
private var decompressionSession: VTDecompressionSession?
/// Video format description for the current stream
private var formatDesc: CMVideoFormatDescription?
/// Video width (pixels)
private let width: Int
/// Video height (pixels)
private let height: Int
/// Codec type (H264/H265)
private let codecType: CodecType
/// Serial dispatch queue for decode work
private let decodeQueue = DispatchQueue(label: "video_decode_plugin.decode.queue")
/// Whether the decompression session has been created and is ready
private var isSessionReady = false
/// Sequence number of the last I-frame
private var lastIFrameSeq: Int?
/// Sequence numbers of frames already decoded (used for dependency checks)
private var frameSeqSet = Set<Int>()
/// Maximum allowed frame delay in milliseconds
private let maxAllowedDelayMs: Int = 350
/// Base timestamp in milliseconds
private var timestampBaseMs: Int64?
/// Relative timestamp of the first frame
private var firstFrameRelativeTimestamp: Int64?
// ===== Buffers & rendering members =====
// ====== Buffers & rendering members ======
/// Thread-safe access queue for the input buffer
private let inputQueue = DispatchQueue(label: "video_decode_plugin.input.queue", attributes: .concurrent)
private var inputBuffer: [(frameData: Data, frameType: Int, timestamp: Int64, frameSeq: Int, refIFrameSeq: Int?, sps: Data?, pps: Data?)] = []
private let inputBufferSemaphore = DispatchSemaphore(value: 1)
private let inputBufferMaxCount = 30
/// Thread-safe access queue for the output buffer
private let outputQueue = DispatchQueue(label: "video_decode_plugin.output.queue", attributes: .concurrent)
private var outputBuffer: [(pixelBuffer: CVPixelBuffer, timestamp: Int64)] = []
private let outputBufferSemaphore = DispatchSemaphore(value: 1)
private let outputBufferMaxCount = 20
/// Render thread
private var renderThread: Thread?
/// Whether the render thread is running
private var renderThreadRunning = false
/// Whether the Flutter side has been notified
private var hasNotifiedFlutter = false
// Adaptive frame rate parameters
/// Current render frame rate (fps)
private var renderFps: Int = 15
/// EMA-smoothed frame rate
private var smoothedFps: Double = 15.0
/// EMA smoothing factor
private let alpha: Double = 0.2
/// Minimum allowed frame rate
private let minFps: Double = 8.0
/// Maximum allowed frame rate
private let maxFps: Double = 30.0
/// Maximum fps change per adjustment step
private let maxStep: Double = 2.0
/// Timestamps of recently rendered frames
private var renderedTimestamps: [Int64] = [] // ms
/// Maximum number of rendered timestamps to keep
private let renderedTimestampsMaxCount = 20
/// Count of rendered frames (used to trigger fps adjustment)
private var renderedFrameCount = 0
/// Adjust the frame rate every N rendered frames
private let fpsAdjustInterval = 10
/// Callback invoked when a frame has been decoded
/// Provides the decoded CVPixelBuffer and its timestamp
var onFrameDecoded: ((CVPixelBuffer, Int64) -> Void)? = { _, _ in }
/// Initialize the decoder with the video size and codec type
/// Also starts the render thread
init(width: Int, height: Int, codecType: String) {
self.width = width
self.height = height
@ -66,7 +90,8 @@ class VideoDecoder {
startRenderThread()
}
// ===== Input buffer operations =====
// ====== Input buffer operations ======
/// Enqueue a frame into the input buffer (barrier write, bounded by inputBufferMaxCount)
private func enqueueInput(_ item: (Data, Int, Int64, Int, Int?, Data?, Data?)) {
inputQueue.async(flags: .barrier) {
if self.inputBuffer.count >= self.inputBufferMaxCount {
@ -75,7 +100,7 @@ class VideoDecoder {
self.inputBuffer.append(item)
}
}
// ===== Input buffer dequeue =====
/// Dequeue the next frame from the input buffer; returns nil when empty
private func dequeueInput() -> (Data, Int, Int64, Int, Int?, Data?, Data?)? {
var item: (Data, Int, Int64, Int, Int?, Data?, Data?)?
inputQueue.sync {
@ -85,7 +110,8 @@ class VideoDecoder {
}
return item
}
// ===== Output buffer operations =====
// ====== Output buffer operations ======
/// Enqueue a decoded pixel buffer into the output buffer (barrier write, bounded by outputBufferMaxCount)
private func enqueueOutput(_ item: (CVPixelBuffer, Int64)) {
outputQueue.async(flags: .barrier) {
if self.outputBuffer.count >= self.outputBufferMaxCount {
@ -94,7 +120,7 @@ class VideoDecoder {
self.outputBuffer.append(item)
}
}
// ===== Output buffer dequeue =====
/// Dequeue the next decoded frame from the output buffer; returns nil when empty
private func dequeueOutput() -> (CVPixelBuffer, Int64)? {
var item: (CVPixelBuffer, Int64)?
outputQueue.sync {
@ -104,7 +130,8 @@ class VideoDecoder {
}
return item
}
// ===== Render thread =====
// ====== Render thread ======
/// Start the render thread: it periodically pulls frames from the output buffer, hands them to Flutter, and adjusts the target frame rate via EMA
private func startRenderThread() {
renderThreadRunning = true
renderThread = Thread { [weak self] in
@ -144,12 +171,14 @@ class VideoDecoder {
}
renderThread?.start()
}
/// Stop the render thread
private func stopRenderThread() {
renderThreadRunning = false
renderThread?.cancel()
renderThread = nil
}
// ===== EMA adaptive frame rate =====
// ====== EMA adaptive frame rate ======
/// Calculate the measured frame rate from the recent rendered timestamps
private func calculateDecodeFps() -> Double {
guard renderedTimestamps.count >= 2 else { return smoothedFps }
let first = renderedTimestamps.first!
@ -158,6 +187,7 @@ class VideoDecoder {
let durationMs = max(last - first, 1)
return Double(frameCount) * 1000.0 / Double(durationMs)
}
/// Update the smoothed fps using an EMA of the (clamped) measured fps
private func updateSmoothedFps(_ measuredFps: Double) -> Int {
let safeFps = min(max(measuredFps, minFps), maxFps)
let targetFps = alpha * safeFps + (1 - alpha) * smoothedFps