diff --git a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt
index de73381..7db41e1 100644
--- a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt
+++ b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt
@@ -48,7 +48,7 @@ class VideoDecoder(
     companion object {
         private const val TAG = "VideoDecoder"
         private const val TIMEOUT_US = 10000L
-        private const val INPUT_BUFFER_QUEUE_CAPACITY = 250 // 输入缓冲区容量
+        private const val INPUT_BUFFER_QUEUE_CAPACITY = 30 // 输入缓冲区容量
     }
 
     // region 成员变量定义
@@ -64,7 +64,7 @@ class VideoDecoder(
     private val frameSeqSet = Collections.newSetFromMap(ConcurrentHashMap()) // 防止重复帧入队
 
     // 解码输出缓冲区,容量为100帧
-    private val outputFrameQueue = LinkedBlockingQueue(100)
+    private val outputFrameQueue = LinkedBlockingQueue(30)
 
     // 渲染线程控制
     @Volatile private var renderThreadRunning = true
@@ -90,6 +90,12 @@ class VideoDecoder(
     private val maxFps = 30 // 渲染帧率上限,防止过高
     private val maxStep = 2.0 // 单次最大调整幅度,防止突变
 
+    // 1. 新增成员变量
+    @Volatile private var latestRenderedTimestampMs: Long? = null
+    private val MAX_ALLOWED_DELAY_MS = 60 // 最大允许延迟,单位毫秒
+    @Volatile private var timestampBaseMs: Long? = null
+    @Volatile private var firstFrameRelativeTimestamp: Long? = null
+
     // 输入帧结构体
     private data class FrameData(
         val data: ByteArray,
@@ -131,14 +137,24 @@ class VideoDecoder(
             // 从输入队列取出一帧数据
             val frame = inputFrameQueue.poll()
             if (frame != null) {
+                // 5. 取绝对时间戳
+                val base = timestampBaseMs ?: 0L
+                val firstRel = firstFrameRelativeTimestamp ?: 0L
+                val absTimestamp = base + (frame.timestamp - firstRel)
+                val now = System.currentTimeMillis()
+                if (absTimestamp < now - MAX_ALLOWED_DELAY_MS) {
+                    Log.w(TAG, "[onInputBufferAvailable] Drop frame due to delay: absFrameTs=$absTimestamp, now=$now, maxDelay=$MAX_ALLOWED_DELAY_MS")
+                    frameSeqSet.remove(frame.frameSeq)
+                    codec.queueInputBuffer(index, 0, 0, 0, 0)
+                    return
+                }
                 frameSeqSet.remove(frame.frameSeq)
                 val inputBuffer = codec.getInputBuffer(index)
                 if (inputBuffer != null) {
                     inputBuffer.clear()
                     inputBuffer.put(frame.data)
                     val start = System.nanoTime()
-                    val ptsUs = frame.timestamp * 1000L
-                    // 入队到解码器
+                    val ptsUs = absTimestamp * 1000L // 6. 送入解码器用绝对时间戳
                     codec.queueInputBuffer(
                         index,
                         0,
@@ -196,6 +212,8 @@ class VideoDecoder(
                 // 阻塞式等待新帧,避免空转
                 val frame = outputFrameQueue.take()
                 frame.codec.releaseOutputBuffer(frame.bufferIndex, true)
+                // 7. 渲染线程用系统时间推进
+                latestRenderedTimestampMs = System.currentTimeMillis()
                 renderedFrameCount++
                 // 只在首次渲染时回调Flutter
                 if (!hasNotifiedFlutter) {
@@ -247,6 +265,27 @@ class VideoDecoder(
     ): Boolean {
         if (!running || mediaCodec == null) return false
         if (!frameSeqSet.add(frameSeq)) return false // 防止重复帧
+        // 2. 初始化起点
+        if (timestampBaseMs == null) {
+            synchronized(this) {
+                if (timestampBaseMs == null) {
+                    timestampBaseMs = System.currentTimeMillis()
+                    firstFrameRelativeTimestamp = timestamp
+                    Log.i(TAG, "[timestampBase] Set timestampBaseMs=$timestampBaseMs, firstFrameRelativeTimestamp=$firstFrameRelativeTimestamp")
+                }
+            }
+        }
+        val base = timestampBaseMs ?: 0L
+        val firstRel = firstFrameRelativeTimestamp ?: 0L
+        val absTimestamp = base + (timestamp - firstRel)
+        // 3. decodeFrame延迟丢弃判断(用系统时间)
+        val now = System.currentTimeMillis()
+        val diff = now - absTimestamp
+        // Log.d(TAG, "[decodeFrame] absTimestamp=$absTimestamp, now=$now, now-absTimestamp=$diff, maxDelay=$MAX_ALLOWED_DELAY_MS")
+        if (absTimestamp < now - MAX_ALLOWED_DELAY_MS) {
+            Log.w(TAG, "[decodeFrame] Drop frame due to delay: absFrameTs=$absTimestamp, now=$now, maxDelay=$MAX_ALLOWED_DELAY_MS")
+            return false
+        }
         var allow = false
         if (frameType == 0) { // I帧
             lastIFrameSeq = frameSeq
@@ -263,6 +302,7 @@ class VideoDecoder(
                 return false
             }
         }
+        // 4. 入队时FrameData仍用原始相对时间戳
         return inputFrameQueue.offer(FrameData(frameData, frameType, timestamp, frameSeq, refIFrameSeq), 50, TimeUnit.MILLISECONDS)
     }
 