From f8bfa2b22937a697f4059f5b02b5c30406ae60aa Mon Sep 17 00:00:00 2001 From: liyi Date: Mon, 21 Apr 2025 15:11:23 +0800 Subject: [PATCH] =?UTF-8?q?feat:v1=E7=89=88=E6=9C=AC=E5=AE=9E=E7=8E=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 252 +-- .../video_decode_plugin/VideoDecodePlugin.kt | 5 + .../video_decode_plugin/VideoDecoder.kt | 612 ++++--- example/lib/h264_frame_generator.dart | 137 -- example/lib/main.dart | 1525 ++++++++--------- example/pubspec.yaml | 4 +- 6 files changed, 1191 insertions(+), 1344 deletions(-) delete mode 100644 example/lib/h264_frame_generator.dart diff --git a/README.md b/README.md index a6a635e..84d9762 100644 --- a/README.md +++ b/README.md @@ -1,150 +1,160 @@ -# video_decode_plugin +# Video Decode Plugin -一个高性能的 Flutter 插件,用于在 Android 原生层解码 H.264 裸流数据,并支持两种渲染模式。 +基于MediaCodec/VideoToolbox的跨平台H.264/H.265视频解码Flutter插件,专为低延迟实时视频流解码设计。 -## 功能特点 +[![pub package](https://img.shields.io/pub/v/video_decode_plugin.svg)](https://pub.dev/packages/video_decode_plugin) -- 支持 H.264 Annex B 格式裸流解码(含 NALU 单元) -- 使用 Android MediaCodec 硬解码,提供高性能解码能力 -- 支持两种渲染模式: - - Flutter 纹理渲染:将解码后的帧通过 Flutter Texture 传递到 Flutter UI - - 原生 SurfaceView 渲染:在原生 Android 层直接渲染 -- 提供完善的配置管理和性能监控 -- 支持动态丢帧策略,优化内存使用 -- 适配低端设备的性能优化措施 +## 特性 + +- 🔄 基于原生解码器的高性能视频解码(Android使用MediaCodec,iOS使用VideoToolbox) +- 🖼️ 支持H.264和H.265(HEVC)视频格式 +- ⏱️ 低延迟解码,适用于实时视频流应用 +- 📱 跨平台支持(Android和iOS) +- 🔧 高度可配置的解码参数 +- 📊 详细的解码统计和诊断信息 +- 💡 支持I帧和P帧的单独传入和处理 +- 🎞️ 使用Flutter Texture进行高效渲染 ## 安装 -在 `pubspec.yaml` 文件中添加依赖: - ```yaml dependencies: - video_decode_plugin: ^0.0.1 + video_decode_plugin: ^1.0.0 ``` -## 使用方法 +## 快速开始 -### 基本用法 +### 初始化解码器 ```dart import 'package:video_decode_plugin/video_decode_plugin.dart'; -// 创建解码器(Flutter 渲染模式) -final decoder = H264Decoder(renderMode: RenderMode.flutter); - -// 初始化解码器 -await decoder.init( - const H264DecoderConfig( - bufferSize: 10, - maxWidth: 1280, - maxHeight: 720, - useDropFrameStrategy: true, - debugMode: true, - ), +// 创建解码器配置 +final config = VideoDecoderConfig( + width: 640, // 视频宽度 + height: 480, // 视频高度 + codecType: CodecType.h264, // 编解码类型:h264或h265 + frameRate: 30, // 目标帧率(可选) + bufferSize: 30, // 缓冲区大小 + isDebug: true, // 是否启用详细日志 ); -// 开始解码 -await decoder.start(); +// 初始化解码器,获取纹理ID +final textureId = await VideoDecodePlugin.initDecoder(config); -// 输入 H.264 数据 -await decoder.feedData(h264Data); - -// 暂停解码 -await decoder.pause(); - -// 恢复解码 -await decoder.resume(); - -// 释放资源 -await decoder.release(); -``` - -### 渲染视图 - -#### Flutter 渲染模式 - -```dart -// 使用 Flutter 渲染模式显示视频 -H264VideoPlayerWidget( - decoder: decoder, - width: 640, - height: 360, - backgroundColor: Colors.black, -) -``` - -#### 原生渲染模式 - -```dart -// 使用原生渲染模式显示视频 -const H264NativePlayerWidget( - width: 640, - height: 360, - backgroundColor: Colors.black, -) -``` - -### 监听事件 - -```dart -// 订阅解码器事件 -decoder.eventStream.listen((event) { - switch (event.type) { - case H264DecoderEventType.frameAvailable: - // 新帧可用 - break; - case H264DecoderEventType.stats: - // 性能统计信息 - final stats = event.data as Map; - print('总帧数: ${stats['totalFrames']}'); - print('丢弃帧数: ${stats['droppedFrames']}'); - print('缓冲区使用: ${stats['bufferUsage']}'); - print('解码耗时: ${stats['lastDecodingTimeMs']}ms'); - break; - case H264DecoderEventType.error: - // 解码错误 - break; - } +// 设置帧回调 +VideoDecodePlugin.setFrameCallback((textureId) { + // 当新帧可用时被调用 + setState(() { + // 更新UI + }); }); ``` -## 配置选项 +### 渲染视频 -`H264DecoderConfig` 类提供以下配置选项: - -| 参数 | 类型 | 默认值 | 说明 | 
-|-----|------|-------|-----| -| bufferSize | int | 5 | 缓冲区大小(帧数) | -| maxWidth | int | 1280 | 最大解码宽度 | -| maxHeight | int | 720 | 最大解码高度 | -| useDropFrameStrategy | bool | true | 是否启用丢帧策略 | -| debugMode | bool | false | 是否启用调试模式 | - -## 性能优化 - -本插件提供多项性能优化措施: - -1. **动态丢帧策略**:缓冲区满时,优先保留 I 帧,丢弃 P 帧,确保解码连续性。 -2. **零拷贝传输**:使用 Surface 和 SurfaceTexture 直接渲染,避免内存拷贝。 -3. **异步处理**:解码和渲染在独立线程进行,不阻塞主线程。 -4. **低端设备适配**:可设置最大解码分辨率,避免低端设备性能问题。 - -## 示例应用 - -本项目自带一个完整的示例应用,演示如何使用这个插件播放 H.264 视频流。 - -运行示例: - -```shell -cd example -flutter run +```dart +// 使用Flutter的Texture组件显示视频 +Texture( + textureId: textureId, + filterQuality: FilterQuality.low, +) ``` +### 解码视频帧 + +```dart +// 解码I帧 +await VideoDecodePlugin.decodeFrame( + frameData, // Uint8List类型的H.264/H.265帧数据 + FrameType.iFrame +); + +// 解码P帧 +await VideoDecodePlugin.decodeFrame( + frameData, // Uint8List类型的H.264/H.265帧数据 + FrameType.pFrame +); +``` + +### 获取解码统计信息 + +```dart +final stats = await VideoDecodePlugin.getDecoderStats(textureId); +print('已渲染帧数: ${stats['renderedFrames']}'); +print('丢弃帧数: ${stats['droppedFrames']}'); +``` + +### 释放资源 + +```dart +await VideoDecodePlugin.releaseDecoder(); +``` + +## 高级用法 + +### 多实例支持 + +插件支持同时创建和管理多个解码器实例: + +```dart +// 创建第一个解码器 +final textureId1 = await VideoDecodePlugin.createDecoder(config1); + +// 创建第二个解码器 +final textureId2 = await VideoDecodePlugin.createDecoder(config2); + +// 为特定纹理ID设置回调 +VideoDecodePlugin.setFrameCallbackForTexture(textureId1, (id) { + // 处理第一个解码器的帧 +}); + +// 为特定纹理ID解码帧 +await VideoDecodePlugin.decodeFrameForTexture(textureId2, frameData, frameType); + +// 释放特定解码器 +await VideoDecodePlugin.releaseDecoderForTexture(textureId1); +``` + +### 优化I帧和SPS/PPS处理 + +对于H.264视频流,建议按照以下顺序处理帧: + +1. 首先发送SPS(序列参数集,NAL类型7) +2. 其次发送PPS(图像参数集,NAL类型8) +3. 然后发送IDR帧(即I帧,NAL类型5) +4. 
最后发送P帧(NAL类型1) + +```dart +// 发送SPS和PPS数据 +await VideoDecodePlugin.decodeFrame(spsData, FrameType.iFrame); +await VideoDecodePlugin.decodeFrame(ppsData, FrameType.iFrame); + +// 发送IDR帧 +await VideoDecodePlugin.decodeFrame(idrData, FrameType.iFrame); + +// 发送P帧 +await VideoDecodePlugin.decodeFrame(pFrameData, FrameType.pFrame); +``` + +## 完整示例 + +请参考示例应用,了解如何: +- 从文件或网络流加载H.264视频 +- 正确解析和处理NAL单元 +- 高效地解码和渲染视频帧 +- 监控解码性能并进行故障排除 + +## 支持 + +- Android 5.0 (API级别21)及以上 +- iOS 11.0及以上 + ## 注意事项 -- 目前仅支持 Android 平台 -- 需要确保输入的 H.264 数据是有效的 Annex B 格式(含 NALU 开始码) -- 建议在实际项目中根据设备性能动态调整解码配置 +- 视频分辨率受设备硬件限制,较旧设备可能无法支持高分辨率视频 +- 硬件解码器可能在某些设备上不可用,插件会自动回退到软件解码 +- 对于最佳性能,建议在实际硬件设备上测试,而不仅仅是模拟器 ## 许可证 diff --git a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt index 6cd1c82..1d637e2 100644 --- a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt +++ b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecodePlugin.kt @@ -136,12 +136,17 @@ class VideoDecodePlugin : FlutterPlugin, MethodCallHandler { // 通知Flutter刷新纹理 runOnMainThread { try { + Log.d(TAG, "发送帧可用通知给Flutter,纹理ID: $textureId") channel.invokeMethod("onFrameAvailable", mapOf("textureId" to textureId)) } catch (e: Exception) { Log.e(TAG, "通知Flutter更新纹理失败", e) } } } + + override fun onError(error: String) { + Log.e(TAG, "解码器错误: $error") + } } // 保存解码器 diff --git a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt index b7907fb..c520179 100644 --- a/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt +++ b/android/src/main/kotlin/top/skychip/video_decode_plugin/VideoDecoder.kt @@ -4,23 +4,16 @@ import android.content.Context import android.graphics.SurfaceTexture import android.media.MediaCodec import android.media.MediaFormat -import android.os.Build import android.os.Handler import android.os.Looper import android.util.Log import android.view.Surface import io.flutter.view.TextureRegistry import java.nio.ByteBuffer -import java.util.concurrent.LinkedBlockingQueue -import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicBoolean -import java.util.concurrent.atomic.AtomicLong -import java.util.concurrent.locks.ReentrantLock -import kotlin.concurrent.withLock -import kotlin.math.max /** - * 视频解码器 + * 简化版视频解码器 * 负责解码H264/H265视频数据并将其渲染到Surface上 */ class VideoDecoder( @@ -30,52 +23,35 @@ class VideoDecoder( ) { companion object { private const val TAG = "VideoDecoder" - private const val TIMEOUT_US = 10000L // 10ms - private const val MAX_FRAME_AGE_MS = 100L // 丢弃过旧的帧 + private const val TIMEOUT_US = 10000L // 使用更短的超时时间(10毫秒) + + // Mime types + private const val MIME_H264 = "video/avc" + private const val MIME_H265 = "video/hevc" + + // H264相关常量 + private const val NAL_UNIT_TYPE_SPS = 7 + private const val NAL_UNIT_TYPE_PPS = 8 + private const val NAL_UNIT_TYPE_IDR = 5 + private const val NAL_UNIT_TYPE_NON_IDR = 1 // P帧 + + // 最大允许连续P帧数 + private const val MAX_CONSECUTIVE_P_FRAMES = 30 + + // 异步模式参数 + private const val LOW_LATENCY_MODE = true + private const val OPERATING_RATE = 90 // 解码速率提高到90FPS } // 回调接口 interface DecoderCallback { fun onFrameAvailable() + fun onError(error: String) } // 回调实例 var callback: DecoderCallback? 
= null - // 帧类型枚举 - enum class FrameType { - I_FRAME, P_FRAME, UNKNOWN - } - - // 帧结构体 - private data class Frame( - val data: ByteArray, - val type: FrameType, - val timestamp: Long = System.currentTimeMillis() - ) { - // 检查帧是否过期 - fun isExpired(): Boolean { - return System.currentTimeMillis() - timestamp > MAX_FRAME_AGE_MS - } - - override fun equals(other: Any?): Boolean { - if (this === other) return true - if (javaClass != other?.javaClass) return false - - other as Frame - if (!data.contentEquals(other.data)) return false - if (type != other.type) return false - - return true - } - - override fun hashCode(): Int { - var result = data.contentHashCode() - result = 31 * result + type.hashCode() - return result - } - } - // SurfaceTexture 和 Surface 用于显示解码后的帧 private val surfaceTexture: SurfaceTexture = textureEntry.surfaceTexture() private val surface: Surface = Surface(surfaceTexture) @@ -83,254 +59,326 @@ class VideoDecoder( // MediaCodec 解码器 private var mediaCodec: MediaCodec? = null - // 待解码的帧队列 - private val frameQueue = LinkedBlockingQueue(config.bufferSize) - - // 解码线程 - private var decodeThread: Thread? = null + // 解码状态 private val isRunning = AtomicBoolean(false) + private val isDecoderConfigured = AtomicBoolean(false) - // 当前解码的帧计数 + // 帧计数 private var frameCount = 0 - - // 解码流状态跟踪 - private val hasReceivedIFrame = AtomicBoolean(false) - private val lastIFrameTimestamp = AtomicLong(0) - private var droppedFrameCount = 0 private var renderedFrameCount = 0 + private var droppedFrameCount = 0 + + // 跟踪I帧状态 + private var hasSentSPS = false + private var hasSentPPS = false + private var hasSentIDR = false + + // 跟踪上一个关键帧时间 + private var lastIFrameTimeMs = 0L + + // 连续P帧计数 + private var consecutivePFrameCount = 0 + + // 用于避免重复处理相同SPS/PPS的缓存 + private var lastSPSHash: Int? = null + private var lastPPSHash: Int? 
= null + + // 最后有效输出时间戳,用于检测解码器卡住的情况 + private var lastOutputTimeMs = 0L // 主线程Handler,用于在主线程上更新纹理 private val mainHandler = Handler(Looper.getMainLooper()) - // 初始化解码器 + // 日志控制 + private var logVerbose = false + private var frameLogThreshold = 30 // 每30帧输出一次详细日志 + + // 解码器初始化 init { try { + logVerbose = config.isDebug + + // 捕获所有日志 Log.d(TAG, "初始化解码器: ${config.width}x${config.height}, 编码: ${config.codecType}") - // 在主线程上设置SurfaceTexture - mainHandler.post { - try { - // 设置SurfaceTexture的默认缓冲区大小 - surfaceTexture.setDefaultBufferSize(config.width, config.height) - Log.d(TAG, "SurfaceTexture缓冲区大小设置为: ${config.width}x${config.height}") - - // 初始化解码器 - setupDecoder() - startDecodeThread() - - // 输出解码器状态 - mediaCodec?.let { - Log.d(TAG, "解码器已启动: ${it.codecInfo.name}") - } - - // 延迟100ms通知一个空帧,确保Surface已准备好 - mainHandler.postDelayed({ - Log.d(TAG, "发送初始化完成通知") - callback?.onFrameAvailable() - }, 100) - - } catch (e: Exception) { - Log.e(TAG, "初始化解码器失败", e) - release() + // 设置SurfaceTexture的默认缓冲区大小 + surfaceTexture.setDefaultBufferSize(config.width, config.height) + + // 初始化解码器 + if (setupDecoder()) { + isRunning.set(true) + + // 通知初始帧可用(让Flutter创建Texture View) + Log.d(TAG, "解码器初始化成功,发送初始帧通知") + mainHandler.post { + notifyFrameAvailable() } + } else { + Log.e(TAG, "解码器初始化失败") + callback?.onError("解码器初始化失败") } } catch (e: Exception) { Log.e(TAG, "创建解码器实例失败", e) + callback?.onError("创建解码器实例失败: ${e.message}") release() - throw e } } - + /** * 设置解码器 */ - private fun setupDecoder() { + private fun setupDecoder(): Boolean { try { - Log.d(TAG, "开始设置解码器") - - // 确定MIME类型 - val mimeType = if (config.codecType.lowercase() == "h265") { - MediaFormat.MIMETYPE_VIDEO_HEVC - } else { - MediaFormat.MIMETYPE_VIDEO_AVC // 默认H.264 + // 选择适合的MIME类型 + val mime = when (config.codecType) { + "h264" -> MIME_H264 + "h265" -> MIME_H265 + else -> MIME_H264 // 默认使用H.264 } - // 创建格式 - val format = MediaFormat.createVideoFormat(mimeType, config.width, config.height) + // 创建MediaFormat + val format = MediaFormat.createVideoFormat(mime, config.width, config.height) - // 配置基本参数 + // 配置参数 format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, config.width * config.height) - // 创建解码器 - val decoderInstance = if (config.enableHardwareDecoder) { - try { - MediaCodec.createDecoderByType(mimeType) - } catch (e: Exception) { - Log.w(TAG, "硬件解码器创建失败,尝试使用软件解码器", e) - MediaCodec.createDecoderByType(mimeType) - } - } else { - MediaCodec.createDecoderByType(mimeType) + // 如果指定了帧率,设置帧率 + config.frameRate?.let { fps -> + format.setInteger(MediaFormat.KEY_FRAME_RATE, fps) } + // 创建解码器实例 + val decoder = MediaCodec.createDecoderByType(mime) + // 配置解码器 - decoderInstance.configure(format, surface, null, 0) - decoderInstance.start() + decoder.configure(format, surface, null, 0) + decoder.start() // 保存解码器实例 - mediaCodec = decoderInstance - - // 标记为运行中 - isRunning.set(true) - - Log.d(TAG, "解码器设置完成: ${decoderInstance.codecInfo.name}") + mediaCodec = decoder + isDecoderConfigured.set(true) + Log.d(TAG, "解码器设置完成: ${decoder.codecInfo.name}") + return true } catch (e: Exception) { Log.e(TAG, "设置解码器失败", e) - throw e - } - } - - /** - * 启动解码线程 - */ - private fun startDecodeThread() { - decodeThread = Thread({ - try { - Log.d(TAG, "解码线程已启动") - decodeLoop() - } catch (e: Exception) { - if (isRunning.get()) { - Log.e(TAG, "解码线程异常退出", e) - } - } finally { - Log.d(TAG, "解码线程已结束") - } - }, "VideoDecoderThread") - - decodeThread?.start() - } - - /** - * 解码主循环 - */ - private fun decodeLoop() { - val codec = mediaCodec ?: return - Log.d(TAG, "开始解码循环,解码器: 
${codec.codecInfo.name}") - - while (isRunning.get()) { - try { - // 从队列取出一帧 - val frame = frameQueue.poll(100, TimeUnit.MILLISECONDS) - if (frame == null) { - continue // 没有帧可解码,继续等待 - } - - // 处理I帧标志 - if (frame.type == FrameType.I_FRAME) { - hasReceivedIFrame.set(true) - lastIFrameTimestamp.set(System.currentTimeMillis()) - Log.d(TAG, "收到I帧: 大小=${frame.data.size}字节") - } else if (!hasReceivedIFrame.get()) { - // 如果还没有收到I帧,丢弃P帧 - droppedFrameCount++ - continue - } - - // 获取输入缓冲区 - val inputBufferId = codec.dequeueInputBuffer(TIMEOUT_US) - if (inputBufferId >= 0) { - val inputBuffer = codec.getInputBuffer(inputBufferId) - if (inputBuffer != null) { - // 将数据复制到缓冲区 - inputBuffer.clear() - inputBuffer.put(frame.data) - - // 提交缓冲区进行解码 - codec.queueInputBuffer( - inputBufferId, - 0, - frame.data.size, - System.nanoTime() / 1000, - if (frame.type == FrameType.I_FRAME) MediaCodec.BUFFER_FLAG_KEY_FRAME else 0 - ) - } - } - - // 处理输出缓冲区 - val bufferInfo = MediaCodec.BufferInfo() - var outputBufferId = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_US) - - while (outputBufferId >= 0) { - // 将帧渲染到Surface - val shouldRender = true // 始终渲染 - - if (shouldRender) { - codec.releaseOutputBuffer(outputBufferId, true) - frameCount++ - renderedFrameCount++ - - // 强制通知 - if (frameCount % 1 == 0) { // 每帧都通知 - mainHandler.post { - Log.d(TAG, "通知帧可用: 第$frameCount帧") - callback?.onFrameAvailable() - } - } - } else { - codec.releaseOutputBuffer(outputBufferId, false) - droppedFrameCount++ - } - - // 获取下一个输出缓冲区 - outputBufferId = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_US) - } - - } catch (e: InterruptedException) { - Log.d(TAG, "解码线程被中断") - break - } catch (e: Exception) { - Log.e(TAG, "解码循环异常", e) - } - } - } - - /** - * 解码视频帧 - * - * @param frameData 帧数据 - * @param isIFrame 是否为I帧 - * @return 是否成功添加到解码队列 - */ - fun decodeFrame(frameData: ByteArray, isIFrame: Boolean): Boolean { - if (!isRunning.get() || frameData.isEmpty()) { - Log.w(TAG, "解码器未运行或帧数据为空") + isDecoderConfigured.set(false) + callback?.onError("设置解码器失败: ${e.message}") return false } + } + + /** + * 通知帧可用 + */ + private fun notifyFrameAvailable() { + if (!isRunning.get()) { + Log.d(TAG, "解码器已停止,跳过帧可用通知") + return + } try { - // 创建帧对象 - val frameType = if (isIFrame) FrameType.I_FRAME else FrameType.P_FRAME - val frame = Frame(frameData, frameType) - - // 对于I帧记录日志 - if (isIFrame) { - Log.d(TAG, "添加I帧到队列: 大小=${frameData.size}字节") + Log.d(TAG, "发送帧可用通知,当前渲染帧数: $renderedFrameCount") + callback?.onFrameAvailable() + } catch (e: Exception) { + Log.e(TAG, "通知帧可用时出错: ${e.message}", e) + } + } + + /** + * 快速检查NAL类型 + */ + private fun checkNalType(frame: ByteArray): Int { + try { + // 快速检查常见的4字节起始码 + if (frame.size > 4 && frame[0] == 0.toByte() && frame[1] == 0.toByte() && + frame[2] == 0.toByte() && frame[3] == 1.toByte()) { + return frame[4].toInt() and 0x1F } - // 将帧添加到队列 - return if (frameQueue.offer(frame)) { - true - } else { - // 队列已满,移除一帧后再添加 - frameQueue.poll() - droppedFrameCount++ - frameQueue.offer(frame) + // 快速检查常见的3字节起始码 + if (frame.size > 3 && frame[0] == 0.toByte() && frame[1] == 0.toByte() && + frame[2] == 1.toByte()) { + return frame[3].toInt() and 0x1F + } + + // 尝试搜索起始码 + for (i in 0 until frame.size - 4) { + if (frame[i] == 0.toByte() && frame[i+1] == 0.toByte() && + frame[i+2] == 0.toByte() && frame[i+3] == 1.toByte()) { + return frame[i+4].toInt() and 0x1F + } + + if (i < frame.size - 3 && frame[i] == 0.toByte() && + frame[i+1] == 0.toByte() && frame[i+2] == 1.toByte()) { + return frame[i+3].toInt() and 0x1F + } } } catch (e: 
Exception) { - Log.e(TAG, "添加帧到解码队列失败", e) + Log.e(TAG, "检查NAL类型出错", e) + } + + // 无法识别,使用传入的参数 + return -1 + } + + /** + * 解码视频帧 - 简化但严格 + */ + fun decodeFrame(frameData: ByteArray, isIFrame: Boolean): Boolean { + if (!isRunning.get() || !isDecoderConfigured.get() || frameData.isEmpty()) { + Log.w(TAG, "解码器未运行或未配置或帧数据为空") return false } + + val codec = mediaCodec ?: return false + + try { + // 检查NAL类型 + val nalType = checkNalType(frameData) + + // 实际使用的NAL类型 + val effectiveType = if (nalType != -1) nalType else if (isIFrame) NAL_UNIT_TYPE_IDR else NAL_UNIT_TYPE_NON_IDR + + // 如果是SPS或PPS且在缓存中已有相同内容,跳过 + if (effectiveType == NAL_UNIT_TYPE_SPS) { + val hash = frameData.hashCode() + if (lastSPSHash == hash) return true + lastSPSHash = hash + hasSentSPS = true + } else if (effectiveType == NAL_UNIT_TYPE_PPS) { + val hash = frameData.hashCode() + if (lastPPSHash == hash) return true + lastPPSHash = hash + hasSentPPS = true + } else if (effectiveType == NAL_UNIT_TYPE_IDR) { + hasSentIDR = true + lastIFrameTimeMs = System.currentTimeMillis() + consecutivePFrameCount = 0 + } else { + // P帧处理 + if (!hasSentIDR) { + Log.w(TAG, "丢弃P帧,因为尚未收到I帧") + return false + } + + consecutivePFrameCount++ + } + + // 记录帧信息 + frameCount++ + + // 解码帧 + val inputBufferIndex = codec.dequeueInputBuffer(TIMEOUT_US) + if (inputBufferIndex < 0) { + Log.w(TAG, "无法获取输入缓冲区,可能需要等待") + return false + } + + // 获取输入缓冲区 + val inputBuffer = codec.getInputBuffer(inputBufferIndex) + if (inputBuffer == null) { + Log.e(TAG, "获取输入缓冲区失败") + return false + } + + // 填充数据 + inputBuffer.clear() + inputBuffer.put(frameData) + + // 提交缓冲区 + val flags = if (isIFrame) MediaCodec.BUFFER_FLAG_KEY_FRAME else 0 + codec.queueInputBuffer( + inputBufferIndex, + 0, + frameData.size, + System.nanoTime() / 1000L, + flags + ) + + // 处理输出 + processOutputBuffers() + + return true + } catch (e: Exception) { + Log.e(TAG, "解码帧失败", e) + return false + } + } + + /** + * 处理所有可用的输出缓冲区 + */ + private fun processOutputBuffers() { + val codec = mediaCodec ?: return + val bufferInfo = MediaCodec.BufferInfo() + + var outputDone = false + while (!outputDone) { + val outputBufferIndex = codec.dequeueOutputBuffer(bufferInfo, 0) // 不等待,只检查当前可用的 + + when { + outputBufferIndex >= 0 -> { + val render = bufferInfo.size > 0 + codec.releaseOutputBuffer(outputBufferIndex, render) + + if (render) { + renderedFrameCount++ + lastOutputTimeMs = System.currentTimeMillis() + Log.d(TAG, "成功渲染帧 #$renderedFrameCount") + + // 通知Flutter刷新纹理 + notifyFrameAvailable() + } + } + outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> { + Log.d(TAG, "输出格式变更: ${codec.outputFormat}") + } + outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER -> { + outputDone = true + } + } + } + } + + /** + * 释放所有资源 + */ + fun release() { + Log.d(TAG, "释放解码器资源") + + isRunning.set(false) + isDecoderConfigured.set(false) + + try { + mediaCodec?.let { codec -> + try { + codec.stop() + codec.release() + } catch (e: Exception) { + Log.e(TAG, "释放MediaCodec失败", e) + } + } + mediaCodec = null + + try { + surface.release() + } catch (e: Exception) { + Log.e(TAG, "释放Surface失败", e) + } + + try { + textureEntry.release() + } catch (e: Exception) { + Log.e(TAG, "释放TextureEntry失败", e) + } + + callback = null + + Log.d(TAG, "所有资源已释放") + } catch (e: Exception) { + Log.e(TAG, "释放资源时出错", e) + } } /** @@ -341,71 +389,13 @@ class VideoDecoder( "totalFrames" to frameCount, "renderedFrames" to renderedFrameCount, "droppedFrames" to droppedFrameCount, - "queueSize" to frameQueue.size, - "hasIFrame" to hasReceivedIFrame.get(), - 
"lastIFrameAgeMs" to (System.currentTimeMillis() - lastIFrameTimestamp.get()) + "hasSentSPS" to hasSentSPS, + "hasSentPPS" to hasSentPPS, + "hasSentIDR" to hasSentIDR, + "consecutivePFrames" to consecutivePFrameCount, + "targetWidth" to config.width, + "targetHeight" to config.height, + "frameRate" to (config.frameRate ?: 0) ) } - - /** - * 释放资源 - */ - fun release() { - Log.d(TAG, "开始释放解码器资源") - - // 标记为停止运行 - isRunning.set(false) - - // 清除回调 - callback = null - - try { - // 停止解码线程 - decodeThread?.let { thread -> - thread.interrupt() - try { - thread.join(500) // 等待最多500ms - } catch (e: Exception) { - Log.w(TAG, "等待解码线程结束超时", e) - } - } - decodeThread = null - - // 释放MediaCodec - mediaCodec?.let { codec -> - try { - codec.stop() - codec.release() - Log.d(TAG, "MediaCodec已释放") - } catch (e: Exception) { - Log.e(TAG, "释放MediaCodec失败", e) - } - } - mediaCodec = null - - // 清空队列 - frameQueue.clear() - - // 释放Surface - try { - surface.release() - Log.d(TAG, "Surface已释放") - } catch (e: Exception) { - Log.e(TAG, "释放Surface失败", e) - } - - // 释放纹理 - try { - textureEntry.release() - Log.d(TAG, "TextureEntry已释放") - } catch (e: Exception) { - Log.e(TAG, "释放TextureEntry失败", e) - } - - Log.d(TAG, "所有资源释放完成") - - } catch (e: Exception) { - Log.e(TAG, "释放资源失败", e) - } - } } \ No newline at end of file diff --git a/example/lib/h264_frame_generator.dart b/example/lib/h264_frame_generator.dart deleted file mode 100644 index a8d79c3..0000000 --- a/example/lib/h264_frame_generator.dart +++ /dev/null @@ -1,137 +0,0 @@ -import 'dart:typed_data'; - -/// H.264帧生成器 -/// 生成简单的H.264帧数据,用于测试 -class H264FrameGenerator { - // 视频宽度 - final int width; - - // 视频高度 - final int height; - - // 序列参数集 (SPS) - 一个简单的示例 - // 注意:这不是一个完全有效的SPS,仅用于测试 - final List _spsData = [ - 0x00, - 0x00, - 0x00, - 0x01, - 0x67, - 0x42, - 0x00, - 0x0A, - 0xF8, - 0x41, - 0xA2, - 0x00, - 0x00, - 0x03, - 0x00, - 0x01, - 0x00, - 0x00, - 0x03, - 0x00, - 0x32, - 0x0F, - 0x18, - 0x31, - 0x8C - ]; - - // 图像参数集 (PPS) - 一个简单的示例 - // 注意:这不是一个完全有效的PPS,仅用于测试 - final List _ppsData = [0x00, 0x00, 0x00, 0x01, 0x68, 0xCE, 0x38, 0x80]; - - // I帧的起始数据示例 - 这只是一个示例头部 - final List _iFrameHeader = [ - 0x00, - 0x00, - 0x00, - 0x01, - 0x65, - 0x88, - 0x80, - 0x00, - 0x00, - 0x03, - 0x00, - 0x02, - 0x00 - ]; - - // P帧的起始数据示例 - 这只是一个示例头部 - final List _pFrameHeader = [ - 0x00, - 0x00, - 0x00, - 0x01, - 0x41, - 0x9A, - 0x1C, - 0x0D, - 0x3E, - 0x04 - ]; - - // 当前帧计数 - int _frameCount = 0; - - // 每个I帧之间的P帧数量 - final int _pFramesPerIFrame = 9; - - /// 构造函数 - H264FrameGenerator({ - required this.width, - required this.height, - }); - - /// 获取SPS和PPS数据 - Uint8List getConfigurationData() { - // 组合SPS和PPS - List data = []; - data.addAll(_spsData); - data.addAll(_ppsData); - return Uint8List.fromList(data); - } - - /// 生成下一帧数据 - /// 返回 (帧数据, 是否为I帧) - (Uint8List, bool) generateNextFrame() { - _frameCount++; - - // 每10帧插入一个I帧 - bool isIFrame = _frameCount % (_pFramesPerIFrame + 1) == 1; - - List frameData = []; - - if (isIFrame) { - // I帧: 添加SPS和PPS,并添加I帧数据 - frameData.addAll(_spsData); - frameData.addAll(_ppsData); - frameData.addAll(_iFrameHeader); - - // 添加一些模拟的I帧数据 - // 实际中,这里应该是真实的编码数据 - for (int i = 0; i < 1000; i++) { - frameData.add((i * 13) % 256); - } - } else { - // P帧: 只添加P帧数据 - frameData.addAll(_pFrameHeader); - - // 添加一些模拟的P帧数据 - // 实际中,这里应该是真实的编码数据 - for (int i = 0; i < 300; i++) { - frameData.add((i * 7 + _frameCount) % 256); - } - } - - return (Uint8List.fromList(frameData), isIFrame); - } - - /// 重置帧计数 - void reset() { - _frameCount = 0; - } -} diff --git 
a/example/lib/main.dart b/example/lib/main.dart index 6329e4d..9ecdb7b 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -1,78 +1,147 @@ import 'dart:async'; +import 'dart:io'; import 'dart:typed_data'; +import 'dart:math' as math; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:video_decode_plugin/video_decode_plugin.dart'; +// 测试图案绘制器 +class TestPatternPainter extends CustomPainter { + @override + void paint(Canvas canvas, Size size) { + final colors = [ + Colors.red, + Colors.green, + Colors.blue, + Colors.yellow, + Colors.purple, + ]; + + const int gridSize = 4; + final double cellWidth = size.width / gridSize; + final double cellHeight = size.height / gridSize; + + for (int x = 0; x < gridSize; x++) { + for (int y = 0; y < gridSize; y++) { + final paint = Paint() + ..color = colors[(x + y) % colors.length] + ..style = PaintingStyle.fill; + + final rect = + Rect.fromLTWH(x * cellWidth, y * cellHeight, cellWidth, cellHeight); + + canvas.drawRect(rect, paint); + } + } + + // 绘制中心白色十字 + final paint = Paint() + ..color = Colors.white + ..style = PaintingStyle.stroke + ..strokeWidth = 5.0; + + canvas.drawLine(Offset(size.width / 2 - 50, size.height / 2), + Offset(size.width / 2 + 50, size.height / 2), paint); + + canvas.drawLine(Offset(size.width / 2, size.height / 2 - 50), + Offset(size.width / 2, size.height / 2 + 50), paint); + } + + @override + bool shouldRepaint(covariant CustomPainter oldDelegate) { + return false; + } +} + +// 用于存储H264文件中解析出的帧 +class H264Frame { + final Uint8List data; + final FrameType type; + + H264Frame(this.data, this.type); +} + +// H264 NAL 单元类型 +class NalUnitType { + static const int UNSPECIFIED = 0; + static const int CODED_SLICE_NON_IDR = 1; // P帧 + static const int CODED_SLICE_IDR = 5; // I帧 + static const int SPS = 7; // 序列参数集 + static const int PPS = 8; // 图像参数集 + + // 获取类型名称 + static String getName(int type) { + switch (type) { + case UNSPECIFIED: + return "未指定"; + case CODED_SLICE_NON_IDR: + return "P帧"; + case CODED_SLICE_IDR: + return "I帧"; + case SPS: + return "SPS"; + case PPS: + return "PPS"; + default: + return "未知($type)"; + } + } +} + void main() { runApp(const MyApp()); } -class MyApp extends StatefulWidget { +class MyApp extends StatelessWidget { const MyApp({Key? key}) : super(key: key); @override - State createState() => _MyAppState(); + Widget build(BuildContext context) { + return MaterialApp( + title: '视频解码演示', + theme: ThemeData( + colorScheme: ColorScheme.fromSeed(seedColor: Colors.deepPurple), + useMaterial3: true, + ), + home: const VideoView(), + ); + } } -class _MyAppState extends State { +class VideoView extends StatefulWidget { + const VideoView({Key? key}) : super(key: key); + + @override + State createState() => _VideoViewState(); +} + +class _VideoViewState extends State { + // 解码器状态 int? _textureId; bool _isInitialized = false; bool _isPlaying = false; - String _statusMessage = '等待初始化...'; + String _statusText = "未初始化"; + String _error = ""; - // 视频分辨率和帧率 - final int _width = 640; - final int _height = 360; - final int _frameRate = 25; + // 帧统计 + int _renderedFrameCount = 0; + DateTime? _lastFrameTime; + double _fps = 0; + + // H264文件解析 + Uint8List? _h264FileData; + List _h264Frames = []; + int _currentFrameIndex = 0; // 解码定时器 - Timer? _decodeTimer; + Timer? _frameTimer; - // 状态监控定时器 - Timer? _statsTimer; - - // 添加一个计时器记录帧率 - Stopwatch? _frameRateWatch; - - // H.264文件数据 - Uint8List? 
_h264Data; - - // 已解码的帧数 - int _frameCount = 0; - - // 接收到的渲染帧数 - int _renderedFrameCount = 0; - - // 状态信息 - Map _decoderStats = {}; - - // 需要强制更新Texture - bool _needsTextureUpdate = false; - - // 诊断日志 - List _logs = []; - ScrollController _logScrollController = ScrollController(); - - // 帧分隔符 (NAL 单元起始码) - final List _startCode = [0, 0, 0, 1]; - - // 当前解析位置 - int _parsePosition = 0; - - // 帧索引位置列表 (每个帧的起始位置) - List _framePositions = []; - - // 帧类型列表 - List _frameTypes = []; - - // 解码器行为配置 - final int _bufferSize = 30; // 缓冲区大小(帧数) - - // 是否使用原始数据解析 - bool _useRawParsing = true; + // 日志 + final List _logs = []; + final ScrollController _logScrollController = ScrollController(); @override void initState() { @@ -80,753 +149,663 @@ class _MyAppState extends State { _loadH264File(); } - // 添加日志 - void _addLog(String message) { - print(message); // 同时打印到控制台 - setState(() { - _logs.add("[${DateTime.now().toString().split('.').first}] $message"); - - // 延迟滚动到底部 - Future.delayed(Duration(milliseconds: 100), () { - if (_logScrollController.hasClients) { - _logScrollController.animateTo( - _logScrollController.position.maxScrollExtent, - duration: Duration(milliseconds: 200), - curve: Curves.easeOut, - ); - } - }); - }); - } - - // 加载H.264文件 - Future _loadH264File() async { - try { - setState(() { - _statusMessage = '加载H.264文件中...'; - _logs = []; - }); - _addLog('开始加载H.264文件'); - - // 从assets加载示例H.264文件 - final ByteData data = await rootBundle.load('assets/demo.h264'); - _h264Data = data.buffer.asUint8List(); - - _addLog( - 'H.264文件加载完成,大小: ${(_h264Data!.length / 1024).toStringAsFixed(2)} KB'); - - setState(() { - _statusMessage = - 'H.264文件加载完成,大小: ${(_h264Data!.length / 1024).toStringAsFixed(2)} KB'; - }); - - // 预解析H.264文件 - _parseH264File(); - } catch (e) { - _addLog('H.264文件加载失败: $e'); - setState(() { - _statusMessage = 'H.264文件加载失败: $e'; - }); - } - } - - // 预解析H.264文件,找出所有帧的位置和类型 - void _parseH264File() { - if (_h264Data == null || _h264Data!.isEmpty) return; - - setState(() { - _statusMessage = '正在解析H.264文件结构...'; - }); - _addLog('开始解析H.264文件结构...'); - - _framePositions.clear(); - _frameTypes.clear(); - - // 查找所有起始码位置 - int iFrameCount = 0; - int pFrameCount = 0; - - for (int i = 0; i < _h264Data!.length - 4; i++) { - if (_isStartCode(i)) { - // 检查NAL类型 - if (i + 4 < _h264Data!.length) { - int nalType = _h264Data![i + 4] & 0x1F; - - _addLog('在位置 $i 找到NAL单元, 类型: $nalType'); - - _framePositions.add(i); - - // 根据NAL类型确定帧类型 - // 5 = IDR帧 (I帧), 7 = SPS, 8 = PPS - if (nalType == 5 || nalType == 7 || nalType == 8) { - _frameTypes.add(FrameType.iFrame); - iFrameCount++; - } else { - _frameTypes.add(FrameType.pFrame); - pFrameCount++; - } - } - } - } - - // 尝试直接添加SPS/PPS帧,可能在文件开始处 - _useRawParsing = (_framePositions.isEmpty || iFrameCount == 0); - - _addLog( - '解析完成: 找到 ${_framePositions.length} 个NAL单元, I帧: $iFrameCount, P帧: $pFrameCount'); - if (_useRawParsing) { - _addLog('警告: 未检测到有效I帧,将使用原始数据直接解码'); - } - - setState(() { - _statusMessage = 'H.264解析完成,共找到 ${_framePositions.length} 个NAL单元'; - }); - } - - // 检查是否为起始码 - bool _isStartCode(int position) { - if (position + 3 >= _h264Data!.length) return false; - return _h264Data![position] == 0 && - _h264Data![position + 1] == 0 && - _h264Data![position + 2] == 0 && - _h264Data![position + 3] == 1; - } - - // 帧回调函数 - void _onFrameAvailable(int textureId) { - if (mounted) { - _addLog('收到帧可用回调: textureId=$textureId'); - - // 必须调用setState刷新UI - setState(() { - _renderedFrameCount++; - }); - } - } - - // 开始统计监控 - void _startStatsMonitoring() { - 
_statsTimer?.cancel(); - _statsTimer = Timer.periodic(const Duration(milliseconds: 1000), (_) async { - if (_textureId != null && mounted) { - try { - final stats = await VideoDecodePlugin.getDecoderStats(_textureId!); - if (mounted) { - setState(() { - _decoderStats = stats; - }); - - // 记录关键的统计变化 - if (stats['droppedFrames'] > 0 || stats['isBuffering'] == true) { - _addLog('解码器状态: ${stats['isBuffering'] ? "缓冲中" : "播放中"}, ' + - '输入队列: ${stats['inputQueueSize']}, ' + - '输出队列: ${stats['outputQueueSize']}, ' + - '丢弃帧: ${stats['droppedFrames']}'); - } - } - } catch (e) { - _addLog('获取统计信息失败: $e'); - } - } - }); - } - - // 初始化解码器 - Future _initDecoder() async { - if (_h264Data == null) { - setState(() { - _statusMessage = 'H.264文件未加载'; - }); - _addLog('错误: H.264文件未加载'); - return; - } - - try { - // 检查平台支持 - if (!VideoDecodePlugin.isPlatformSupported) { - setState(() { - _statusMessage = '当前平台不支持视频解码'; - }); - _addLog('错误: 当前平台不支持视频解码'); - return; - } - - setState(() { - _statusMessage = '正在初始化解码器...'; - }); - _addLog('开始初始化解码器...'); - - // 配置解码器 - final config = VideoDecoderConfig( - width: _width, - height: _height, - frameRate: _frameRate, - codecType: CodecType.h264, - bufferSize: _bufferSize, - threadCount: 2, - isDebug: true, - enableHardwareDecoder: true, - ); - - _addLog( - '解码器配置: 分辨率 ${_width}x${_height}, 帧率 $_frameRate, 缓冲区 $_bufferSize'); - - // 先释放之前的解码器 - if (_textureId != null) { - _addLog('释放旧解码器: $_textureId'); - await VideoDecodePlugin.releaseDecoder(); - } - - // 初始化解码器并获取纹理ID - final textureId = await VideoDecodePlugin.initDecoder(config); - if (textureId == null) { - setState(() { - _statusMessage = '解码器初始化失败'; - }); - _addLog('错误: 解码器初始化失败,返回的textureId为null'); - return; - } - - _addLog('解码器初始化成功,textureId: $textureId'); - - // 设置帧可用回调 - VideoDecodePlugin.setFrameCallbackForTexture( - textureId, _onFrameAvailable); - _addLog('已设置帧可用回调'); - - // 开始监控统计信息 - _startStatsMonitoring(); - _addLog('已启动统计信息监控'); - - setState(() { - _textureId = textureId; - _isInitialized = true; - _frameCount = 0; - _renderedFrameCount = 0; - _parsePosition = 0; - _needsTextureUpdate = false; - _statusMessage = '解码器初始化成功,纹理ID: $_textureId'; - }); - - // 尝试立即解码第一帧I帧 - await _injectFirstIFrame(); - } catch (e) { - _addLog('初始化解码器错误: $e'); - setState(() { - _statusMessage = '初始化错误: $e'; - }); - } - } - - // 尝试立即注入第一个I帧,帮助启动解码 - Future _injectFirstIFrame() async { - if (_h264Data == null || !_isInitialized) return; - - try { - _addLog('尝试注入首个I帧进行测试...'); - - // 如果找不到有效的I帧位置,直接使用文件开头部分作为I帧 - if (_useRawParsing || _framePositions.isEmpty) { - // 直接使用前1024字节作为I帧 - int len = _h264Data!.length > 1024 ? 1024 : _h264Data!.length; - Uint8List firstFrame = Uint8List(len); - firstFrame.setRange(0, len, _h264Data!, 0); - - _addLog('使用原始数据作为I帧进行测试,大小: $len'); - bool success = - await VideoDecodePlugin.decodeFrame(firstFrame, FrameType.iFrame); - _addLog('注入测试I帧 ${success ? "成功" : "失败"}'); - return; - } - - // 找到第一个I帧的位置 - int iFramePos = -1; - for (int i = 0; i < _frameTypes.length; i++) { - if (_frameTypes[i] == FrameType.iFrame) { - iFramePos = i; - break; - } - } - - if (iFramePos == -1) { - _addLog('错误: 未找到I帧'); - return; - } - - // 获取I帧数据 - int startPos = _framePositions[iFramePos]; - int endPos = (iFramePos + 1 < _framePositions.length) - ? 
_framePositions[iFramePos + 1] - : _h264Data!.length; - - int frameSize = endPos - startPos; - _addLog('找到I帧: 位置 $startPos, 大小 $frameSize'); - - // 提取I帧数据 - Uint8List iFrameData = Uint8List(frameSize); - iFrameData.setRange(0, frameSize, _h264Data!, startPos); - - // 解码I帧 - bool success = - await VideoDecodePlugin.decodeFrame(iFrameData, FrameType.iFrame); - _addLog('注入I帧 ${success ? "成功" : "失败"}'); - } catch (e) { - _addLog('注入I帧失败: $e'); - } - } - - // 开始播放 - void _startPlaying() { - if (!_isInitialized || _isPlaying || _h264Data == null) { - return; - } - - setState(() { - _isPlaying = true; - _statusMessage = '开始播放...'; - }); - _addLog('开始播放, 解码位置: $_parsePosition'); - - // 添加强制刷新的逻辑 - _addDummyFrame(); - - // 尝试强制发送第一个I帧 - _injectFirstIFrame().then((_) { - // 重置帧率计时器 - _frameRateWatch = Stopwatch()..start(); - - // 创建定时器以固定帧率解码 - _decodeTimer = - Timer.periodic(Duration(milliseconds: 1000 ~/ _frameRate), (_) { - _decodeNextFrame(); - }); - }); - } - - // 添加一个虚拟帧进行测试 - void _addDummyFrame() { - _addLog('添加测试图形'); - - // 创建一个虚拟帧进行测试 - if (_textureId != null) { - // 强制组件立即重绘 - setState(() { - _renderedFrameCount++; - }); - - // 延迟后再次强制刷新 - Future.delayed(Duration(milliseconds: 500), () { - if (mounted) { - setState(() { - _renderedFrameCount++; - }); - } - }); - } - } - - // 停止播放 - void _stopPlaying() { - _decodeTimer?.cancel(); - _decodeTimer = null; - _frameRateWatch?.stop(); - - setState(() { - _isPlaying = false; - _statusMessage = '播放已停止'; - }); - _addLog('播放已停止'); - } - - // 解码下一帧 - Future _decodeNextFrame() async { - if (!_isInitialized || _h264Data == null) { - _stopPlaying(); - _addLog('解码器未初始化或H264数据为空,停止播放'); - return; - } - - // 如果解析失败,尝试使用原始数据 - if (_useRawParsing) { - await _decodeRawData(); - return; - } - - // 正常解析模式 - if (_framePositions.isEmpty) { - _stopPlaying(); - _addLog('没有找到有效帧,停止播放'); - return; - } - - try { - // 检查是否播放完毕 - if (_parsePosition >= _framePositions.length) { - // 循环播放,重新开始 - _parsePosition = 0; - setState(() { - _statusMessage = '播放完成,重新开始'; - }); - _addLog('播放完成,循环回到开始位置'); - } - - // 获取当前帧位置 - int currentPos = _framePositions[_parsePosition]; - - // 计算帧大小 (到下一帧开始或文件结束) - int nextPos = _parsePosition + 1 < _framePositions.length - ? _framePositions[_parsePosition + 1] - : _h264Data!.length; - - int frameSize = nextPos - currentPos; - - // 提取帧数据 - Uint8List frameData = Uint8List(frameSize); - frameData.setRange(0, frameSize, _h264Data!, currentPos); - - // 获取帧类型 - FrameType frameType = _frameTypes[_parsePosition]; - - // 如果是第一帧或每隔一定数量的帧,记录一下详细信息 - if (_frameCount % 10 == 0 || _frameCount < 5) { - String hexPrefix = ''; - if (frameData.length >= 8) { - hexPrefix = '0x' + - frameData - .sublist(0, 8) - .map((e) => e.toRadixString(16).padLeft(2, '0')) - .join(''); - } - - _addLog( - '解码帧 #$_frameCount, 类型: ${frameType == FrameType.iFrame ? "I" : "P"}帧, ' + - '大小: ${(frameSize / 1024).toStringAsFixed(2)} KB, 前缀: $hexPrefix'); - } - - // 解码帧 - final success = await VideoDecodePlugin.decodeFrame(frameData, frameType); - - // 如果前几帧解码失败,记录详细错误 - if (!success && _frameCount < 5) { - _addLog( - '解码失败: 帧 #$_frameCount, 类型: ${frameType == FrameType.iFrame ? 
"I" : "P"}帧'); - } - - // 更新状态 - _frameCount++; - _parsePosition++; - - if (mounted) { - // 计算实际帧率 - String frameRateInfo = ''; - if (_frameRateWatch != null && - _frameRateWatch!.elapsedMilliseconds > 0) { - double actualFps = - _frameCount / (_frameRateWatch!.elapsedMilliseconds / 1000); - frameRateInfo = ', 实际帧率: ${actualFps.toStringAsFixed(1)} fps'; - } - - setState(() { - _statusMessage = - '正在播放: 第${_parsePosition}/${_framePositions.length}帧, ' + - '类型: ${frameType == FrameType.iFrame ? "I" : "P"}帧, ' + - '大小: ${(frameSize / 1024).toStringAsFixed(2)} KB, ' + - '${success ? "成功" : "失败"}$frameRateInfo'; - }); - } - - // 检查是否播放完毕 - if (_parsePosition >= _framePositions.length) { - _stopPlaying(); - setState(() { - _statusMessage = '播放完成'; - _parsePosition = 0; - }); - _addLog('播放完成,已解码 $_frameCount 帧'); - } - } catch (e) { - _addLog('解码错误: $e'); - setState(() { - _statusMessage = '解码错误: $e'; - }); - _stopPlaying(); - } - } - - // 使用原始数据直接解码,每次取一小块 - Future _decodeRawData() async { - try { - // 计算当前位置 - int currentPos = _parsePosition * 1024; // 每次取1KB数据 - - // 检查是否到达文件末尾 - if (currentPos >= _h264Data!.length) { - _parsePosition = 0; - currentPos = 0; - _addLog('原始解码模式:已到达文件末尾,重新开始'); - } - - // 计算块大小 - int blockSize = 1024; - if (currentPos + blockSize > _h264Data!.length) { - blockSize = _h264Data!.length - currentPos; - } - - // 提取数据块 - Uint8List blockData = Uint8List(blockSize); - blockData.setRange(0, blockSize, _h264Data!, currentPos); - - // 每10帧记录一下进度 - if (_frameCount % 10 == 0) { - _addLog('原始解码模式:解码块 #$_frameCount, 位置: $currentPos, 大小: $blockSize'); - } - - // 解码数据块,强制当作I帧 - bool success = - await VideoDecodePlugin.decodeFrame(blockData, FrameType.iFrame); - - // 更新计数 - _frameCount++; - _parsePosition++; - - // 更新状态 - if (mounted) { - setState(() { - _statusMessage = '原始模式播放: 位置 $currentPos/${_h264Data!.length}, ' + - '大小: ${(blockSize / 1024).toStringAsFixed(2)} KB, ' + - '${success ? "成功" : "失败"}'; - }); - } - } catch (e) { - _addLog('原始模式解码错误: $e'); - _stopPlaying(); - } - } - - // 释放解码器资源 - Future _releaseDecoder() async { - _stopPlaying(); - _statsTimer?.cancel(); - _statsTimer = null; - - if (!_isInitialized) { - return; - } - - try { - _addLog('开始释放解码器'); - final bool success = await VideoDecodePlugin.releaseDecoder(); - setState(() { - _isInitialized = !success; - _textureId = null; - _statusMessage = success ? '解码器已释放' : '解码器释放失败'; - }); - _addLog('解码器释放 ${success ? "成功" : "失败"}'); - } catch (e) { - _addLog('释放解码器错误: $e'); - setState(() { - _statusMessage = '释放解码器错误: $e'; - }); - } - } - @override void dispose() { _stopPlaying(); - _statsTimer?.cancel(); _releaseDecoder(); + _frameTimer?.cancel(); super.dispose(); } - // 构建统计信息UI - Widget _buildStatsDisplay() { - if (_decoderStats.isEmpty) { - return const Text('无统计信息'); + // 加载H264文件 + Future _loadH264File() async { + try { + _log("正在加载 demo.h264 文件..."); + final ByteData data = await rootBundle.load('assets/demo.h264'); + setState(() { + _h264FileData = data.buffer.asUint8List(); + }); + _log("H264文件加载完成: ${_h264FileData!.length} 字节"); + + // 解析H264文件 + _parseH264File(); + } catch (e) { + _log("加载H264文件失败: $e"); + setState(() { + _error = "加载H264文件失败: $e"; + }); } - - // 从统计信息中提取有用的字段 - final bool isBuffering = _decoderStats['isBuffering'] ?? false; - final int totalFrames = _decoderStats['totalFrames'] ?? 0; - final int renderedFrames = _decoderStats['renderedFrames'] ?? 0; - final int droppedFrames = _decoderStats['droppedFrames'] ?? 0; - final int inputQueueSize = _decoderStats['inputQueueSize'] ?? 
0; - final int outputQueueSize = _decoderStats['outputQueueSize'] ?? 0; - final int bufferFillPercentage = _decoderStats['bufferFillPercentage'] ?? 0; - - return Column( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - Text('解码状态: ${isBuffering ? "缓冲中" : "播放中"}', - style: TextStyle( - fontWeight: FontWeight.bold, - color: isBuffering ? Colors.orange : Colors.green)), - Text('已解码帧: $totalFrames, 渲染帧: $renderedFrames, 丢弃帧: $droppedFrames'), - Text( - '输入队列: $inputQueueSize, 输出队列: $outputQueueSize, 缓冲填充率: $bufferFillPercentage%'), - Text('Flutter接收到的帧数: $_renderedFrameCount, 已解析帧位置: $_parsePosition'), - ], - ); } - // 构建日志显示区域 - Widget _buildLogDisplay() { - return Container( - height: 150, - decoration: BoxDecoration( - color: Colors.black, - borderRadius: BorderRadius.circular(8), - ), - margin: const EdgeInsets.symmetric(horizontal: 20), - padding: const EdgeInsets.all(8), - child: ListView.builder( - controller: _logScrollController, - itemCount: _logs.length, - itemBuilder: (context, index) { - return Text( - _logs[index], - style: TextStyle( - color: _logs[index].contains('错误') - ? Colors.red - : _logs[index].contains('警告') - ? Colors.yellow - : Colors.green, - fontSize: 12, - fontFamily: 'monospace', + // 解析H264文件,提取NAL单元 + void _parseH264File() { + if (_h264FileData == null) return; + + _log("开始解析H264文件..."); + + List frames = []; + + // 查找起始码 0x00000001 或 0x000001 + int startIndex = 0; + bool hasSps = false; + bool hasPps = false; + + while (startIndex < _h264FileData!.length - 4) { + // 查找下一个起始码 + int nextStartIndex = _findStartCode(_h264FileData!, startIndex + 3); + if (nextStartIndex == -1) { + nextStartIndex = _h264FileData!.length; + } + + // 提取NAL单元,跳过起始码(3或4字节) + int skipBytes = (_h264FileData![startIndex] == 0x00 && + _h264FileData![startIndex + 1] == 0x00 && + _h264FileData![startIndex + 2] == 0x00 && + _h264FileData![startIndex + 3] == 0x01) + ? 
4 + : 3; + + if (nextStartIndex > startIndex + skipBytes) { + // 获取NAL类型 + int nalType = _h264FileData![startIndex + skipBytes] & 0x1F; + + // 创建NAL单元数据 + var nalData = Uint8List(nextStartIndex - startIndex); + for (int i = 0; i < nalData.length; i++) { + nalData[i] = _h264FileData![startIndex + i]; + } + + // 根据NAL类型分类 + switch (nalType) { + case NalUnitType.SPS: + _log("找到SPS: 位置=${startIndex}, 长度=${nalData.length}"); + hasSps = true; + frames.add(H264Frame(nalData, FrameType.iFrame)); + break; + case NalUnitType.PPS: + _log("找到PPS: 位置=${startIndex}, 长度=${nalData.length}"); + hasPps = true; + frames.add(H264Frame(nalData, FrameType.iFrame)); + break; + case NalUnitType.CODED_SLICE_IDR: + _log("找到I帧: 位置=${startIndex}, 长度=${nalData.length}"); + frames.add(H264Frame(nalData, FrameType.iFrame)); + break; + case NalUnitType.CODED_SLICE_NON_IDR: + frames.add(H264Frame(nalData, FrameType.pFrame)); + break; + default: + // 其他类型的NAL单元也添加进去 + frames.add(H264Frame(nalData, FrameType.pFrame)); + break; + } + } + + startIndex = nextStartIndex; + } + + setState(() { + _h264Frames = frames; + }); + + _log("H264文件解析完成,找到 ${frames.length} 个帧,包含SPS=${hasSps}, PPS=${hasPps}"); + } + + // 查找起始码的辅助方法 + int _findStartCode(Uint8List data, int offset) { + for (int i = offset; i < data.length - 3; i++) { + // 检查是否为0x000001 + if (data[i] == 0x00 && data[i + 1] == 0x00 && data[i + 2] == 0x01) { + return i; + } + // 检查是否为0x00000001 + if (i < data.length - 4 && + data[i] == 0x00 && + data[i + 1] == 0x00 && + data[i + 2] == 0x00 && + data[i + 3] == 0x01) { + return i; + } + } + return -1; + } + + // 查找NAL类型的辅助方法(用于调试) + int _getNalType(Uint8List data) { + // 打印头几个字节 + String headerBytes = ''; + for (int i = 0; i < math.min(16, data.length); i++) { + headerBytes += '${data[i].toRadixString(16).padLeft(2, '0')} '; + } + _log("帧数据头: $headerBytes"); + + // 尝试找到起始码位置 + int nalOffset = -1; + + // 检查标准起始码 + if (data.length > 4 && + data[0] == 0x00 && + data[1] == 0x00 && + data[2] == 0x00 && + data[3] == 0x01) { + nalOffset = 4; + _log("找到4字节起始码 (0x00000001) 位置: 0"); + } else if (data.length > 3 && + data[0] == 0x00 && + data[1] == 0x00 && + data[2] == 0x01) { + nalOffset = 3; + _log("找到3字节起始码 (0x000001) 位置: 0"); + } else { + // 尝试搜索起始码 + for (int i = 0; i < data.length - 4; i++) { + if (data[i] == 0x00 && + data[i + 1] == 0x00 && + data[i + 2] == 0x00 && + data[i + 3] == 0x01) { + nalOffset = i + 4; + _log("在偏移量 $i 处找到4字节起始码"); + break; + } else if (i < data.length - 3 && + data[i] == 0x00 && + data[i + 1] == 0x00 && + data[i + 2] == 0x01) { + nalOffset = i + 3; + _log("在偏移量 $i 处找到3字节起始码"); + break; + } + } + } + + // 如果找到了起始码 + if (nalOffset >= 0 && nalOffset < data.length) { + int nalType = data[nalOffset] & 0x1F; + _log("解析NAL类型: ${NalUnitType.getName(nalType)} ($nalType)"); + return nalType; + } + + _log("无法解析NAL类型"); + return -1; + } + + // 当新帧可用时调用 + void _onFrameAvailable(int textureId) { + if (!mounted) return; + + _log("收到帧回调 - 渲染帧 ${_renderedFrameCount + 1}"); + + // 更新帧时间用于FPS计算 + _lastFrameTime = DateTime.now(); + + // 立即更新UI以显示新帧 + setState(() { + _renderedFrameCount++; + }); + } + + Future _initializeDecoder() async { + if (_isInitialized) { + await _releaseDecoder(); + } + + _log("正在初始化解码器"); + + try { + final config = VideoDecoderConfig( + width: 640, + height: 480, + codecType: CodecType.h264, + frameRate: 30, + bufferSize: 30, + isDebug: true, // 打开调试日志 + ); + + final textureId = await VideoDecodePlugin.initDecoder(config); + + if (textureId != null) { + _textureId = textureId; + + // 设置帧回调 + 
VideoDecodePlugin.setFrameCallbackForTexture( + textureId, _onFrameAvailable); + + setState(() { + _isInitialized = true; + _error = ""; + _statusText = "就绪"; + _renderedFrameCount = 0; // 重置帧计数 + }); + + _log("解码器初始化成功,纹理ID: $_textureId"); + + // 自动发送测试帧以触发渲染 + await _sendTestIFrame(); + } else { + setState(() { + _error = "获取纹理ID失败"; + _statusText = "初始化失败"; + }); + _log("解码器初始化失败 - 返回空纹理ID"); + } + } catch (e) { + setState(() { + _error = e.toString(); + _statusText = "初始化错误"; + }); + _log("解码器初始化错误: $e"); + } + } + + // 添加一个测试I帧来触发渲染 + Future _sendTestIFrame() async { + if (_textureId == null || !_isInitialized) { + _log("解码器未准备好,无法发送测试帧"); + return; + } + + _log("生成并发送测试I帧"); + + // 创建一个简单的NAL单元 (IDR帧) + // 5字节的起始码 + NAL类型5(I帧) + 一些简单的数据 + List testFrameData = [ + 0x00, 0x00, 0x00, 0x01, 0x65, // 起始码 + NAL类型 (0x65 = 101|0101 -> 类型5) + 0x88, 0x84, 0x21, 0x43, 0x14, 0x56, 0x32, 0x80 // 一些随机数据 + ]; + + Uint8List testFrame = Uint8List.fromList(testFrameData); + + try { + _log("发送测试I帧: ${testFrame.length} 字节"); + + bool success = await VideoDecodePlugin.decodeFrameForTexture( + _textureId!, testFrame, FrameType.iFrame); + + _log("测试I帧发送结果: ${success ? '成功' : '失败'}"); + } catch (e) { + _log("发送测试帧错误: $e"); + } + } + + Future _releaseDecoder() async { + if (_textureId != null) { + _log("正在释放解码器资源"); + + try { + await VideoDecodePlugin.releaseDecoderForTexture(_textureId!); + + setState(() { + _textureId = null; + _isInitialized = false; + _statusText = "已释放"; + }); + + _log("解码器资源释放成功"); + } catch (e) { + _log("释放解码器错误: $e"); + } + } + } + + Future _startPlaying() async { + if (!_isInitialized || _isPlaying || _h264Frames.isEmpty) { + _log( + "播放条件未满足: 初始化=${_isInitialized}, 播放中=${_isPlaying}, 帧数量=${_h264Frames.length}"); + return; + } + + _log("开始播放H264视频"); + + try { + // 重置帧率跟踪 + _renderedFrameCount = 0; + _lastFrameTime = null; + _fps = 0; + _currentFrameIndex = 0; + + // 确保首先发送SPS和PPS + await _sendSpsAndPps(); + + // 开始解码帧 + _startDecodingFrames(); + + setState(() { + _isPlaying = true; + _statusText = "播放中"; + }); + + _log("播放已开始"); + } catch (e) { + _log("播放开始错误: $e"); + } + } + + // 发送SPS和PPS + Future _sendSpsAndPps() async { + for (int i = 0; i < math.min(10, _h264Frames.length); i++) { + H264Frame frame = _h264Frames[i]; + + // 检查是否是SPS或PPS (通过检查NAL类型) + if (frame.data.length > 4) { + int skipBytes = (frame.data[0] == 0x00 && + frame.data[1] == 0x00 && + frame.data[2] == 0x00 && + frame.data[3] == 0x01) + ? 4 + : 3; + + if (skipBytes < frame.data.length) { + int nalType = frame.data[skipBytes] & 0x1F; + + if (nalType == NalUnitType.SPS || nalType == NalUnitType.PPS) { + _log("发送${nalType == NalUnitType.SPS ? 
'SPS' : 'PPS'}数据"); + await _decodeNextFrame(frame); + // 发送后等待一小段时间,确保解码器处理 + await Future.delayed(Duration(milliseconds: 30)); + } + } + } + } + } + + void _stopPlaying() { + if (!_isPlaying) return; + + _log("停止播放"); + + _frameTimer?.cancel(); + _frameTimer = null; + + setState(() { + _isPlaying = false; + _statusText = "已停止"; + }); + + _log("播放已停止"); + } + + void _startDecodingFrames() { + _log("开始解码视频帧"); + + // 使用更低的帧率更稳定 + const int frameIntervalMs = 50; // 20 fps + + _frameTimer = + Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) async { + if (_currentFrameIndex >= _h264Frames.length) { + _log("所有帧已解码,重新开始"); + _currentFrameIndex = 0; + + // 重新发送SPS和PPS + await _sendSpsAndPps(); + } + + final frame = _h264Frames[_currentFrameIndex]; + await _decodeNextFrame(frame); + _currentFrameIndex++; + }); + } + + Future _decodeNextFrame(H264Frame frame) async { + if (_textureId == null || !_isInitialized) return; + + try { + // 检查帧的NAL类型(仅用于调试) + int nalType = _getNalType(frame.data); + + final success = await VideoDecodePlugin.decodeFrameForTexture( + _textureId!, + frame.data, + frame.type, + ); + + if (!success) { + _log( + "解码帧失败,索引 $_currentFrameIndex (${frame.type}), NAL类型: ${NalUnitType.getName(nalType)}"); + } else { + _log( + "解码帧成功,索引 $_currentFrameIndex (${frame.type}), NAL类型: ${NalUnitType.getName(nalType)}"); + } + } catch (e) { + _log("解码帧错误: $e"); + } + } + + void _log(String message) { + final timestamp = DateTime.now().toString().split('.').first; + final logMessage = "[$timestamp] $message"; + + setState(() { + _logs.add(logMessage); + if (_logs.length > 100) { + _logs.removeAt(0); + } + }); + + // 滚动到底部 + WidgetsBinding.instance.addPostFrameCallback((_) { + if (_logScrollController.hasClients) { + _logScrollController.animateTo( + _logScrollController.position.maxScrollExtent, + duration: Duration(milliseconds: 200), + curve: Curves.easeOut, + ); + } + }); + } + + Widget _buildVideoDisplay() { + if (_textureId == null) { + return Center( + child: Container( + width: 640, + height: 480, + color: Colors.black, + child: CustomPaint( + painter: TestPatternPainter(), + child: Center( + child: Text( + '无可用纹理', + style: TextStyle(color: Colors.white), + ), ), - ); - }, - ), + ), + ), + ); + } + + return Stack( + fit: StackFit.expand, + children: [ + // 背景色 + Container(color: Colors.black), + + // 测试图案 - 如果没有渲染任何帧则显示 + if (_renderedFrameCount == 0) + CustomPaint(painter: TestPatternPainter()), + + // 视频纹理 - 使用RepaintBoundary和ValueKey确保正确更新 + RepaintBoundary( + child: Texture( + textureId: _textureId!, + filterQuality: FilterQuality.medium, + key: ValueKey('texture_${_renderedFrameCount}'), + ), + ), + + // 显示帧计数 - 调试用 + Positioned( + right: 10, + top: 10, + child: Container( + padding: EdgeInsets.all(5), + color: Colors.black.withOpacity(0.5), + child: Text( + '帧: $_renderedFrameCount', + style: TextStyle(color: Colors.white, fontSize: 12), + ), + ), + ), + ], ); } @override Widget build(BuildContext context) { - return MaterialApp( - theme: ThemeData( - primarySwatch: Colors.blue, - visualDensity: VisualDensity.adaptivePlatformDensity, + // 更新FPS计算 + if (_lastFrameTime != null && _renderedFrameCount > 0) { + final now = DateTime.now(); + final elapsed = now.difference(_lastFrameTime!).inMilliseconds; + if (elapsed > 0) { + _fps = 1000 / elapsed; + } + } + + return Scaffold( + appBar: AppBar( + title: Text('视频解码插件演示'), + backgroundColor: Theme.of(context).colorScheme.primaryContainer, ), - home: Scaffold( - appBar: AppBar( - title: const Text('视频解码插件示例'), - ), - 
body: Center( - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - // 显示解码的视频,使用Flutter的Texture组件 - if (_textureId != null) - Container( - width: _width.toDouble(), - height: _height.toDouble(), - decoration: BoxDecoration( - border: Border.all(color: Colors.grey), - borderRadius: BorderRadius.circular(8), - ), - clipBehavior: Clip.antiAlias, - child: Stack( + body: SafeArea( + child: Column( + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + // 视频显示区域 - 使用AspectRatio控制大小 + AspectRatio( + aspectRatio: 16 / 9, + child: Container( + decoration: BoxDecoration( + border: Border.all(color: Colors.grey), + ), + child: _buildVideoDisplay(), + ), + ), + + // 控制面板和状态信息 - 使用Expanded和SingleChildScrollView + Expanded( + child: Padding( + padding: const EdgeInsets.all(8.0), + child: SingleChildScrollView( + child: Column( + crossAxisAlignment: CrossAxisAlignment.stretch, children: [ - RepaintBoundary( - child: Texture( - textureId: _textureId!, - filterQuality: FilterQuality.medium, - ), - ), - // 添加一个覆盖层用于触发刷新 - if (_renderedFrameCount > 0) - Positioned.fill( - child: IgnorePointer( - child: Opacity( - opacity: 0.0, - child: Container( - color: Colors.transparent, - key: ValueKey( - _renderedFrameCount), // 通过key强制刷新 + // 状态信息区 - 使用Card使其更紧凑 + Card( + child: Padding( + padding: const EdgeInsets.all(8.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + mainAxisAlignment: + MainAxisAlignment.spaceBetween, + children: [ + Text('状态: $_statusText', + style: TextStyle( + fontWeight: FontWeight.bold)), + Text('FPS: ${_fps.toStringAsFixed(1)}'), + ], ), - ), + if (_error.isNotEmpty) + Text('错误: $_error', + style: TextStyle( + color: Colors.red, + fontWeight: FontWeight.bold)), + Row( + mainAxisAlignment: + MainAxisAlignment.spaceBetween, + children: [ + Text('已渲染帧数: $_renderedFrameCount'), + Text('解析的帧数: ${_h264Frames.length}'), + ], + ), + Text( + 'H264文件大小: ${(_h264FileData?.length ?? 0) / 1024} KB'), + ], ), ), + ), + + // 控制按钮区 + Card( + child: Padding( + padding: const EdgeInsets.all(8.0), + child: Wrap( + spacing: 8, + runSpacing: 8, + children: [ + ElevatedButton( + onPressed: + _isInitialized ? null : _initializeDecoder, + child: Text('初始化'), + ), + ElevatedButton( + onPressed: (!_isInitialized || + _isPlaying || + _h264Frames.isEmpty) + ? null + : _startPlaying, + child: Text('播放'), + ), + ElevatedButton( + onPressed: (!_isPlaying) ? null : _stopPlaying, + child: Text('停止'), + ), + ElevatedButton( + onPressed: (_textureId == null) + ? null + : _releaseDecoder, + child: Text('释放'), + ), + ElevatedButton( + onPressed: () { + setState(() { + // 强制重绘 + _renderedFrameCount++; + }); + }, + child: Text('刷新'), + ), + ], + ), + ), + ), + + // 日志区域 + SizedBox(height: 8), + Text('日志:', + style: TextStyle(fontWeight: FontWeight.bold)), + SizedBox(height: 4), + Container( + height: 280, + decoration: BoxDecoration( + color: Colors.black, + border: Border.all(color: Colors.grey), + borderRadius: BorderRadius.circular(4.0), + ), + padding: EdgeInsets.all(4), + child: ListView.builder( + controller: _logScrollController, + itemCount: _logs.length, + itemBuilder: (context, index) { + return Text( + _logs[index], + style: TextStyle( + color: _logs[index].contains('错误') + ? 
Colors.red + : Colors.green, + fontSize: 11, + fontFamily: 'monospace', + ), + ); + }, + ), + ), ], ), - ) - else - Container( - width: _width.toDouble(), - height: _height.toDouble(), - decoration: BoxDecoration( - border: Border.all(color: Colors.grey), - borderRadius: BorderRadius.circular(8), - color: Colors.black, - ), - child: Center( - child: Text( - '未初始化', - style: TextStyle(color: Colors.white), - ), - ), ), - const SizedBox(height: 20), - Text( - _statusMessage, - textAlign: TextAlign.center, - style: TextStyle(fontWeight: FontWeight.bold), ), - const SizedBox(height: 10), - // 显示解码统计信息 - Container( - width: double.infinity, - padding: const EdgeInsets.all(8), - margin: const EdgeInsets.symmetric(horizontal: 20), - decoration: BoxDecoration( - border: Border.all(color: Colors.grey.shade300), - borderRadius: BorderRadius.circular(8), - color: Colors.grey.shade50, - ), - child: _buildStatsDisplay(), - ), - const SizedBox(height: 10), - // 显示日志 - _buildLogDisplay(), - const SizedBox(height: 20), - // 控制按钮 - Row( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - if (!_isInitialized) - ElevatedButton( - onPressed: _initDecoder, - child: const Text('初始化解码器'), - ) - else if (!_isPlaying) - ElevatedButton( - onPressed: _startPlaying, - child: const Text('播放'), - ) - else - ElevatedButton( - onPressed: _stopPlaying, - child: const Text('停止'), - ), - const SizedBox(width: 20), - ElevatedButton( - onPressed: _isInitialized ? _releaseDecoder : null, - child: const Text('释放解码器'), - ), - if (_isInitialized) - Padding( - padding: const EdgeInsets.only(left: 20.0), - child: ElevatedButton( - onPressed: () { - setState(() {}); // 强制重绘 - _addLog('触发强制刷新'); - }, - child: const Text('强制刷新'), - ), - ), - ], - ), - ], - ), + ), + ], ), ), ); diff --git a/example/pubspec.yaml b/example/pubspec.yaml index 63f5711..800ba6e 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: # permission_handler: ^12.0.0+1 # The following adds the Cupertino Icons font to your application. # Use with the CupertinoIcons class for iOS style icons. - cupertino_icons: ^1.0.6 + cupertino_icons: ^1.0.2 dev_dependencies: integration_test: @@ -56,7 +56,7 @@ flutter: # the material Icons class. uses-material-design: true - # 添加示例资源 + # 添加assets资源 assets: - assets/demo.h264
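
A minimal Dart sketch of the feed order the updated README describes (SPS → PPS → IDR, then P frames), written against the API this patch exposes (`VideoDecodePlugin.decodeFrameForTexture`, `FrameType`). The helper name `feedAnnexBStream` and the assumption that `nalUnits` already holds complete Annex B NAL units (start codes included) are illustrative.

```dart
import 'dart:typed_data';

import 'package:video_decode_plugin/video_decode_plugin.dart';

/// Sends complete Annex B NAL units (start codes included) to one decoder
/// instance in the order the README recommends: SPS and PPS first, then the
/// IDR frame, then non-IDR (P) slices.
Future<void> feedAnnexBStream(int textureId, List<Uint8List> nalUnits) async {
  var sentIdr = false;

  for (final nal in nalUnits) {
    // Locate the NAL header byte behind a 4-byte (00 00 00 01) or
    // 3-byte (00 00 01) start code.
    int offset;
    if (nal.length >= 5 &&
        nal[0] == 0 &&
        nal[1] == 0 &&
        nal[2] == 0 &&
        nal[3] == 1) {
      offset = 4;
    } else if (nal.length >= 4 && nal[0] == 0 && nal[1] == 0 && nal[2] == 1) {
      offset = 3;
    } else {
      continue; // Not a start-code-prefixed NAL unit.
    }

    final nalType = nal[offset] & 0x1F;

    if (nalType == 7 || nalType == 8 || nalType == 5) {
      // SPS (7), PPS (8) and IDR (5) are all submitted as I-frame data.
      final ok = await VideoDecodePlugin.decodeFrameForTexture(
          textureId, nal, FrameType.iFrame);
      if (ok && nalType == 5) sentIdr = true;
    } else if (sentIdr) {
      // P frames are only decodable once an IDR frame has been accepted.
      await VideoDecodePlugin.decodeFrameForTexture(
          textureId, nal, FrameType.pFrame);
    }
  }
}
```

Holding back non-IDR slices until an IDR has been accepted mirrors the check in `VideoDecoder.decodeFrame`, which drops P frames while `hasSentIDR` is false.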
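
The rewritten example no longer polls decoder statistics the way the previous one did. Below is a sketch of how the stats call could be polled, assuming the map keys returned by `VideoDecoder.getStats()` in this patch (`renderedFrames`, `droppedFrames`, `consecutivePFrames`); the helper name and the one-second interval are illustrative.

```dart
import 'dart:async';

import 'package:video_decode_plugin/video_decode_plugin.dart';

/// Polls getDecoderStats() once per second and passes a short summary line to
/// [onStats]. Cancel the returned timer when the decoder is released.
Timer startStatsPolling(int textureId, void Function(String) onStats) {
  return Timer.periodic(const Duration(seconds: 1), (_) async {
    final stats = await VideoDecodePlugin.getDecoderStats(textureId);
    final rendered = stats['renderedFrames'] ?? 0;
    final dropped = stats['droppedFrames'] ?? 0;
    final consecutiveP = stats['consecutivePFrames'] ?? 0;
    onStats('rendered=$rendered dropped=$dropped consecutiveP=$consecutiveP');
  });
}
```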
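
The example's `build()` derives `_fps` from the gap to the most recent frame at paint time, which gives a jittery instantaneous value. One steadier alternative (a sketch only; the class name is hypothetical) counts callbacks over a fixed window and could be driven from `_onFrameAvailable`:

```dart
import 'dart:async';

/// Turns per-frame callbacks into a once-per-second FPS reading. The class
/// name is hypothetical; it only assumes something calls [onFrame] once per
/// rendered frame, e.g. from _onFrameAvailable(textureId).
class FpsCounter {
  int _framesInWindow = 0;
  double fps = 0;
  Timer? _timer;

  /// Starts a one-second sampling window.
  void start() {
    _timer = Timer.periodic(const Duration(seconds: 1), (_) {
      fps = _framesInWindow.toDouble();
      _framesInWindow = 0;
    });
  }

  /// Call once for every rendered frame.
  void onFrame() => _framesInWindow++;

  void dispose() => _timer?.cancel();
}
```

Call `onFrame()` from the frame callback and read `fps` when rendering the status row.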