fix: 增加native解码插件支持720P对讲页面

This commit is contained in:
liyi 2025-04-30 17:55:57 +08:00
parent e806987fa0
commit 357eaac746
5 changed files with 115 additions and 225 deletions

View File

@ -1185,7 +1185,7 @@ abstract class AppRouters {
page: () => const DoubleLockLinkPage()),
GetPage<dynamic>(
name: Routers.starChartTalkView, page: () => const TalkViewPage()),
// GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()), //
GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()), // webview播放页面
GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()), //
// GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()), // webview播放页面
];
}

View File

@ -13,7 +13,11 @@ class TalkConstant {
audioType: [AudioTypeE.G711],
);
static TalkExpectReq H264Expect = TalkExpectReq(
videoType: [VideoTypeE.H264],
videoType: [VideoTypeE.H264_720P],
audioType: [AudioTypeE.G711],
);
static TalkExpectReq H264_720P_Expect = TalkExpectReq(
videoType: [VideoTypeE.H264_720P],
audioType: [AudioTypeE.G711],
);
}

View File

@ -60,25 +60,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// I帧序号
final Set<int> _decodedIFrames = <int>{};
//
int? _previousFrameTimestamp;
int _flutterToNativeFrameCount = 0;
int _lastFlutterToNativePrintTime = 0;
int _networkFrameCount = 0;
int _lastNetworkPrintTime = 0;
Timer? _frameRefreshTimer;
bool _isFrameAvailable = true;
int _renderedFrameCount = 0;
int _lastRenderedFrameTime = 0;
// I帧前
final List<List<int>> _preIFrameCache = [];
bool _hasWrittenFirstIFrame = false;
bool _isStartNative = false;
// SPS/PPS状态追踪变量
bool hasSps = false;
bool hasPps = false;
@ -87,35 +72,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
List<int>? spsCache;
List<int>? ppsCache;
void _setupFrameRefresh() {
// 16ms对应约60fps
_frameRefreshTimer =
Timer.periodic(const Duration(milliseconds: 16), (timer) {
if (_isFrameAvailable) {
_isFrameAvailable = false;
_renderedFrameCount++;
//
int now = DateTime.now().millisecondsSinceEpoch;
if (now - _lastRenderedFrameTime > 1000) {
print('[Flutter] 每秒渲染帧数: $_renderedFrameCount');
_renderedFrameCount = 0;
_lastRenderedFrameTime = now;
}
// Flutter重建widget
WidgetsBinding.instance.scheduleFrame();
}
});
}
void onFrameAvailable() {
_isFrameAvailable = true;
}
// frameSeq
int? _lastFrameSeq;
//
Future<void> _initVideoDecoder() async {
try {
state.isLoading.value = true;
//
final config = VideoDecoderConfig(
width: 1280,
@ -128,10 +91,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (textureId != null) {
state.textureId.value = textureId;
AppLog.log('视频解码器初始化成功textureId=$textureId');
//
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
state.isLoading.value = false;
AppLog.log('已经开始渲染=======');
});
} else {
AppLog.log('视频解码器初始化失败');
}
//
_startFrameProcessTimer();
} catch (e) {
AppLog.log('初始化视频解码器错误: $e');
@ -178,15 +146,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int frameSeq,
int frameSeqI,
) {
_networkFrameCount++;
int now = DateTime.now().millisecondsSinceEpoch;
if (now - _lastNetworkPrintTime > 1000) {
AppLog.log('[Flutter] 每秒收到网络H264帧数: ' + _networkFrameCount.toString());
state.networkH264Fps.value = _networkFrameCount;
_networkFrameCount = 0;
_lastNetworkPrintTime = now;
// frameSeq严格递增
if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
//
AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
}
_lastFrameSeq = frameSeq;
// Map
final Map<String, dynamic> frameMap = {
'frameData': frameData,
@ -203,15 +169,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
while (state.h264FrameBuffer.length > state.maxFrameBufferSize) {
state.h264FrameBuffer.removeAt(0);
}
_flutterToNativeFrameCount++;
if (now - _lastFlutterToNativePrintTime > 1000) {
AppLog.log(
'[Flutter] 每秒送入Native帧数: ' + _flutterToNativeFrameCount.toString());
state.nativeSendFps.value = _flutterToNativeFrameCount;
_flutterToNativeFrameCount = 0;
_lastFlutterToNativePrintTime = now;
}
}
///
@ -225,12 +182,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
state.frameProcessTimer =
Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
if (state.isLoading.isTrue) {
state.isLoading.value = false;
}
_processNextFrameFromBuffer();
});
AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
}
@ -258,40 +211,28 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final int frameSeqI = frameMap['frameSeqI'];
int pts = DateTime.now().millisecondsSinceEpoch;
if (frameType == TalkDataH264Frame_FrameTypeE.P) {
// frameSeqI为I帧序号标识
if (!(_decodedIFrames.contains(frameSeqI))) {
AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
return;
}
} else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
// I帧序号
_decodedIFrames.add(frameSeq);
}
// if (frameType == TalkDataH264Frame_FrameTypeE.P) {
// // frameSeqI为I帧序号标识
// if (!(_decodedIFrames.contains(frameSeqI))) {
// AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
// return;
// }
// } else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
// // I帧序号
// _decodedIFrames.add(frameSeq);
// }
// h264文件
_appendH264FrameToFile(frameData, frameType);
// _appendH264FrameToFile(frameData, frameType);
final timestamp = DateTime.now().microsecondsSinceEpoch;
VideoDecodePlugin.decodeFrame(
frameData: Uint8List.fromList(frameData),
frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 1 : 0,
VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
frameSeq: frameSeq,
timestamp: timestamp,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
// P帧对应I帧是否已解码P帧
if (frameType == TalkDataH264Frame_FrameTypeE.P) {
// frameSeqI为I帧序号标识
if (!(_decodedIFrames.contains(frameSeqI))) {
AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
}
} else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
// I帧序号
_decodedIFrames.add(frameSeq);
}
// h264文件
_appendH264FrameToFile(frameData, frameType);
} catch (e) {
AppLog.log('处理缓冲帧失败: $e');
} finally {
@ -306,7 +247,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.frameProcessTimer = null;
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
// AppLog.log('停止帧处理定时器');
}
//
@ -342,54 +282,35 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_playAudioFrames();
break;
case TalkData_ContentTypeE.H264:
// if (_isStartNative) {
// if (talkDataH264Frame != null) {
// // I帧NALUSPS/PPS并优先放入缓冲区
// if (talkDataH264Frame.frameType ==
// TalkDataH264Frame_FrameTypeE.I) {
// // I帧前所有未处理帧SPS/PPS/I帧
// state.h264FrameBuffer.clear();
// _extractAndBufferSpsPpsForBuffer(
// talkData.content,
// talkData.durationMs,
// talkDataH264Frame.frameSeq,
// talkDataH264Frame.frameSeqI);
// }
// _addFrameToBuffer(
// talkData.content,
// talkDataH264Frame.frameType,
// talkData.durationMs,
// talkDataH264Frame.frameSeq,
// talkDataH264Frame.frameSeqI);
// }
// } else {
// await VideoDecodePlugin.startNativePlayer(
// VideoDecoderConfig(width: 1280, height: 720, codecType: 'h264'),
// );
// _isStartNative = true;
// }
// H264帧
if (state.textureId.value != null) {
if (talkDataH264Frame != null) {
if (talkDataH264Frame.frameType ==
TalkDataH264Frame_FrameTypeE.I) {
_handleIFrameWithSpsPpsAndIdr(
talkData.content,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
);
return;
} else if (talkDataH264Frame.frameType ==
TalkDataH264Frame_FrameTypeE.P) {
_handlePFrame(
talkData.content,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
);
return;
}
_addFrameToBuffer(
talkData.content,
talkDataH264Frame.frameType,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
);
// if (talkDataH264Frame.frameType ==
// TalkDataH264Frame_FrameTypeE.I) {
// _handleIFrameWithSpsPpsAndIdr(
// talkData.content,
// talkData.durationMs,
// talkDataH264Frame.frameSeq,
// talkDataH264Frame.frameSeqI,
// );
// return;
// } else if (talkDataH264Frame.frameType ==
// TalkDataH264Frame_FrameTypeE.P) {
// _handlePFrame(
// talkData.content,
// talkData.durationMs,
// talkDataH264Frame.frameSeq,
// talkDataH264Frame.frameSeqI,
// );
// return;
// }
}
} else {
AppLog.log('无法处理H264帧textureId为空');
@ -459,7 +380,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
///
void _playAudioData(TalkData talkData) async {
if (state.isOpenVoice.value) {
if (state.isOpenVoice.value && state.isLoading.isFalse) {
final list =
G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
// // PCM PcmArrayInt16
@ -545,9 +466,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
_initFlutterPcmSound();
//
// _startPlayback();
//
_initAudioRecorder();
@ -559,13 +477,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// H264帧缓冲区
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
_setupFrameRefresh();
}
@override
void onClose() {
_closeH264File();
// _closeH264File();
//
_stopFrameProcessTimer();
@ -603,8 +519,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// I帧集合
_decodedIFrames.clear();
_frameRefreshTimer?.cancel();
_frameRefreshTimer = null;
super.onClose();
}
@ -894,16 +809,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int naluType = nalu[offset] & 0x1F;
if (naluType == 7) {
spsList.add(nalu);
AppLog.log('SPS内容: ' +
nalu
.map((b) => b.toRadixString(16).padLeft(2, '0'))
.join(' '));
// AppLog.log('SPS内容: ' +
// nalu
// .map((b) => b.toRadixString(16).padLeft(2, '0'))
// .join(' '));
} else if (naluType == 8) {
ppsList.add(nalu);
AppLog.log('PPS内容: ' +
nalu
.map((b) => b.toRadixString(16).padLeft(2, '0'))
.join(' '));
// AppLog.log('PPS内容: ' +
// nalu
// .map((b) => b.toRadixString(16).padLeft(2, '0'))
// .join(' '));
} else if (naluType == 5) {
idrList.add(nalu);
}
@ -914,15 +829,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (spsList.isNotEmpty && ppsList.isNotEmpty && idrList.isNotEmpty) {
for (final sps in spsList) {
await _writeSingleFrameToFile(_ensureStartCode(sps));
AppLog.log('写入顺序: SPS');
// AppLog.log('写入顺序: SPS');
}
for (final pps in ppsList) {
await _writeSingleFrameToFile(_ensureStartCode(pps));
AppLog.log('写入顺序: PPS');
// AppLog.log('写入顺序: PPS');
}
for (final idr in idrList) {
await _writeSingleFrameToFile(_ensureStartCode(idr));
AppLog.log('写入顺序: IDR');
// AppLog.log('写入顺序: IDR');
}
_hasWrittenFirstIFrame = true;
} else {
@ -1131,18 +1046,22 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
// I帧处理方法
void _handleIFrameWithSpsPpsAndIdr(List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
void _handleIFrameWithSpsPpsAndIdr(
List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// I帧前所有未处理帧SPS/PPS/I帧
state.h264FrameBuffer.clear();
_extractAndBufferSpsPpsForBuffer(frameData, durationMs, frameSeq, frameSeqI);
_extractAndBufferSpsPpsForBuffer(
frameData, durationMs, frameSeq, frameSeqI);
// SPS/PPS就先写入I帧本体IDR
if (spsCache == null || ppsCache == null) {
// SPS/PPS缓存I帧
return;
}
// SPS/PPS
_addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs, frameSeq, frameSeqI);
_addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs, frameSeq, frameSeqI);
_addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
_addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
// I帧包IDRtype 5
List<List<int>> nalus = [];
int i = 0;
@ -1154,7 +1073,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (data[i + 2] == 0x01) {
start = i;
i += 3;
} else if (i + 3 < data.length && data[i + 2] == 0x00 && data[i + 3] == 0x01) {
} else if (i + 3 < data.length &&
data[i + 2] == 0x00 &&
data[i + 3] == 0x01) {
start = i;
i += 4;
} else {
@ -1163,7 +1084,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
next = i;
while (next < data.length - 3) {
if (data[next] == 0x00 && data[next + 1] == 0x00 && ((data[next + 2] == 0x01) || (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
if (data[next] == 0x00 &&
data[next + 1] == 0x00 &&
((data[next + 2] == 0x01) ||
(data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
break;
}
next++;
@ -1174,7 +1098,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
i++;
}
}
int nalusTotalLen = nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
int nalusTotalLen =
nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
if (nalus.isEmpty && data.isNotEmpty) {
nalus.add(data);
} else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
@ -1190,14 +1115,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (nalu.length > offset) {
int naluType = nalu[offset] & 0x1F;
if (naluType == 5) {
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs, frameSeq, frameSeqI);
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
}
}
}
}
// P帧处理方法
void _handlePFrame(List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
void _handlePFrame(
List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// P帧type 1
List<List<int>> nalus = [];
int i = 0;
@ -1209,7 +1136,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (data[i + 2] == 0x01) {
start = i;
i += 3;
} else if (i + 3 < data.length && data[i + 2] == 0x00 && data[i + 3] == 0x01) {
} else if (i + 3 < data.length &&
data[i + 2] == 0x00 &&
data[i + 3] == 0x01) {
start = i;
i += 4;
} else {
@ -1218,7 +1147,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
next = i;
while (next < data.length - 3) {
if (data[next] == 0x00 && data[next + 1] == 0x00 && ((data[next + 2] == 0x01) || (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
if (data[next] == 0x00 &&
data[next + 1] == 0x00 &&
((data[next + 2] == 0x01) ||
(data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
break;
}
next++;
@ -1229,7 +1161,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
i++;
}
}
int nalusTotalLen = nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
int nalusTotalLen =
nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
if (nalus.isEmpty && data.isNotEmpty) {
nalus.add(data);
} else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
@ -1245,7 +1178,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (nalu.length > offset) {
int naluType = nalu[offset] & 0x1F;
if (naluType == 1) {
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs, frameSeq, frameSeqI);
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs,
frameSeq, frameSeqI);
}
}
}

View File

@ -13,6 +13,7 @@ import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/impl/debug_Info_model.dart';
import 'package:star_lock/talk/starChart/handle/impl/udp_talk_data_handler.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_logic.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_logic.dart';
@ -35,6 +36,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
final TalkViewNativeDecodeLogic logic = Get.put(TalkViewNativeDecodeLogic());
final TalkViewNativeDecodeState state =
Get.find<TalkViewNativeDecodeLogic>().state;
final startChartManage = StartChartManage();
@override
void initState() {
@ -112,7 +114,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
child: SizedBox.expand(
child: RotatedBox(
// 使RotatedBox
quarterTurns: -1,
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
@ -126,53 +128,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
);
},
),
Positioned(
top: 300.h,
right: 20.w,
child: Obx(() => Container(
padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 10),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.5),
borderRadius: BorderRadius.circular(12),
),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
Icon(Icons.network_check, color: Colors.redAccent, size: 18),
SizedBox(width: 6),
Text(
'接受服务端H264帧率/秒: ',
style: TextStyle(color: Colors.white, fontSize: 15),
),
Text(
'${state.networkH264Fps.value}',
style: TextStyle(color: Colors.redAccent, fontSize: 16, fontWeight: FontWeight.bold),
),
Text(' fps', style: TextStyle(color: Colors.white, fontSize: 13)),
],
),
SizedBox(height: 4),
Row(
children: [
Icon(Icons.send, color: Colors.blueAccent, size: 18),
SizedBox(width: 6),
Text(
'送入Native帧率/秒: ',
style: TextStyle(color: Colors.white, fontSize: 15),
),
Text(
'${state.nativeSendFps.value}',
style: TextStyle(color: Colors.blueAccent, fontSize: 16, fontWeight: FontWeight.bold),
),
Text(' fps', style: TextStyle(color: Colors.white, fontSize: 13)),
],
),
],
),
)),
),
Obx(() => state.isLoading.isTrue
? Positioned(
bottom: 310.h,

View File

@ -109,16 +109,12 @@ class TalkViewNativeDecodeState {
// H264帧缓冲区相关
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区
final int maxFrameBufferSize = 25; //
final int targetFps = 120; //
final int maxFrameBufferSize = 15; //
final int targetFps = 25; // ,native的缓冲区
Timer? frameProcessTimer; //
bool isProcessingFrame = false; //
int lastProcessedTimestamp = 0; //
// H264文件保存相关
String? h264FilePath;
File? h264File;
//
RxInt networkH264Fps = 0.obs;
RxInt nativeSendFps = 0.obs;
}