fix: add native decoding plugin support for the 720P talk (intercom) page

This commit is contained in:
liyi 2025-04-30 17:55:57 +08:00
parent e806987fa0
commit 357eaac746
5 changed files with 115 additions and 225 deletions

View File

@@ -1185,7 +1185,7 @@ abstract class AppRouters {
         page: () => const DoubleLockLinkPage()),
     GetPage<dynamic>(
         name: Routers.starChartTalkView, page: () => const TalkViewPage()),
-    // GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()), //
-    GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()), // webview播放页面
+    GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()), //
+    // GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()), // webview播放页面
   ];
 }

View File

@@ -13,7 +13,11 @@ class TalkConstant {
     audioType: [AudioTypeE.G711],
   );
   static TalkExpectReq H264Expect = TalkExpectReq(
-    videoType: [VideoTypeE.H264],
+    videoType: [VideoTypeE.H264_720P],
+    audioType: [AudioTypeE.G711],
+  );
+  static TalkExpectReq H264_720P_Expect = TalkExpectReq(
+    videoType: [VideoTypeE.H264_720P],
     audioType: [AudioTypeE.G711],
   );
 }
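
Note: the commit adds an H264_720P_Expect preset (720P H.264 video plus G.711 audio) next to H264Expect. A minimal usage sketch, assuming a hypothetical prefer720p capability/setting flag (the two presets themselves come from TalkConstant above):

// Sketch only: `prefer720p` is a hypothetical flag, not part of this commit;
// TalkConstant.H264Expect / H264_720P_Expect are defined in the diff above.
TalkExpectReq pickH264Expect({required bool prefer720p}) =>
    prefer720p ? TalkConstant.H264_720P_Expect : TalkConstant.H264Expect;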

View File

@@ -60,25 +60,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   // I帧序号
   final Set<int> _decodedIFrames = <int>{};
-  //
-  int? _previousFrameTimestamp;
-  int _flutterToNativeFrameCount = 0;
-  int _lastFlutterToNativePrintTime = 0;
-  int _networkFrameCount = 0;
-  int _lastNetworkPrintTime = 0;
-  Timer? _frameRefreshTimer;
-  bool _isFrameAvailable = true;
-  int _renderedFrameCount = 0;
-  int _lastRenderedFrameTime = 0;
   // I帧前
   final List<List<int>> _preIFrameCache = [];
   bool _hasWrittenFirstIFrame = false;
-  bool _isStartNative = false;
   // SPS/PPS状态追踪变量
   bool hasSps = false;
   bool hasPps = false;

@@ -87,35 +72,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   List<int>? spsCache;
   List<int>? ppsCache;
-  void _setupFrameRefresh() {
-    // 16ms对应约60fps
-    _frameRefreshTimer =
-        Timer.periodic(const Duration(milliseconds: 16), (timer) {
-      if (_isFrameAvailable) {
-        _isFrameAvailable = false;
-        _renderedFrameCount++;
-        //
-        int now = DateTime.now().millisecondsSinceEpoch;
-        if (now - _lastRenderedFrameTime > 1000) {
-          print('[Flutter] 每秒渲染帧数: $_renderedFrameCount');
-          _renderedFrameCount = 0;
-          _lastRenderedFrameTime = now;
-        }
-        // Flutter重建widget
-        WidgetsBinding.instance.scheduleFrame();
-      }
-    });
-  }
-  void onFrameAvailable() {
-    _isFrameAvailable = true;
-  }
+  // frameSeq
+  int? _lastFrameSeq;
   //
   Future<void> _initVideoDecoder() async {
     try {
+      state.isLoading.value = true;
       //
       final config = VideoDecoderConfig(
         width: 1280,

@@ -128,10 +91,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       if (textureId != null) {
         state.textureId.value = textureId;
         AppLog.log('视频解码器初始化成功textureId=$textureId');
+        //
+        VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
+          state.isLoading.value = false;
+          AppLog.log('已经开始渲染=======');
+        });
       } else {
         AppLog.log('视频解码器初始化失败');
       }
+      //
       _startFrameProcessTimer();
     } catch (e) {
       AppLog.log('初始化视频解码器错误: $e');
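
Note: the loading flag is now set when decoder initialisation starts and cleared only when the plugin reports the first rendered frame (via setOnFrameRenderedListener above), instead of being cleared on the first timer tick. A minimal GetX sketch of that gate; the actual plugin wiring is as shown in the diff and is not reproduced here:

import 'package:get/get.dart';

/// Sketch only: mirrors the loading gate introduced above.
class RenderLoadingGate {
  final RxBool isLoading = true.obs;

  /// Call when decoder initialisation begins.
  void beginLoading() => isLoading.value = true;

  /// Wire this as the plugin's frame-rendered callback.
  void onFirstFrameRendered(int textureId) {
    if (isLoading.isTrue) isLoading.value = false;
  }
}
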
@@ -178,15 +146,13 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     int frameSeq,
     int frameSeqI,
   ) {
-    _networkFrameCount++;
-    int now = DateTime.now().millisecondsSinceEpoch;
-    if (now - _lastNetworkPrintTime > 1000) {
-      AppLog.log('[Flutter] 每秒收到网络H264帧数: ' + _networkFrameCount.toString());
-      state.networkH264Fps.value = _networkFrameCount;
-      _networkFrameCount = 0;
-      _lastNetworkPrintTime = now;
+    // frameSeq严格递增
+    if (_lastFrameSeq != null && frameSeq <= _lastFrameSeq!) {
+      //
+      AppLog.log('丢弃乱序或重复帧: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
+      return;
     }
+    _lastFrameSeq = frameSeq;
     // Map
     final Map<String, dynamic> frameMap = {
       'frameData': frameData,
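
Note: the new _lastFrameSeq check drops duplicate and out-of-order frames before they ever reach the buffer. The same idea as a small standalone gate (a sketch, not the project's code):

/// Sketch only: a strictly-increasing sequence gate, like the
/// _lastFrameSeq check introduced above.
class MonotonicSeqGate {
  int? _last;

  /// Returns false for duplicate or out-of-order sequence numbers.
  bool accept(int seq) {
    if (_last != null && seq <= _last!) {
      return false; // drop: a newer (or equal) frameSeq was already seen
    }
    _last = seq;
    return true;
  }
}
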
@@ -203,15 +169,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     while (state.h264FrameBuffer.length > state.maxFrameBufferSize) {
       state.h264FrameBuffer.removeAt(0);
     }
-    _flutterToNativeFrameCount++;
-    if (now - _lastFlutterToNativePrintTime > 1000) {
-      AppLog.log(
-          '[Flutter] 每秒送入Native帧数: ' + _flutterToNativeFrameCount.toString());
-      state.nativeSendFps.value = _flutterToNativeFrameCount;
-      _flutterToNativeFrameCount = 0;
-      _lastFlutterToNativePrintTime = now;
-    }
   }

   ///

@@ -225,12 +182,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     //
     state.frameProcessTimer =
         Timer.periodic(Duration(milliseconds: intervalMs), (timer) {
-      if (state.isLoading.isTrue) {
-        state.isLoading.value = false;
-      }
       _processNextFrameFromBuffer();
     });
     AppLog.log('启动帧处理定时器,目标帧率: ${state.targetFps}fps间隔: ${intervalMs}ms');
   }
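
Note: the timer now only drains the buffer; clearing the loading flag is handled by the render callback. A sketch of a fixed-rate drain, assuming the tick interval is derived from targetFps as 1000 / targetFps milliseconds (the derivation itself is not shown in this hunk):

import 'dart:async';

/// Sketch only: a fixed-rate drain like frameProcessTimer above
/// (targetFps = 25 gives a 40 ms tick).
Timer startFixedRateDrain(int targetFps, void Function() processNext) {
  final intervalMs = (1000 / targetFps).floor();
  return Timer.periodic(Duration(milliseconds: intervalMs), (_) => processNext());
}
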
@@ -258,40 +211,28 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       final int frameSeqI = frameMap['frameSeqI'];
       int pts = DateTime.now().millisecondsSinceEpoch;
-      if (frameType == TalkDataH264Frame_FrameTypeE.P) {
-        // frameSeqI为I帧序号标识
-        if (!(_decodedIFrames.contains(frameSeqI))) {
-          AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
-          return;
-        }
-      } else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
-        // I帧序号
-        _decodedIFrames.add(frameSeq);
-      }
+      // if (frameType == TalkDataH264Frame_FrameTypeE.P) {
+      //   // frameSeqI为I帧序号标识
+      //   if (!(_decodedIFrames.contains(frameSeqI))) {
+      //     AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
+      //     return;
+      //   }
+      // } else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
+      //   // I帧序号
+      //   _decodedIFrames.add(frameSeq);
+      // }
       // h264文件
-      _appendH264FrameToFile(frameData, frameType);
+      // _appendH264FrameToFile(frameData, frameType);
       final timestamp = DateTime.now().microsecondsSinceEpoch;
-      VideoDecodePlugin.decodeFrame(
-        frameData: Uint8List.fromList(frameData),
-        frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 1 : 0,
+      VideoDecodePlugin.sendFrame(
+        frameData: frameData,
+        frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
         frameSeq: frameSeq,
         timestamp: timestamp,
+        splitNalFromIFrame: true,
         refIFrameSeq: frameSeqI,
       );
-      // P帧对应I帧是否已解码P帧
-      if (frameType == TalkDataH264Frame_FrameTypeE.P) {
-        // frameSeqI为I帧序号标识
-        if (!(_decodedIFrames.contains(frameSeqI))) {
-          AppLog.log('丢弃P帧未收到对应I帧frameSeqI=${frameSeqI}');
-        }
-      } else if (frameType == TalkDataH264Frame_FrameTypeE.I) {
-        // I帧序号
-        _decodedIFrames.add(frameSeq);
-      }
-      // h264文件
-      _appendH264FrameToFile(frameData, frameType);
     } catch (e) {
       AppLog.log('处理缓冲帧失败: $e');
     } finally {
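
Note: _processNextFrameFromBuffer drains state.h264FrameBuffer, which is trimmed to maxFrameBufferSize before each hand-off (see the earlier hunk). A standalone sketch of that bounded-FIFO behaviour, using a generic element type instead of the Map<String, dynamic> entries the logic class stores:

/// Sketch only: bounded FIFO like h264FrameBuffer + maxFrameBufferSize.
class BoundedFrameBuffer<T> {
  BoundedFrameBuffer(this.maxSize);

  final int maxSize;
  final List<T> _items = <T>[];

  void add(T frame) {
    _items.add(frame);
    while (_items.length > maxSize) {
      _items.removeAt(0); // drop the oldest frame when the decoder lags
    }
  }

  /// Oldest buffered frame, or null when empty.
  T? takeNext() => _items.isEmpty ? null : _items.removeAt(0);
}
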
@@ -306,7 +247,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     state.frameProcessTimer = null;
     state.h264FrameBuffer.clear();
     state.isProcessingFrame = false;
-    // AppLog.log('停止帧处理定时器');
   }

   //

@@ -342,54 +282,35 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         _playAudioFrames();
         break;
       case TalkData_ContentTypeE.H264:
-        // if (_isStartNative) {
-        //   if (talkDataH264Frame != null) {
-        //     // I帧NALUSPS/PPS并优先放入缓冲区
-        //     if (talkDataH264Frame.frameType ==
-        //         TalkDataH264Frame_FrameTypeE.I) {
-        //       // I帧前所有未处理帧SPS/PPS/I帧
-        //       state.h264FrameBuffer.clear();
-        //       _extractAndBufferSpsPpsForBuffer(
-        //           talkData.content,
-        //           talkData.durationMs,
-        //           talkDataH264Frame.frameSeq,
-        //           talkDataH264Frame.frameSeqI);
-        //     }
-        //     _addFrameToBuffer(
-        //         talkData.content,
-        //         talkDataH264Frame.frameType,
-        //         talkData.durationMs,
-        //         talkDataH264Frame.frameSeq,
-        //         talkDataH264Frame.frameSeqI);
-        //   }
-        // } else {
-        //   await VideoDecodePlugin.startNativePlayer(
-        //     VideoDecoderConfig(width: 1280, height: 720, codecType: 'h264'),
-        //   );
-        //   _isStartNative = true;
-        // }
         // H264帧
         if (state.textureId.value != null) {
           if (talkDataH264Frame != null) {
-            if (talkDataH264Frame.frameType ==
-                TalkDataH264Frame_FrameTypeE.I) {
-              _handleIFrameWithSpsPpsAndIdr(
-                talkData.content,
-                talkData.durationMs,
-                talkDataH264Frame.frameSeq,
-                talkDataH264Frame.frameSeqI,
-              );
-              return;
-            } else if (talkDataH264Frame.frameType ==
-                TalkDataH264Frame_FrameTypeE.P) {
-              _handlePFrame(
-                talkData.content,
-                talkData.durationMs,
-                talkDataH264Frame.frameSeq,
-                talkDataH264Frame.frameSeqI,
-              );
-              return;
-            }
+            _addFrameToBuffer(
+              talkData.content,
+              talkDataH264Frame.frameType,
+              talkData.durationMs,
+              talkDataH264Frame.frameSeq,
+              talkDataH264Frame.frameSeqI,
+            );
+            // if (talkDataH264Frame.frameType ==
+            //     TalkDataH264Frame_FrameTypeE.I) {
+            //   _handleIFrameWithSpsPpsAndIdr(
+            //     talkData.content,
+            //     talkData.durationMs,
+            //     talkDataH264Frame.frameSeq,
+            //     talkDataH264Frame.frameSeqI,
+            //   );
+            //   return;
+            // } else if (talkDataH264Frame.frameType ==
+            //     TalkDataH264Frame_FrameTypeE.P) {
+            //   _handlePFrame(
+            //     talkData.content,
+            //     talkData.durationMs,
+            //     talkDataH264Frame.frameSeq,
+            //     talkDataH264Frame.frameSeqI,
+            //   );
+            //   return;
+            // }
           }
         } else {
           AppLog.log('无法处理H264帧textureId为空');
@@ -459,7 +380,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   ///
   void _playAudioData(TalkData talkData) async {
-    if (state.isOpenVoice.value) {
+    if (state.isOpenVoice.value && state.isLoading.isFalse) {
       final list =
           G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
       // // PCM PcmArrayInt16

@@ -545,9 +466,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     //
     _initFlutterPcmSound();
-    //
-    // _startPlayback();
     //
     _initAudioRecorder();

@@ -559,13 +477,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // H264帧缓冲区
     state.h264FrameBuffer.clear();
     state.isProcessingFrame = false;
-    _setupFrameRefresh();
   }

   @override
   void onClose() {
-    _closeH264File();
+    // _closeH264File();
     //
     _stopFrameProcessTimer();

@@ -603,8 +519,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     // I帧集合
     _decodedIFrames.clear();
-    _frameRefreshTimer?.cancel();
-    _frameRefreshTimer = null;

     super.onClose();
   }
@@ -894,16 +809,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       int naluType = nalu[offset] & 0x1F;
       if (naluType == 7) {
         spsList.add(nalu);
-        AppLog.log('SPS内容: ' +
-            nalu
-                .map((b) => b.toRadixString(16).padLeft(2, '0'))
-                .join(' '));
+        // AppLog.log('SPS内容: ' +
+        //     nalu
+        //         .map((b) => b.toRadixString(16).padLeft(2, '0'))
+        //         .join(' '));
       } else if (naluType == 8) {
         ppsList.add(nalu);
-        AppLog.log('PPS内容: ' +
-            nalu
-                .map((b) => b.toRadixString(16).padLeft(2, '0'))
-                .join(' '));
+        // AppLog.log('PPS内容: ' +
+        //     nalu
+        //         .map((b) => b.toRadixString(16).padLeft(2, '0'))
+        //         .join(' '));
       } else if (naluType == 5) {
         idrList.add(nalu);
       }

@@ -914,15 +829,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
     if (spsList.isNotEmpty && ppsList.isNotEmpty && idrList.isNotEmpty) {
       for (final sps in spsList) {
         await _writeSingleFrameToFile(_ensureStartCode(sps));
-        AppLog.log('写入顺序: SPS');
+        // AppLog.log('写入顺序: SPS');
       }
       for (final pps in ppsList) {
         await _writeSingleFrameToFile(_ensureStartCode(pps));
-        AppLog.log('写入顺序: PPS');
+        // AppLog.log('写入顺序: PPS');
       }
       for (final idr in idrList) {
         await _writeSingleFrameToFile(_ensureStartCode(idr));
-        AppLog.log('写入顺序: IDR');
+        // AppLog.log('写入顺序: IDR');
       }
       _hasWrittenFirstIFrame = true;
     } else {
@@ -1131,18 +1046,22 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
   }

   // I帧处理方法
-  void _handleIFrameWithSpsPpsAndIdr(List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
+  void _handleIFrameWithSpsPpsAndIdr(
+      List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
     // I帧前所有未处理帧SPS/PPS/I帧
     state.h264FrameBuffer.clear();
-    _extractAndBufferSpsPpsForBuffer(frameData, durationMs, frameSeq, frameSeqI);
+    _extractAndBufferSpsPpsForBuffer(
+        frameData, durationMs, frameSeq, frameSeqI);
     // SPS/PPS就先写入I帧本体IDR
     if (spsCache == null || ppsCache == null) {
       // SPS/PPS缓存I帧
       return;
     }
     // SPS/PPS
-    _addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs, frameSeq, frameSeqI);
-    _addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs, frameSeq, frameSeqI);
+    _addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
+        frameSeq, frameSeqI);
+    _addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
+        frameSeq, frameSeqI);
     // I帧包IDRtype 5
     List<List<int>> nalus = [];
     int i = 0;

@@ -1154,7 +1073,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         if (data[i + 2] == 0x01) {
           start = i;
           i += 3;
-        } else if (i + 3 < data.length && data[i + 2] == 0x00 && data[i + 3] == 0x01) {
+        } else if (i + 3 < data.length &&
+            data[i + 2] == 0x00 &&
+            data[i + 3] == 0x01) {
           start = i;
           i += 4;
         } else {

@@ -1163,7 +1084,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         }
         next = i;
         while (next < data.length - 3) {
-          if (data[next] == 0x00 && data[next + 1] == 0x00 && ((data[next + 2] == 0x01) || (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
+          if (data[next] == 0x00 &&
+              data[next + 1] == 0x00 &&
+              ((data[next + 2] == 0x01) ||
+                  (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
             break;
           }
           next++;

@@ -1174,7 +1098,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         i++;
       }
     }
-    int nalusTotalLen = nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
+    int nalusTotalLen =
+        nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
     if (nalus.isEmpty && data.isNotEmpty) {
       nalus.add(data);
     } else if (nalus.isNotEmpty && nalusTotalLen < data.length) {

@@ -1190,14 +1115,16 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       if (nalu.length > offset) {
         int naluType = nalu[offset] & 0x1F;
         if (naluType == 5) {
-          _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs, frameSeq, frameSeqI);
+          _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs,
+              frameSeq, frameSeqI);
         }
       }
     }
   }

   // P帧处理方法
-  void _handlePFrame(List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
+  void _handlePFrame(
+      List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
     // P帧type 1
     List<List<int>> nalus = [];
     int i = 0;

@@ -1209,7 +1136,9 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         if (data[i + 2] == 0x01) {
           start = i;
           i += 3;
-        } else if (i + 3 < data.length && data[i + 2] == 0x00 && data[i + 3] == 0x01) {
+        } else if (i + 3 < data.length &&
+            data[i + 2] == 0x00 &&
+            data[i + 3] == 0x01) {
           start = i;
           i += 4;
         } else {

@@ -1218,7 +1147,10 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         }
         next = i;
         while (next < data.length - 3) {
-          if (data[next] == 0x00 && data[next + 1] == 0x00 && ((data[next + 2] == 0x01) || (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
+          if (data[next] == 0x00 &&
+              data[next + 1] == 0x00 &&
+              ((data[next + 2] == 0x01) ||
+                  (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
             break;
           }
           next++;

@@ -1229,7 +1161,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
         i++;
       }
     }
-    int nalusTotalLen = nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
+    int nalusTotalLen =
+        nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
     if (nalus.isEmpty && data.isNotEmpty) {
       nalus.add(data);
     } else if (nalus.isNotEmpty && nalusTotalLen < data.length) {

@@ -1245,7 +1178,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
       if (nalu.length > offset) {
         int naluType = nalu[offset] & 0x1F;
         if (naluType == 1) {
-          _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs, frameSeq, frameSeqI);
+          _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs,
+              frameSeq, frameSeqI);
         }
       }
     }
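
Note: the wrapped conditions above are an Annex-B start-code scan (00 00 01 / 00 00 00 01), with the NALU type read from the low five bits of the first payload byte (1 = non-IDR slice, 5 = IDR, 7 = SPS, 8 = PPS). A standalone sketch of that scan, not the project's implementation:

import 'dart:typed_data';

/// Sketch only: split an Annex-B H.264 buffer into NALU payloads.
List<Uint8List> splitAnnexBNalus(Uint8List data) {
  final codePos = <int>[];    // where each start code begins
  final payloadPos = <int>[]; // where each NALU payload begins
  var i = 0;
  while (i + 2 < data.length) {
    if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
      codePos.add(i);
      payloadPos.add(i + 3);
      i += 3;
    } else if (i + 3 < data.length &&
        data[i] == 0 &&
        data[i + 1] == 0 &&
        data[i + 2] == 0 &&
        data[i + 3] == 1) {
      codePos.add(i);
      payloadPos.add(i + 4);
      i += 4;
    } else {
      i++;
    }
  }
  final nalus = <Uint8List>[];
  for (var s = 0; s < payloadPos.length; s++) {
    final end = s + 1 < codePos.length ? codePos[s + 1] : data.length;
    nalus.add(Uint8List.sublistView(data, payloadPos[s], end));
  }
  return nalus;
}

/// Low 5 bits of the first payload byte: 1 = non-IDR slice, 5 = IDR,
/// 7 = SPS, 8 = PPS.
int naluType(Uint8List nalu) => nalu.isEmpty ? -1 : nalu.first & 0x1F;
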

View File

@@ -13,6 +13,7 @@ import 'package:star_lock/talk/call/callTalk.dart';
 import 'package:star_lock/talk/starChart/constant/talk_status.dart';
 import 'package:star_lock/talk/starChart/handle/impl/debug_Info_model.dart';
 import 'package:star_lock/talk/starChart/handle/impl/udp_talk_data_handler.dart';
+import 'package:star_lock/talk/starChart/star_chart_manage.dart';
 import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_logic.dart';
 import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart';
 import 'package:star_lock/talk/starChart/views/talkView/talk_view_logic.dart';

@@ -35,6 +36,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
   final TalkViewNativeDecodeLogic logic = Get.put(TalkViewNativeDecodeLogic());
   final TalkViewNativeDecodeState state =
       Get.find<TalkViewNativeDecodeLogic>().state;
+  final startChartManage = StartChartManage();

   @override
   void initState() {
@@ -112,7 +114,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
               child: SizedBox.expand(
                 child: RotatedBox(
                   // 使RotatedBox
-                  quarterTurns: -1,
+                  quarterTurns: startChartManage.rotateAngle ~/ 90,
                   child: Texture(
                     textureId: state.textureId.value!,
                     filterQuality: FilterQuality.medium,
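
Note: the hard-coded quarterTurns: -1 is replaced by a value derived from the device-reported rotation; rotateAngle ~/ 90 maps 0/90/180/270 degrees to 0-3 quarter turns. A tiny sketch of that mapping; the % 360 is an added assumption in case angles are not pre-normalised:

/// Sketch only: degrees-to-RotatedBox.quarterTurns mapping.
int quarterTurnsFromDegrees(int rotateAngle) => (rotateAngle % 360) ~/ 90;
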
@@ -126,53 +128,7 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
             );
           },
         ),
-        Positioned(
-          top: 300.h,
-          right: 20.w,
-          child: Obx(() => Container(
-                padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 10),
-                decoration: BoxDecoration(
-                  color: Colors.black.withOpacity(0.5),
-                  borderRadius: BorderRadius.circular(12),
-                ),
-                child: Column(
-                  crossAxisAlignment: CrossAxisAlignment.start,
-                  children: [
-                    Row(
-                      children: [
-                        Icon(Icons.network_check, color: Colors.redAccent, size: 18),
-                        SizedBox(width: 6),
-                        Text(
-                          '接受服务端H264帧率/秒: ',
-                          style: TextStyle(color: Colors.white, fontSize: 15),
-                        ),
-                        Text(
-                          '${state.networkH264Fps.value}',
-                          style: TextStyle(color: Colors.redAccent, fontSize: 16, fontWeight: FontWeight.bold),
-                        ),
-                        Text(' fps', style: TextStyle(color: Colors.white, fontSize: 13)),
-                      ],
-                    ),
-                    SizedBox(height: 4),
-                    Row(
-                      children: [
-                        Icon(Icons.send, color: Colors.blueAccent, size: 18),
-                        SizedBox(width: 6),
-                        Text(
-                          '送入Native帧率/秒: ',
-                          style: TextStyle(color: Colors.white, fontSize: 15),
-                        ),
-                        Text(
-                          '${state.nativeSendFps.value}',
-                          style: TextStyle(color: Colors.blueAccent, fontSize: 16, fontWeight: FontWeight.bold),
-                        ),
-                        Text(' fps', style: TextStyle(color: Colors.white, fontSize: 13)),
-                      ],
-                    ),
-                  ],
-                ),
-              )),
-        ),
         Obx(() => state.isLoading.isTrue
             ? Positioned(
                 bottom: 310.h,

View File

@@ -109,16 +109,12 @@ class TalkViewNativeDecodeState {
   // H264帧缓冲区相关
   final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区
-  final int maxFrameBufferSize = 25; //
-  final int targetFps = 120; //
+  final int maxFrameBufferSize = 15; //
+  final int targetFps = 25; // ,native的缓冲区
   Timer? frameProcessTimer; //
   bool isProcessingFrame = false; //
   int lastProcessedTimestamp = 0; //

   // H264文件保存相关
   String? h264FilePath;
   File? h264File;
-  //
-  RxInt networkH264Fps = 0.obs;
-  RxInt nativeSendFps = 0.obs;
 }
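
Note: with targetFps lowered to 25 and maxFrameBufferSize to 15, the Flutter-side backlog is bounded at roughly 600 ms of video before the oldest frames are dropped (the native decoder keeps its own buffer). A quick sketch of that arithmetic:

/// Sketch only: latency budget implied by the new constants above.
void main() {
  const targetFps = 25;
  const maxFrameBufferSize = 15;
  final tickMs = (1000 / targetFps).round(); // 40 ms between native feeds
  final maxBacklogMs = maxFrameBufferSize * tickMs; // ~600 ms of buffered video
  print('tick=${tickMs}ms, max Flutter-side backlog=${maxBacklogMs}ms');
}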