Merge branch 'release_sky' into 'master_sky'

Release sky

See merge request StarlockTeam/app-starlock!181
李仪 2025-06-24 09:01:01 +00:00
commit 99d728a11c
12 changed files with 446 additions and 305 deletions

View File

@@ -16,6 +16,11 @@ import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
// implements ScpMessageHandler {
class UdpTalkDataHandler extends ScpMessageBaseHandle
implements ScpMessageHandler {
//
static final UdpTalkDataHandler instance = UdpTalkDataHandler();
UdpTalkDataHandler(); //
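// Shared instance: ScpMessageHandlerFactory now returns this singleton
// (see the factory change below), presumably so per-handler state such as
// packet-reassembly buffers survives across datagrams.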
@override
void handleReq(ScpMessage scpMessage) {}
@@ -32,8 +37,11 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
if (scpMessage.Payload != null) {
final TalkData talkData = scpMessage.Payload;
//
_handleTalkData(talkData: talkData);
_handleTalkData(
talkData: talkData,
scpMessage: scpMessage,
);
}
}
@@ -93,12 +101,15 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
return hexList.join('');
}
void _handleTalkData({required TalkData talkData}) {
void _handleTalkData({
required TalkData talkData,
required ScpMessage scpMessage,
}) {
if (talkData == null) return;
final contentType = talkData.contentType;
switch (contentType) {
case TalkData_ContentTypeE.H264:
_handleVideoH264(talkData);
_handleVideoH264(talkData, scpMessage);
break;
case TalkData_ContentTypeE.Image:
_handleVideoImage(talkData);
@@ -113,10 +124,12 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
}
/// Data carried by the H264 protocol
void _handleVideoH264(TalkData talkData) {
void _handleVideoH264(TalkData talkData, ScpMessage scpMessage) {
final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame();
talkDataH264Frame.mergeFromBuffer(talkData.content);
frameHandler.handleFrame(talkDataH264Frame, talkData);
// AppLog.log('Handling H264 frame: frameType=${talkDataH264Frame.frameType}, frameSeq=${talkDataH264Frame.frameSeq},MessageId:${scpMessage.MessageId}');
frameHandler.handleFrame(talkDataH264Frame, talkData, scpMessage);
}
///

View File

@@ -93,20 +93,21 @@ class UdpTalkExpectHandler extends ScpMessageBaseHandle
}
if (isWifiLockType ||
(talkExpectResp.rotate == 0 &&
talkExpectResp.width == 640 &&
talkExpectResp.height == 480)) {
talkExpectResp.width == 640 &&
talkExpectResp.height == 480) &&
talkStatus.status != TalkStatus.answeredSuccessfully) {
Get.toNamed(Routers.imageTransmissionView);
return;
}
if (startChartManage
.getDefaultTalkExpect()
.videoType
.contains(VideoTypeE.H264)) {
.getDefaultTalkExpect()
.videoType
.contains(VideoTypeE.H264) &&
talkStatus.status != TalkStatus.answeredSuccessfully) {
Get.toNamed(
Routers.h264View,
);
} else {
} else if (talkStatus.status != TalkStatus.answeredSuccessfully) {
Get.toNamed(
Routers.starChartTalkView,
);
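// Note: the talkStatus guards added to all three branches above appear to
// prevent navigating to a talk view again once the call has already been
// answered successfully.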

View File

@@ -3,6 +3,7 @@ import 'dart:typed_data';
import 'package:flutter/services.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import '../../proto/talk_data_h264_frame.pb.dart';
@@ -12,8 +13,15 @@ class H264FrameHandler {
H264FrameHandler({required this.onCompleteFrame});
void handleFrame(TalkDataH264Frame frame, TalkData talkData) {
onCompleteFrame(
TalkDataModel(talkData: talkData, talkDataH264Frame: frame));
void handleFrame(
TalkDataH264Frame frame, TalkData talkData, ScpMessage scpMessage) {
// AppLog.log(
//     'Frame pushed into the stream: frameSeq=${frame.frameSeq},frameType=${frame.frameType},MessageId:${scpMessage.MessageId}');
onCompleteFrame(TalkDataModel(
talkData: talkData,
talkDataH264Frame: frame,
scpMessage: scpMessage,
));
}
}

View File

@@ -1,9 +1,12 @@
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
class TalkDataModel {
TalkData? talkData;
TalkDataH264Frame? talkDataH264Frame;
ScpMessage? scpMessage;
TalkDataModel({required this.talkData, this.talkDataH264Frame});
TalkDataModel(
{required this.talkData, this.talkDataH264Frame, this.scpMessage});
}
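A minimal construction sketch for the widened model (names are from this diff; the values are illustrative only):

// Illustrative usage, not part of this MR:
final model = TalkDataModel(
  talkData: talkData,
  talkDataH264Frame: frame,
  scpMessage: scpMessage, // new: carried through to the decode pipeline
);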

View File

@@ -11,7 +11,7 @@ class TalkDataRepository {
onCancel: () {
_isListening = false;
},
sync: false, //
sync: true, // deliver events to listeners synchronously
);
}
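For context, sync: true makes a Dart StreamController deliver events to listeners synchronously, during the add() call itself, instead of via the microtask queue. A minimal standalone sketch (a broadcast controller is used here only for brevity; whether the repository's controller is broadcast is not shown in this diff):

import 'dart:async';

void main() {
  final ctrl = StreamController<int>.broadcast(sync: true);
  ctrl.stream.listen((v) => print('got $v'));
  ctrl.add(1); // with sync: true, 'got 1' prints before the next line runs
  print('after add');
}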

View File

@@ -181,40 +181,6 @@ class ScpMessageBaseHandle {
}
return null;
// if (!_packetBuffer.containsKey(key)) {
// _packetBuffer[key] = List.filled(spTotal, []);
// _startTimer(key);
// }
//
// //
// if (spIndex < 1 || spIndex > spTotal) {
// // print(
// // 'Invalid spTotal: $spTotal spIndex: $spIndex for messageId: $messageId');
// return null;
// }
//
// //
// _packetBuffer[key]![spIndex - 1] = byte;
//
// //
// if (_packetBuffer[key]!.every((packet) => packet.isNotEmpty)) {
// //
// Uint8List completePayload = Uint8List.fromList(
// _packetBuffer[key]!.expand((packet) => packet).toList());
// //
// _clearPacketData(key);
//
// build a TalkData from the complete payload
// if (payloadType == PayloadTypeConstant.talkData) {
// final talkData = TalkData();
// talkData.mergeFromBuffer(completePayload);
// return talkData;
// }
// } else {
// // null
// return null;
// }
}
//

View File

@@ -52,7 +52,7 @@ class ScpMessageHandlerFactory {
case PayloadTypeConstant.talkExpect:
return UdpTalkExpectHandler();
case PayloadTypeConstant.talkData:
return UdpTalkDataHandler();
return UdpTalkDataHandler.instance;
case PayloadTypeConstant.talkHangup:
return UdpTalkHangUpHandler();
case PayloadTypeConstant.RbcuInfo:

View File

@@ -1,5 +1,6 @@
import 'dart:async';
import 'dart:io';
import 'dart:isolate';
import 'dart:math';
import 'dart:typed_data';
@@ -50,6 +51,10 @@ import 'package:star_lock/tools/deviceInfo_utils.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:uuid/uuid.dart';
// Socket option constants
const int SO_RCVBUF = 8; // receive-buffer option number (Linux/Android SOL_SOCKET)
const int SO_SNDBUF = 7; // send-buffer option number (Linux/Android SOL_SOCKET)
class StartChartManage {
// private constructor: prevents external construction with new
StartChartManage._internal();
@@ -125,6 +130,17 @@ class StartChartManage {
// StartChartTalkStatus
StartChartTalkStatus talkStatus = StartChartTalkStatus.instance;
// AV-frame packet-loss statistics
final Map<int, Set<int>> _avFrameParts = {};
int _avFrameTotal = 0;
int _avFrameLost = 0;
//
double getAvFrameLossRate() {
if (_avFrameTotal == 0) return 0.0;
return _avFrameLost / _avFrameTotal;
}
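// How these counters are fed is not shown in this diff; a plausible sketch
// (assumption), where _avFrameParts maps a messageId to the set of received
// sub-packet indexes and each frame is settled exactly once:
void recordAvPart(int messageId, int spIndex) {
  _avFrameParts.putIfAbsent(messageId, () => <int>{}).add(spIndex);
}
void settleAvFrame(int messageId, int spTotal) {
  final received = _avFrameParts.remove(messageId)?.length ?? 0;
  _avFrameTotal += spTotal; // every sub-packet that was expected
  _avFrameLost += spTotal - received; // those that never arrived
}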
//
Future<void> init() async {
if (F.isXHJ) {
@@ -225,6 +241,24 @@ class StartChartManage {
var addressIListenFrom = InternetAddress.anyIPv4;
RawDatagramSocket.bind(addressIListenFrom, localPort)
.then((RawDatagramSocket socket) {
// set the receive buffer size (SO_RCVBUF = 8)
socket.setRawOption(
RawSocketOption.fromInt(
RawSocketOption.levelSocket,
8, // SO_RCVBUF for Android/iOS
2 * 1024 * 1024, // 2MB receive buffer
),
);
// set the send buffer size (SO_SNDBUF = 7)
socket.setRawOption(
RawSocketOption.fromInt(
RawSocketOption.levelSocket,
7, // SO_SNDBUF for Android/iOS
2 * 1024 * 1024, // 2MB send buffer
),
);
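// Note: 8 and 7 are the Linux/Android SOL_SOCKET option numbers for
// SO_RCVBUF and SO_SNDBUF; Darwin (iOS/macOS) numbers them 0x1002 and
// 0x1001, so these calls may be rejected there. A defensive variant
// (sketch, not what this MR does) using the constants declared above:
//
//   for (final opt in [SO_RCVBUF, SO_SNDBUF]) {
//     try {
//       socket.setRawOption(RawSocketOption.fromInt(
//           RawSocketOption.levelSocket, opt, 2 * 1024 * 1024));
//     } catch (e) {
//       AppLog.log('setRawOption($opt) rejected: $e');
//     }
//   }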
_udpSocket = socket;
/// Broadcast
@@ -1017,35 +1051,25 @@ class StartChartManage {
void _onReceiveData(RawDatagramSocket socket, BuildContext context) {
socket.listen((RawSocketEvent event) {
if (event == RawSocketEvent.read) {
Datagram? dg = socket.receive();
try {
if (dg?.data != null) {
final deserialize = ScpMessage.deserialize(dg!.data);
// //ToDo:
// UdpTalkDataHandler().updateRecvDataRate(dg.data.length);
// //
// Provider.of<DebugInfoModel>(context, listen: false).updateDebugInfo(
// UdpTalkDataHandler().getLastRecvDataRate() ~/ 1024, // KB
// UdpTalkDataHandler().getLastRecvPacketCount(),
// UdpTalkDataHandler().getLastSendDataRate() ~/ 1024, // KB
// UdpTalkDataHandler().getLastSendPacketCount(),
// );
if (deserialize != null) {
//
_handleUdpResultData(deserialize);
}
if (deserialize.PayloadType != PayloadTypeConstant.heartbeat) {
if (deserialize.Payload != null) {
// _log(text: 'UDP received a struct payload ---> $deserialize');
Datagram? dg;
while ((dg = socket.receive()) != null) {
try {
if (dg?.data != null) {
// Fallback: drain every datagram queued on the socket during this
// read event, instead of reading a single one.
final deserialize = ScpMessage.deserialize(dg!.data);
// if (deserialize.PayloadType == PayloadTypeConstant.talkData) {
// _log(
// text: 'messageId:${deserialize.MessageId},'
// 'SpTotal:${deserialize.SpTotal},SpIndex:${deserialize.SpIndex}');
// }
if (deserialize != null) {
_handleUdpResultData(deserialize);
}
// _log(text: 'text---》${utf8.decode(deserialize.Payload)}');
}
} catch (e, stackTrace) {
throw StartChartMessageException('$e\n,$stackTrace');
}
} catch (e, stackTrace) {
throw StartChartMessageException('$e\n,$stackTrace');
}
}
});
@@ -1056,14 +1080,6 @@ class StartChartManage {
final int payloadType = scpMessage.PayloadType ?? 0;
final int messageType = scpMessage.MessageType ?? 0;
try {
//
if (scpMessage.SpIndex != null &&
scpMessage.SpTotal != null &&
scpMessage.MessageId != null) {
PacketLossStatistics().recordPacket(
scpMessage.MessageId!, scpMessage.SpIndex!, scpMessage.SpTotal!);
}
final ScpMessageHandler handler =
ScpMessageHandlerFactory.createHandler(payloadType);
if (messageType == MessageTypeConstant.Req) {

View File

@@ -25,6 +25,7 @@ import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@@ -37,6 +38,7 @@ import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:video_decode_plugin/nalu_utils.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
import '../../../../tools/baseGetXController.dart';
@@ -50,6 +52,15 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int audioBufferSize = 2; // keep two audio frames buffered
// Dynamic rollover threshold: smaller when the last frameSeq is small
int _getFrameSeqRolloverThreshold(int lastSeq) {
if (lastSeq > 2000) {
return 1000;
} else {
return (lastSeq / 2).round();
}
}
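// Worked examples: _getFrameSeqRolloverThreshold(5000) == 1000 (capped
// branch), (800) == 400, (100) == 50. A backwards jump therefore counts
// as a new stream only when it exceeds half of a small lastSeq, or 1000
// once lastSeq is above 2000.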
//
final List<int> _bufferedAudioFrames = <int>[];
@@ -87,15 +98,17 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// waiting-for-I-frame state
bool _waitingForIFrame = false;
int? lastDecodedIFrameSeq;
//
Future<void> _initVideoDecoder() async {
try {
state.isLoading.value = true;
//
final config = VideoDecoderConfig(
width: 864,
width: StartChartManage().videoWidth,
//
height: 480,
height: StartChartManage().videoHeight,
codecType: 'h264',
);
// initialize the decoder and obtain a textureId
@@ -157,29 +170,37 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int pts,
int frameSeq,
int frameSeqI,
ScpMessage scpMessage,
) {
// Detect frameSeq rollover: an I-frame from a new stream
// Dynamic threshold: smaller when frameSeq itself is small
if (!_pendingStreamReset &&
_lastFrameSeq != null &&
frameType == TalkDataH264Frame_FrameTypeE.I &&
frameSeq < _lastFrameSeq!) {
// New-stream I-frame: enter loading and reset all local state
AppLog.log(
    'Detected new-stream I-frame (frameSeq rollover); entering loading and resetting: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
Future.microtask(() => state.isLoading.value = true);
_pendingStreamReset = true;
//
_stopFrameProcessTimer();
//
_resetDecoderForNewStream(_pendingResetWidth, _pendingResetHeight);
//
_lastFrameSeq = null;
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
//
_startFrameProcessTimer();
// no return: let this I-frame initialize the decoder and get decoded
//
int dynamicThreshold = _getFrameSeqRolloverThreshold(_lastFrameSeq!);
if ((_lastFrameSeq! - frameSeq) > dynamicThreshold) {
// Large I-frame frameSeq rollback: enter loading and reset all local state
AppLog.log('Detected new stream (large I-frame frameSeq rollback); entering loading and resetting: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq, threshold=$dynamicThreshold');
Future.microtask(() => state.isLoading.value = true);
_pendingStreamReset = true;
//
_stopFrameProcessTimer();
//
_resetDecoderForNewStream(_pendingResetWidth, _pendingResetHeight);
//
_lastFrameSeq = null;
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
//
_startFrameProcessTimer();
// no return: let this I-frame initialize the decoder and get decoded
//
} else {
// Out-of-order I-frame within the rollover threshold: drop it
AppLog.log('Out-of-order I-frame (within rollover threshold $dynamicThreshold); dropping: frameSeq=$frameSeq, lastFrameSeq=$_lastFrameSeq');
return;
}
}
// While a stream reset is pending, only an I-frame may proceed
if (_pendingStreamReset) {
@@ -212,6 +233,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
'frameSeq': frameSeq,
'frameSeqI': frameSeqI,
'pts': pts,
'scpMessage': scpMessage,
};
// P/B frames
@@ -257,14 +279,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
//
state.isProcessingFrame = true;
// Prefer I-frames: pick the buffered I-frame with the smallest frameSeq
final iFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
.toList();
iFrames
.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
try {
//
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.isNotEmpty
? state.h264FrameBuffer.removeAt(0)
: null;
if (iFrames.isNotEmpty) {
// An I-frame exists: consume the I-frame with the smallest frameSeq
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap =
state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
@@ -274,6 +307,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null ||
frameType == null ||
frameSeq == null ||
@@ -282,25 +316,86 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.isProcessingFrame = false;
return;
}
// skip when textureId is null
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;
// AppLog.log('I-frame sent to decoder. frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
//     'frameType:${frameType},messageId:${scpMessage!.MessageId}');
// final spsData = NaluUtils.filterNalusByType(frameData, 7);
// final ppsData = NaluUtils.filterNalusByType(frameData, 8);
// AppLog.log('SPSDATA:${spsData}ppsData:${ppsData}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
} catch (e) {
AppLog.log('Failed to process buffered frame: $e');
} finally {
//
state.isProcessingFrame = false;
return;
}
// No buffered I-frame: consume P-frames whose refIFrameSeq equals lastDecodedIFrameSeq
if (lastDecodedIFrameSeq != null) {
final validPFrames = state.h264FrameBuffer
.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == lastDecodedIFrameSeq)
.toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort(
(a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] == minPFrame['frameSeq'] &&
f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap =
state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null ||
frameType == null ||
frameSeq == null ||
frameSeqI == null ||
pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
// AppLog.log('P-frame sent to decoder. frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
//     'frameType:${frameType},messageId:${scpMessage!.MessageId}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
}
// nothing decodable yet; wait for an I-frame to arrive
}
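// Standalone sketch (illustration only, not part of this MR): the selection
// policy above reduces to "any buffered I-frame first, smallest frameSeq
// wins; otherwise the smallest P-frame referencing the last decoded I-frame;
// otherwise wait".
void main() {
  final buffer = <Map<String, Object>>[
    {'type': 'P', 'seq': 12, 'refI': 10},
    {'type': 'P', 'seq': 5, 'refI': 3}, // stale: references an old I-frame
    {'type': 'I', 'seq': 10, 'refI': 10},
    {'type': 'P', 'seq': 11, 'refI': 10},
  ];
  int? lastI;
  final order = <String>[];
  while (buffer.isNotEmpty) {
    final iFrames = buffer.where((f) => f['type'] == 'I').toList()
      ..sort((a, b) => (a['seq'] as int).compareTo(b['seq'] as int));
    Map<String, Object>? pick;
    if (iFrames.isNotEmpty) {
      pick = iFrames.first;
      lastI = pick['seq'] as int;
    } else {
      final ps = buffer
          .where((f) => f['type'] == 'P' && f['refI'] == lastI)
          .toList()
        ..sort((a, b) => (a['seq'] as int).compareTo(b['seq'] as int));
      if (ps.isEmpty) break; // nothing decodable; wait for the next I-frame
      pick = ps.first;
    }
    buffer.remove(pick);
    order.add('${pick['type']}${pick['seq']}');
  }
  print(order); // [I10, P11, P12]; the stale P5 is never consumed
}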
///
@@ -318,6 +413,11 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
void _startListenTalkData() {
// cancel any previous subscription before listening again
if (_streamSubscription != null) {
_streamSubscription!.cancel();
_streamSubscription = null;
}
//
if (_isListening) {
AppLog.log("已经存在数据流监听,避免重复监听");
@@ -329,37 +429,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
_streamSubscription = state.talkDataRepository.talkDataStream
.listen((TalkDataModel talkDataModel) async {
final talkData = talkDataModel.talkData;
final talkDataH264Frame = talkDataModel.talkDataH264Frame;
final contentType = talkData!.contentType;
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); //
}
state.audioBuffer.add(talkData); //
//
_playAudioFrames();
break;
case TalkData_ContentTypeE.H264:
// H264 frame
if (state.textureId.value != null) {
if (talkDataH264Frame != null) {
_addFrameToBuffer(
talkData.content,
talkDataH264Frame.frameType,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
);
}
} else {
AppLog.log('Cannot handle H264 frame: textureId is null');
}
break;
}
_processFrame(talkDataModel);
});
}
@@ -585,7 +655,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
TalkExpectReq talkExpectReq = TalkExpectReq();
state.isOpenVoice.value = !state.isOpenVoice.value;
// map the current quality to a videoType
VideoTypeE currentVideoType = qualityToVideoType[state.currentQuality.value] ?? VideoTypeE.H264;
VideoTypeE currentVideoType =
qualityToVideoType[state.currentQuality.value] ?? VideoTypeE.H264;
if (!state.isOpenVoice.value) {
talkExpectReq = TalkExpectReq(
videoType: [currentVideoType],
@@ -1104,144 +1175,144 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
// I-frame handling method
void _handleIFrameWithSpsPpsAndIdr(
List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// Clear all unprocessed frames before this I-frame; re-buffer SPS/PPS and the I-frame
state.h264FrameBuffer.clear();
_extractAndBufferSpsPpsForBuffer(
frameData, durationMs, frameSeq, frameSeqI);
// Write cached SPS/PPS first, then the I-frame payload (IDR)
if (spsCache == null || ppsCache == null) {
// no SPS/PPS cached yet; drop this I-frame
return;
}
// buffer SPS/PPS first
_addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
_addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
// Split the I-frame packet into NALUs and keep only IDR (type 5)
List<List<int>> nalus = [];
int i = 0;
List<int> data = frameData;
while (i < data.length - 3) {
int start = -1;
int next = -1;
if (data[i] == 0x00 && data[i + 1] == 0x00) {
if (data[i + 2] == 0x01) {
start = i;
i += 3;
} else if (i + 3 < data.length &&
data[i + 2] == 0x00 &&
data[i + 3] == 0x01) {
start = i;
i += 4;
} else {
i++;
continue;
}
next = i;
while (next < data.length - 3) {
if (data[next] == 0x00 &&
data[next + 1] == 0x00 &&
((data[next + 2] == 0x01) ||
(data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
break;
}
next++;
}
nalus.add(data.sublist(start, next));
i = next;
} else {
i++;
}
}
int nalusTotalLen =
nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
if (nalus.isEmpty && data.isNotEmpty) {
nalus.add(data);
} else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
nalus.add(data.sublist(nalusTotalLen));
}
for (final nalu in nalus) {
int offset = 0;
if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
if (nalu[2] == 0x01)
offset = 3;
else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
}
if (nalu.length > offset) {
int naluType = nalu[offset] & 0x1F;
if (naluType == 5) {
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
}
}
}
}
// void _handleIFrameWithSpsPpsAndIdr(
// List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// // Clear all unprocessed frames before this I-frame; re-buffer SPS/PPS and the I-frame
// state.h264FrameBuffer.clear();
// _extractAndBufferSpsPpsForBuffer(
// frameData, durationMs, frameSeq, frameSeqI);
// // Write cached SPS/PPS first, then the I-frame payload (IDR)
// if (spsCache == null || ppsCache == null) {
// // no SPS/PPS cached yet; drop this I-frame
// return;
// }
// // buffer SPS/PPS first
// _addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
// frameSeq, frameSeqI);
// _addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
// frameSeq, frameSeqI);
// // Split the I-frame packet into NALUs and keep only IDR (type 5)
// List<List<int>> nalus = [];
// int i = 0;
// List<int> data = frameData;
// while (i < data.length - 3) {
// int start = -1;
// int next = -1;
// if (data[i] == 0x00 && data[i + 1] == 0x00) {
// if (data[i + 2] == 0x01) {
// start = i;
// i += 3;
// } else if (i + 3 < data.length &&
// data[i + 2] == 0x00 &&
// data[i + 3] == 0x01) {
// start = i;
// i += 4;
// } else {
// i++;
// continue;
// }
// next = i;
// while (next < data.length - 3) {
// if (data[next] == 0x00 &&
// data[next + 1] == 0x00 &&
// ((data[next + 2] == 0x01) ||
// (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
// break;
// }
// next++;
// }
// nalus.add(data.sublist(start, next));
// i = next;
// } else {
// i++;
// }
// }
// int nalusTotalLen =
// nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
// if (nalus.isEmpty && data.isNotEmpty) {
// nalus.add(data);
// } else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
// nalus.add(data.sublist(nalusTotalLen));
// }
// for (final nalu in nalus) {
// int offset = 0;
// if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
// if (nalu[2] == 0x01)
// offset = 3;
// else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
// }
// if (nalu.length > offset) {
// int naluType = nalu[offset] & 0x1F;
// if (naluType == 5) {
// _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs,
// frameSeq, frameSeqI);
// }
// }
// }
// }
// P-frame handling method
void _handlePFrame(
List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// Split the P-frame into NALUs and keep only type 1 (non-IDR slices)
List<List<int>> nalus = [];
int i = 0;
List<int> data = frameData;
while (i < data.length - 3) {
int start = -1;
int next = -1;
if (data[i] == 0x00 && data[i + 1] == 0x00) {
if (data[i + 2] == 0x01) {
start = i;
i += 3;
} else if (i + 3 < data.length &&
data[i + 2] == 0x00 &&
data[i + 3] == 0x01) {
start = i;
i += 4;
} else {
i++;
continue;
}
next = i;
while (next < data.length - 3) {
if (data[next] == 0x00 &&
data[next + 1] == 0x00 &&
((data[next + 2] == 0x01) ||
(data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
break;
}
next++;
}
nalus.add(data.sublist(start, next));
i = next;
} else {
i++;
}
}
int nalusTotalLen =
nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
if (nalus.isEmpty && data.isNotEmpty) {
nalus.add(data);
} else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
nalus.add(data.sublist(nalusTotalLen));
}
for (final nalu in nalus) {
int offset = 0;
if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
if (nalu[2] == 0x01)
offset = 3;
else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
}
if (nalu.length > offset) {
int naluType = nalu[offset] & 0x1F;
if (naluType == 1) {
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs,
frameSeq, frameSeqI);
}
}
}
}
// void _handlePFrame(
// List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// // Split the P-frame into NALUs and keep only type 1 (non-IDR slices)
// List<List<int>> nalus = [];
// int i = 0;
// List<int> data = frameData;
// while (i < data.length - 3) {
// int start = -1;
// int next = -1;
// if (data[i] == 0x00 && data[i + 1] == 0x00) {
// if (data[i + 2] == 0x01) {
// start = i;
// i += 3;
// } else if (i + 3 < data.length &&
// data[i + 2] == 0x00 &&
// data[i + 3] == 0x01) {
// start = i;
// i += 4;
// } else {
// i++;
// continue;
// }
// next = i;
// while (next < data.length - 3) {
// if (data[next] == 0x00 &&
// data[next + 1] == 0x00 &&
// ((data[next + 2] == 0x01) ||
// (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
// break;
// }
// next++;
// }
// nalus.add(data.sublist(start, next));
// i = next;
// } else {
// i++;
// }
// }
// int nalusTotalLen =
// nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
// if (nalus.isEmpty && data.isNotEmpty) {
// nalus.add(data);
// } else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
// nalus.add(data.sublist(nalusTotalLen));
// }
// for (final nalu in nalus) {
// int offset = 0;
// if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
// if (nalu[2] == 0x01)
// offset = 3;
// else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
// }
// if (nalu.length > offset) {
// int naluType = nalu[offset] & 0x1F;
// if (naluType == 1) {
// _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs,
// frameSeq, frameSeqI);
// }
// }
// }
// }
//
void onQualityChanged(String quality) async {
@@ -1293,30 +1364,92 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
Future<void> _resetDecoderForNewStream(int width, int height) async {
try {
//
_stopFrameProcessTimer();
//
if (state.textureId.value != null) {
await VideoDecodePlugin.releaseDecoder();
Future.microtask(() => state.textureId.value = null);
state.textureId.value = null;
}
//
await Future.delayed(Duration(milliseconds: 100));
//
final config = VideoDecoderConfig(
width: width,
height: height,
codecType: 'h264',
);
//
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
state.textureId.value = textureId;
AppLog.log('Decoder initialized successfully after frameSeq rollover; textureId=$textureId');
//
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('Rendering has started =======');
// first frame rendered: hide the loading state
Future.microtask(() => state.isLoading.value = false);
state.isLoading.value = false;
});
//
_startFrameProcessTimer();
//
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
hasSps = false;
hasPps = false;
spsCache = null;
ppsCache = null;
} else {
AppLog.log('Decoder initialization failed after frameSeq rollover');
state.isLoading.value = false;
}
_startFrameProcessTimer();
} catch (e) {
AppLog.log('Decoder initialization error on frameSeq rollover: $e');
state.isLoading.value = false;
}
}
void _processFrame(TalkDataModel talkDataModel) {
final talkData = talkDataModel.talkData;
final talkDataH264Frame = talkDataModel.talkDataH264Frame;
final scpMessage = talkDataModel.scpMessage;
final contentType = talkData!.contentType;
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); //
}
state.audioBuffer.add(talkData); //
//
_playAudioFrames();
break;
case TalkData_ContentTypeE.H264:
// H264 frame
if (state.textureId.value != null) {
if (talkDataH264Frame != null) {
_addFrameToBuffer(
talkData.content,
talkDataH264Frame.frameType,
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
scpMessage!,
);
}
} else {
AppLog.log('Cannot handle H264 frame: textureId is null');
}
break;
}
}
}

View File

@@ -114,16 +114,16 @@ class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
// use RotatedBox to rotate the video
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // slight scale-up removes the white edge on iOS
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
// use RotatedBox to rotate the video
quarterTurns: startChartManage.rotateAngle ~/ 90,
child: Platform.isIOS
? Transform.scale(
scale: 1.008, // slight scale-up removes the white edge on iOS
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
)
: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,

View File

@@ -110,8 +110,8 @@ class TalkViewNativeDecodeState {
// H264 frame buffer related
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264 frame buffer
final int maxFrameBufferSize = 15; //
final int targetFps = 30; // target frame rate, paired with the native buffer
final int maxFrameBufferSize = 50; //
final int targetFps = 60; // target frame rate, paired with the native buffer
Timer? frameProcessTimer; //
bool isProcessingFrame = false; //
int lastProcessedTimestamp = 0; //
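// If frameProcessTimer derives its period from targetFps (an assumption;
// the timer code is outside this hunk), the bump from 30 to 60 fps halves
// the drain interval from ~33 ms to ~16 ms, pairing with the deeper
// 50-frame buffer. Sketch (Timer is from dart:async; processNextFrame is
// a placeholder name):
//
//   final period = Duration(milliseconds: 1000 ~/ targetFps);
//   frameProcessTimer = Timer.periodic(period, (_) => processNextFrame());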

View File

@@ -130,7 +130,7 @@ dependencies:
video_decode_plugin:
git:
url: git@code.star-lock.cn:liyi/video_decode_plugin.git
ref: 68bb4b7fb637ef5a78856908e1bc464f50fe967a
ref: 5dfbd190fdc61dab3fc93543606b85d6b826a2ed
flutter_localizations:
sdk: flutter
@@ -319,6 +319,7 @@ flutter:
assets:
- images/
- images/tabbar/
- images/guide/
- images/main/
- images/mine/
- images/lockType/