Merge branch 'develop_sky_liyi' into 'develop_sky'

Develop sky liyi

See merge request StarlockTeam/app-starlock!174
This commit is contained in:
李仪 2025-06-18 07:18:48 +00:00
commit 6385a2c7b9
7 changed files with 370 additions and 199 deletions

View File

@@ -33,7 +33,10 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
if (scpMessage.Payload != null) {
final TalkData talkData = scpMessage.Payload;
//
_handleTalkData(talkData: talkData);
_handleTalkData(
talkData: talkData,
scpMessage: scpMessage,
);
}
}
@@ -93,12 +96,15 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
return hexList.join('');
}
void _handleTalkData({required TalkData talkData}) {
void _handleTalkData({
required TalkData talkData,
required ScpMessage scpMessage,
}) {
if (talkData == null) return;
final contentType = talkData.contentType;
switch (contentType) {
case TalkData_ContentTypeE.H264:
_handleVideoH264(talkData);
_handleVideoH264(talkData, scpMessage);
break;
case TalkData_ContentTypeE.Image:
_handleVideoImage(talkData);
@@ -113,10 +119,10 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
}
/// h264协议的数据
void _handleVideoH264(TalkData talkData) {
void _handleVideoH264(TalkData talkData, ScpMessage scpMessage) {
final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame();
talkDataH264Frame.mergeFromBuffer(talkData.content);
frameHandler.handleFrame(talkDataH264Frame, talkData);
frameHandler.handleFrame(talkDataH264Frame, talkData, scpMessage);
}
///

View File

@@ -93,20 +93,21 @@ class UdpTalkExpectHandler extends ScpMessageBaseHandle
}
if (isWifiLockType ||
(talkExpectResp.rotate == 0 &&
talkExpectResp.width == 640 &&
talkExpectResp.height == 480)) {
talkExpectResp.width == 640 &&
talkExpectResp.height == 480) &&
talkStatus.status != TalkStatus.answeredSuccessfully) {
Get.toNamed(Routers.imageTransmissionView);
return;
}
if (startChartManage
.getDefaultTalkExpect()
.videoType
.contains(VideoTypeE.H264)) {
.getDefaultTalkExpect()
.videoType
.contains(VideoTypeE.H264) &&
talkStatus.status != TalkStatus.answeredSuccessfully) {
Get.toNamed(
Routers.h264View,
);
} else {
} else if (talkStatus.status != TalkStatus.answeredSuccessfully) {
Get.toNamed(
Routers.starChartTalkView,
);

View File

@@ -3,6 +3,7 @@ import 'dart:typed_data';
import 'package:flutter/services.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import '../../proto/talk_data_h264_frame.pb.dart';
@@ -12,8 +13,12 @@ class H264FrameHandler {
H264FrameHandler({required this.onCompleteFrame});
void handleFrame(TalkDataH264Frame frame, TalkData talkData) {
onCompleteFrame(
TalkDataModel(talkData: talkData, talkDataH264Frame: frame));
void handleFrame(
TalkDataH264Frame frame, TalkData talkData, ScpMessage scpMessage) {
onCompleteFrame(TalkDataModel(
talkData: talkData,
talkDataH264Frame: frame,
scpMessage: scpMessage,
));
}
}

View File

@@ -1,9 +1,12 @@
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
class TalkDataModel {
TalkData? talkData;
TalkDataH264Frame? talkDataH264Frame;
ScpMessage? scpMessage;
TalkDataModel({required this.talkData, this.talkDataH264Frame});
TalkDataModel(
{required this.talkData, this.talkDataH264Frame, this.scpMessage});
}

View File

@@ -50,6 +50,10 @@ import 'package:star_lock/tools/deviceInfo_utils.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:uuid/uuid.dart';
// Socket选项常量
const int SO_RCVBUF = 8; //
const int SO_SNDBUF = 7; //
class StartChartManage {
// new对象
StartChartManage._internal();
@@ -125,6 +129,17 @@ class StartChartManage {
// StartChartTalkStatus
StartChartTalkStatus talkStatus = StartChartTalkStatus.instance;
//
final Map<int, Set<int>> _avFrameParts = {};
int _avFrameTotal = 0;
int _avFrameLost = 0;
//
double getAvFrameLossRate() {
if (_avFrameTotal == 0) return 0.0;
return _avFrameLost / _avFrameTotal;
}
//
Future<void> init() async {
if (F.isXHJ) {
@@ -225,6 +240,25 @@ class StartChartManage {
var addressIListenFrom = InternetAddress.anyIPv4;
RawDatagramSocket.bind(addressIListenFrom, localPort)
.then((RawDatagramSocket socket) {
// (SO_RCVBUF = 8)
socket.setRawOption(
RawSocketOption.fromInt(
RawSocketOption.levelSocket,
8, // SO_RCVBUF for Android/iOS
2 * 1024 * 1024, // 2MB receive buffer
),
);
// (SO_SNDBUF = 7)
socket.setRawOption(
RawSocketOption.fromInt(
RawSocketOption.levelSocket,
7, // SO_SNDBUF for Android/iOS
2 * 1024 * 1024, // 2MB send buffer
),
);
_udpSocket = socket;
/// 广
@@ -1017,35 +1051,54 @@ class StartChartManage {
void _onReceiveData(RawDatagramSocket socket, BuildContext context) {
socket.listen((RawSocketEvent event) {
if (event == RawSocketEvent.read) {
Datagram? dg = socket.receive();
try {
if (dg?.data != null) {
final deserialize = ScpMessage.deserialize(dg!.data);
Datagram? dg;
while ((dg = socket.receive()) != null) {
try {
if (dg?.data != null) {
final deserialize = ScpMessage.deserialize(dg!.data);
// //ToDo:
// UdpTalkDataHandler().updateRecvDataRate(dg.data.length);
// //
// Provider.of<DebugInfoModel>(context, listen: false).updateDebugInfo(
// UdpTalkDataHandler().getLastRecvDataRate() ~/ 1024, // KB
// UdpTalkDataHandler().getLastRecvPacketCount(),
// UdpTalkDataHandler().getLastSendDataRate() ~/ 1024, // KB
// UdpTalkDataHandler().getLastSendPacketCount(),
// );
if (deserialize != null) {
//
_handleUdpResultData(deserialize);
}
if (deserialize.PayloadType != PayloadTypeConstant.heartbeat) {
if (deserialize.Payload != null) {
// _log(text: 'Udp收到结构体数据---》$deserialize');
// PayloadType==talkData的数据包
if (deserialize != null &&
deserialize.PayloadType == PayloadTypeConstant.talkData) {
int? msgId = deserialize.MessageId;
int spTotal = deserialize.SpTotal ?? 1;
int spIndex = deserialize.SpIndex ?? 1;
if (msgId != null) {
//
_avFrameParts.putIfAbsent(msgId, () => <int>{});
_avFrameParts[msgId]!.add(spIndex);
//
if (spIndex == spTotal) {
_avFrameTotal++;
if (_avFrameParts[msgId]!.length < spTotal) {
_avFrameLost++;
// _log(text: '音视频丢包丢失的messageId: $msgId');
}
_avFrameParts.remove(msgId);
// 100
if (_avFrameTotal % 100 == 0) {
_log(
text:
'音视频帧丢包率: ${(getAvFrameLossRate() * 100).toStringAsFixed(2)}%');
}
}
}
}
if (deserialize != null) {
//
_handleUdpResultData(deserialize);
}
// if (deserialize.PayloadType != PayloadTypeConstant.heartbeat) {
// if (deserialize.Payload != null) {
// _log(text: 'Udp收到结构体数据---》$deserialize');
// }
// _log(text: 'text---》${utf8.decode(deserialize.Payload)}');
// }
}
} catch (e, stackTrace) {
throw StartChartMessageException('$e\n,$stackTrace');
}
} catch (e, stackTrace) {
throw StartChartMessageException('$e\n,$stackTrace');
}
}
});

View File

@@ -25,6 +25,7 @@ import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@@ -87,15 +88,17 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
// I帧状态
bool _waitingForIFrame = false;
int? lastDecodedIFrameSeq;
//
Future<void> _initVideoDecoder() async {
try {
state.isLoading.value = true;
//
final config = VideoDecoderConfig(
width: 864,
width: StartChartManage().videoWidth,
//
height: 480,
height: StartChartManage().videoHeight,
codecType: 'h264',
);
// textureId
@@ -157,6 +160,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
int pts,
int frameSeq,
int frameSeqI,
ScpMessage scpMessage,
) {
// frameSeq回绕I帧
if (!_pendingStreamReset &&
@@ -212,6 +216,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
'frameSeq': frameSeq,
'frameSeqI': frameSeqI,
'pts': pts,
'scpMessage': scpMessage,
};
// P/B帧
@@ -257,14 +262,25 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
return;
}
//
state.isProcessingFrame = true;
// I帧frameSeq最小的I帧消费
final iFrames = state.h264FrameBuffer
.where((f) => f['frameType'] == TalkDataH264Frame_FrameTypeE.I)
.toList();
iFrames
.sort((a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
try {
//
final Map<String, dynamic>? frameMap = state.h264FrameBuffer.isNotEmpty
? state.h264FrameBuffer.removeAt(0)
: null;
if (iFrames.isNotEmpty) {
// I帧I帧frameSeq
final minIFrame = iFrames.first;
final minIFrameSeq = minIFrame['frameSeq'];
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.I &&
f['frameSeq'] == minIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap =
state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
@@ -274,6 +290,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null ||
frameType == null ||
frameSeq == null ||
@@ -282,25 +299,82 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
state.isProcessingFrame = false;
return;
}
// textureId为null时跳过
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
lastDecodedIFrameSeq = minIFrameSeq;
// AppLog.log('送入解码器的P帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: frameType == TalkDataH264Frame_FrameTypeE.I ? 0 : 1,
frameType: 0,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
} catch (e) {
AppLog.log('处理缓冲帧失败: $e');
} finally {
//
state.isProcessingFrame = false;
return;
}
// I帧时refIFrameSeq等于lastDecodedIFrameSeq的P帧
if (lastDecodedIFrameSeq != null) {
final validPFrames = state.h264FrameBuffer
.where((f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeqI'] == lastDecodedIFrameSeq)
.toList();
if (validPFrames.isNotEmpty) {
validPFrames.sort(
(a, b) => (a['frameSeq'] as int).compareTo(b['frameSeq'] as int));
final minPFrame = validPFrames.first;
final targetIndex = state.h264FrameBuffer.indexWhere(
(f) =>
f['frameType'] == TalkDataH264Frame_FrameTypeE.P &&
f['frameSeq'] == minPFrame['frameSeq'] &&
f['frameSeqI'] == lastDecodedIFrameSeq,
);
state.isProcessingFrame = true;
final Map<String, dynamic>? frameMap =
state.h264FrameBuffer.removeAt(targetIndex);
if (frameMap == null) {
state.isProcessingFrame = false;
return;
}
final List<int>? frameData = frameMap['frameData'];
final TalkDataH264Frame_FrameTypeE? frameType = frameMap['frameType'];
final int? frameSeq = frameMap['frameSeq'];
final int? frameSeqI = frameMap['frameSeqI'];
final int? pts = frameMap['pts'];
final ScpMessage? scpMessage = frameMap['scpMessage'];
if (frameData == null ||
frameType == null ||
frameSeq == null ||
frameSeqI == null ||
pts == null) {
state.isProcessingFrame = false;
return;
}
if (state.textureId.value == null) {
state.isProcessingFrame = false;
return;
}
// AppLog.log('送入解码器的P帧数据frameSeq:${frameSeq},frameSeqI:${frameSeqI},'
// 'frameType:${frameType},messageId:${scpMessage!.MessageId}');
await VideoDecodePlugin.sendFrame(
frameData: frameData,
frameType: 1,
frameSeq: frameSeq,
timestamp: pts,
splitNalFromIFrame: true,
refIFrameSeq: frameSeqI,
);
state.isProcessingFrame = false;
return;
}
}
// I帧到来
}
///
@@ -331,6 +405,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
.listen((TalkDataModel talkDataModel) async {
final talkData = talkDataModel.talkData;
final talkDataH264Frame = talkDataModel.talkDataH264Frame;
final scpMessage = talkDataModel.scpMessage;
final contentType = talkData!.contentType;
//
@@ -345,7 +420,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
break;
case TalkData_ContentTypeE.H264:
// H264帧
if (state.textureId.value != null) {
if (state.textureId.value != null || true) {
if (talkDataH264Frame != null) {
_addFrameToBuffer(
talkData.content,
@@ -353,6 +428,7 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
talkData.durationMs,
talkDataH264Frame.frameSeq,
talkDataH264Frame.frameSeqI,
scpMessage!,
);
}
} else {
@@ -585,7 +661,8 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
TalkExpectReq talkExpectReq = TalkExpectReq();
state.isOpenVoice.value = !state.isOpenVoice.value;
// videoType
VideoTypeE currentVideoType = qualityToVideoType[state.currentQuality.value] ?? VideoTypeE.H264;
VideoTypeE currentVideoType =
qualityToVideoType[state.currentQuality.value] ?? VideoTypeE.H264;
if (!state.isOpenVoice.value) {
talkExpectReq = TalkExpectReq(
videoType: [currentVideoType],
@@ -1104,144 +1181,144 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
}
// I帧处理方法
void _handleIFrameWithSpsPpsAndIdr(
List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// I帧前所有未处理帧SPS/PPS/I帧
state.h264FrameBuffer.clear();
_extractAndBufferSpsPpsForBuffer(
frameData, durationMs, frameSeq, frameSeqI);
// SPS/PPS就先写入I帧本体IDR
if (spsCache == null || ppsCache == null) {
// SPS/PPS缓存I帧
return;
}
// SPS/PPS
_addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
_addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
// I帧包IDRtype 5
List<List<int>> nalus = [];
int i = 0;
List<int> data = frameData;
while (i < data.length - 3) {
int start = -1;
int next = -1;
if (data[i] == 0x00 && data[i + 1] == 0x00) {
if (data[i + 2] == 0x01) {
start = i;
i += 3;
} else if (i + 3 < data.length &&
data[i + 2] == 0x00 &&
data[i + 3] == 0x01) {
start = i;
i += 4;
} else {
i++;
continue;
}
next = i;
while (next < data.length - 3) {
if (data[next] == 0x00 &&
data[next + 1] == 0x00 &&
((data[next + 2] == 0x01) ||
(data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
break;
}
next++;
}
nalus.add(data.sublist(start, next));
i = next;
} else {
i++;
}
}
int nalusTotalLen =
nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
if (nalus.isEmpty && data.isNotEmpty) {
nalus.add(data);
} else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
nalus.add(data.sublist(nalusTotalLen));
}
for (final nalu in nalus) {
int offset = 0;
if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
if (nalu[2] == 0x01)
offset = 3;
else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
}
if (nalu.length > offset) {
int naluType = nalu[offset] & 0x1F;
if (naluType == 5) {
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs,
frameSeq, frameSeqI);
}
}
}
}
// void _handleIFrameWithSpsPpsAndIdr(
// List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// // I帧前所有未处理帧SPS/PPS/I帧
// state.h264FrameBuffer.clear();
// _extractAndBufferSpsPpsForBuffer(
// frameData, durationMs, frameSeq, frameSeqI);
// // SPS/PPS就先写入I帧本体IDR
// if (spsCache == null || ppsCache == null) {
// // SPS/PPS缓存I帧
// return;
// }
// // SPS/PPS
// _addFrameToBuffer(spsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
// frameSeq, frameSeqI);
// _addFrameToBuffer(ppsCache!, TalkDataH264Frame_FrameTypeE.I, durationMs,
// frameSeq, frameSeqI);
// // I帧包IDRtype 5
// List<List<int>> nalus = [];
// int i = 0;
// List<int> data = frameData;
// while (i < data.length - 3) {
// int start = -1;
// int next = -1;
// if (data[i] == 0x00 && data[i + 1] == 0x00) {
// if (data[i + 2] == 0x01) {
// start = i;
// i += 3;
// } else if (i + 3 < data.length &&
// data[i + 2] == 0x00 &&
// data[i + 3] == 0x01) {
// start = i;
// i += 4;
// } else {
// i++;
// continue;
// }
// next = i;
// while (next < data.length - 3) {
// if (data[next] == 0x00 &&
// data[next + 1] == 0x00 &&
// ((data[next + 2] == 0x01) ||
// (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
// break;
// }
// next++;
// }
// nalus.add(data.sublist(start, next));
// i = next;
// } else {
// i++;
// }
// }
// int nalusTotalLen =
// nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
// if (nalus.isEmpty && data.isNotEmpty) {
// nalus.add(data);
// } else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
// nalus.add(data.sublist(nalusTotalLen));
// }
// for (final nalu in nalus) {
// int offset = 0;
// if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
// if (nalu[2] == 0x01)
// offset = 3;
// else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
// }
// if (nalu.length > offset) {
// int naluType = nalu[offset] & 0x1F;
// if (naluType == 5) {
// _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.I, durationMs,
// frameSeq, frameSeqI);
// }
// }
// }
// }
// P帧处理方法
void _handlePFrame(
List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// P帧type 1
List<List<int>> nalus = [];
int i = 0;
List<int> data = frameData;
while (i < data.length - 3) {
int start = -1;
int next = -1;
if (data[i] == 0x00 && data[i + 1] == 0x00) {
if (data[i + 2] == 0x01) {
start = i;
i += 3;
} else if (i + 3 < data.length &&
data[i + 2] == 0x00 &&
data[i + 3] == 0x01) {
start = i;
i += 4;
} else {
i++;
continue;
}
next = i;
while (next < data.length - 3) {
if (data[next] == 0x00 &&
data[next + 1] == 0x00 &&
((data[next + 2] == 0x01) ||
(data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
break;
}
next++;
}
nalus.add(data.sublist(start, next));
i = next;
} else {
i++;
}
}
int nalusTotalLen =
nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
if (nalus.isEmpty && data.isNotEmpty) {
nalus.add(data);
} else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
nalus.add(data.sublist(nalusTotalLen));
}
for (final nalu in nalus) {
int offset = 0;
if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
if (nalu[2] == 0x01)
offset = 3;
else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
}
if (nalu.length > offset) {
int naluType = nalu[offset] & 0x1F;
if (naluType == 1) {
_addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs,
frameSeq, frameSeqI);
}
}
}
}
// void _handlePFrame(
// List<int> frameData, int durationMs, int frameSeq, int frameSeqI) {
// // P帧type 1
// List<List<int>> nalus = [];
// int i = 0;
// List<int> data = frameData;
// while (i < data.length - 3) {
// int start = -1;
// int next = -1;
// if (data[i] == 0x00 && data[i + 1] == 0x00) {
// if (data[i + 2] == 0x01) {
// start = i;
// i += 3;
// } else if (i + 3 < data.length &&
// data[i + 2] == 0x00 &&
// data[i + 3] == 0x01) {
// start = i;
// i += 4;
// } else {
// i++;
// continue;
// }
// next = i;
// while (next < data.length - 3) {
// if (data[next] == 0x00 &&
// data[next + 1] == 0x00 &&
// ((data[next + 2] == 0x01) ||
// (data[next + 2] == 0x00 && data[next + 3] == 0x01))) {
// break;
// }
// next++;
// }
// nalus.add(data.sublist(start, next));
// i = next;
// } else {
// i++;
// }
// }
// int nalusTotalLen =
// nalus.isNotEmpty ? nalus.fold(0, (p, n) => p + n.length) : 0;
// if (nalus.isEmpty && data.isNotEmpty) {
// nalus.add(data);
// } else if (nalus.isNotEmpty && nalusTotalLen < data.length) {
// nalus.add(data.sublist(nalusTotalLen));
// }
// for (final nalu in nalus) {
// int offset = 0;
// if (nalu.length >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00) {
// if (nalu[2] == 0x01)
// offset = 3;
// else if (nalu[2] == 0x00 && nalu[3] == 0x01) offset = 4;
// }
// if (nalu.length > offset) {
// int naluType = nalu[offset] & 0x1F;
// if (naluType == 1) {
// _addFrameToBuffer(nalu, TalkDataH264Frame_FrameTypeE.P, durationMs,
// frameSeq, frameSeqI);
// }
// }
// }
// }
//
void onQualityChanged(String quality) async {
@@ -1293,30 +1370,56 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
//
Future<void> _resetDecoderForNewStream(int width, int height) async {
try {
//
_stopFrameProcessTimer();
//
if (state.textureId.value != null) {
await VideoDecodePlugin.releaseDecoder();
Future.microtask(() => state.textureId.value = null);
state.textureId.value = null;
}
//
await Future.delayed(Duration(milliseconds: 100));
//
final config = VideoDecoderConfig(
width: width,
height: height,
codecType: 'h264',
);
//
final textureId = await VideoDecodePlugin.initDecoder(config);
if (textureId != null) {
Future.microtask(() => state.textureId.value = textureId);
state.textureId.value = textureId;
AppLog.log('frameSeq回绕后解码器初始化成功textureId=$textureId');
//
VideoDecodePlugin.setOnFrameRenderedListener((textureId) {
AppLog.log('已经开始渲染=======');
// loading
Future.microtask(() => state.isLoading.value = false);
state.isLoading.value = false;
});
//
_startFrameProcessTimer();
//
_decodedIFrames.clear();
state.h264FrameBuffer.clear();
state.isProcessingFrame = false;
hasSps = false;
hasPps = false;
spsCache = null;
ppsCache = null;
} else {
AppLog.log('frameSeq回绕后解码器初始化失败');
state.isLoading.value = false;
}
_startFrameProcessTimer();
} catch (e) {
AppLog.log('frameSeq回绕时解码器初始化错误: $e');
state.isLoading.value = false;
}
}
}

View File

@@ -110,8 +110,8 @@ class TalkViewNativeDecodeState {
// H264帧缓冲区相关
final List<Map<String, dynamic>> h264FrameBuffer = <Map<String, dynamic>>[]; // H264帧缓冲区
final int maxFrameBufferSize = 15; //
final int targetFps = 30; // ,native的缓冲区
final int maxFrameBufferSize = 150; //
final int targetFps = 60; // ,native的缓冲区
Timer? frameProcessTimer; //
bool isProcessingFrame = false; //
int lastProcessedTimestamp = 0; //