fix:调整h264 webview播放效果

This commit is contained in:
liyi 2025-04-18 10:33:51 +08:00
parent 7ca848d61b
commit 0d8d5cb0c8
10 changed files with 391 additions and 363 deletions

View File

@ -1,86 +1,87 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<title>play</title>
</head>
<style>
html {
margin: 0;
padding: 0;
overflow: hidden; /* 防止滚动条出现 */
overflow: hidden;
/* 防止滚动条出现 */
}
body {
width: 100vw;
height: 100vh;
margin: 0;
padding: 0;
background-color: white;
overflow: hidden; /* 防止滚动条出现 */
overflow: hidden;
/* 防止滚动条出现 */
display: flex;
align-items: center;
justify-content: center;
}
#player {
object-fit:cover;
object-fit: cover;
height: 56vh;
transform: rotate(-90deg);
}
</style>
<body>
<video autoplay muted poster="images/loader-thumb.jpg"
id="player">
</video>
<script src="jmuxer.min.js"></script>
<script>
<video autoplay muted poster="images/loader-thumb.jpg" id="player">
</video>
<script src="jmuxer.min.js"></script>
<script>
if (typeof JMuxer === 'undefined') {
console.error("JMuxer is not defined. Check if jmuxer.min.js is loaded correctly.");
} else {
console.log("JMuxer loaded successfully.");
}
let jmuxer;
window.onload = function() {
try {
jmuxer = new JMuxer({
node: 'player',
mode: 'video',
debug: false,
webgl: true, // 启用WebGL加速
webglOptions: {
preserveDrawingBuffer: false,
antialias: false
},
flushingTime: 0, // 禁用自动刷新
clearBuffer: false, // 保留解码缓存
fps:20,
onReady: () => {
console.log('播放器初始化完成');
// 通知Flutter端准备就绪
window.Flutter.postMessage('ready');
}
});
jmuxer.reset();
console.log("JMuxer initialized.");
} catch (e) {
console.error("Error initializing JMuxer:", e);
if (typeof JMuxer === 'undefined') {
console.error("JMuxer is not defined. Check if jmuxer.min.js is loaded correctly.");
} else {
console.log("JMuxer loaded successfully.");
}
};
// Feed data from Flutter
function feedDataFromFlutter(data) {
const buffer = new Uint8Array(data);
jmuxer.feed({
video: buffer,
let jmuxer;
window.onload = function () {
try {
jmuxer = new JMuxer({
node: 'player',
mode: 'video',
debug: false,
readfpsfromtrack: true,
flushingTime: 0, // 立即刷新
clearBuffer: true, // 丢弃延迟帧
fps: 25, // 强制指定帧率
onReady: () => {
console.log('播放器初始化完成');
// 通知Flutter端准备就绪
window.Flutter.postMessage('ready');
},
onMissingVideoFrames: (missingFrames) => {
// console.log('Missing video frames:', missingFrames);
},
});
}
} catch (e) {
console.error("Error initializing JMuxer:", e);
}
};
// Feeds a chunk of raw H.264 bytes received from Flutter into JMuxer.
// `duration` is the per-frame display time in ms (40 ms => 25 fps).
function feedDataFromFlutter(data) {
  // Guard: Flutter can invoke this before window.onload has created the
  // muxer, or after cleanupJMuxer() nulled it; silently dropping the
  // chunk avoids a TypeError on the JS channel.
  if (!jmuxer) {
    return;
  }
  const buffer = new Uint8Array(data);
  jmuxer.feed({
    video: buffer,
    duration: 40 // per-frame duration: 40 ms => 25 fps
  });
}
// Optional: notify Flutter
@ -92,11 +93,27 @@
}
}
// Function to return to Flutter page
function returnToFlutter() {
notifyFlutter("Returning to Flutter page");
}
</script>
// Function to return to Flutter page
function returnToFlutter() {
notifyFlutter("Returning to Flutter page");
}
// Tears down the JMuxer instance so its decode buffers can be released,
// then reports the outcome to the Flutter side over the `Flutter` JS
// channel: 'cleanup_complete' on success, 'cleanup_error' on failure.
// Safe to call more than once: becomes a no-op after jmuxer is nulled.
function cleanupJMuxer() {
if (jmuxer) {
try {
jmuxer.destroy();
jmuxer = null;
console.log('JMuxer cleaned up successfully');
window.Flutter.postMessage('cleanup_complete');
} catch (e) {
console.error('Error cleaning up JMuxer:', e);
window.Flutter.postMessage('cleanup_error');
}
}
}
</script>
</body>
</html>
</html>

View File

@ -7,6 +7,7 @@ import 'package:get/get.dart';
import 'package:intl/intl.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/apm/apm_helper.dart';
import 'package:star_lock/appRouters.dart';
import 'package:star_lock/common/XSConstantMacro/XSConstantMacro.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/electronicKey/electronicKeyList/entity/ElectronicKeyListEntity.dart';

View File

@ -1,4 +1,3 @@
import 'dart:async';
import 'package:flutter_blue_plus/flutter_blue_plus.dart';
@ -17,51 +16,59 @@ import '../../../../tools/eventBusEventManage.dart';
import '../../../../tools/storage.dart';
import 'normallyOpenMode_state.dart';
class NormallyOpenModeLogic extends BaseGetXController{
class NormallyOpenModeLogic extends BaseGetXController {
NormallyOpenModeState state = NormallyOpenModeState();
//
Future<void> configPassageMode() async{
if(state.weekDays.value.isEmpty){
Future<void> configPassageMode() async {
if (state.weekDays.value.isEmpty) {
showToast('请选择常开日期'.tr);
return;
}
if(state.endTimeMinute.value < state.beginTimeMinute.value){
if (state.endTimeMinute.value < state.beginTimeMinute.value) {
showToast('结束时间不能小于开始时间哦'.tr);
return;
}
final List passageModeConfig = [];
final Map<String, Object> map = <String, Object>{
'isAllDay':state.isAllDay.value,
'weekDays':state.weekDays.value,
'startDate':state.beginTimeMinute.value,
'endDate':state.endTimeMinute.value,
'isAllDay': state.isAllDay.value,
'weekDays': state.weekDays.value,
'startDate': state.beginTimeMinute.value,
'endDate': state.endTimeMinute.value,
};
passageModeConfig.add(map);
final LoginEntity entity = await ApiRepository.to.setNormallyModeData(
lockId: state.lockSetInfoData.value.lockId!,
passageMode:state.isOpenNormallyOpenMode.value == true ? 1:0,
passageMode: state.isOpenNormallyOpenMode.value == true ? 1 : 0,
passageModeConfig: passageModeConfig,
);
if(entity.errorCode!.codeIsSuccessful){
showToast('操作成功'.tr, something: (){
if (entity.errorCode!.codeIsSuccessful) {
showToast('操作成功'.tr, something: () {
eventBus.fire(RefreshLockListInfoDataEvent());
state.lockSetInfoData.value.lockSettingInfo!.passageMode = state.isOpenNormallyOpenMode.value == true ? 1:0;
eventBus.fire(PassCurrentLockInformationEvent(state.lockSetInfoData.value));
eventBus.fire(LockSetChangeSetRefreshLockDetailWithType(2, state.lockSetInfoData.value.lockSettingInfo!.passageMode!.toString()));
state.lockSetInfoData.value.lockSettingInfo!.passageMode =
state.isOpenNormallyOpenMode.value == true ? 1 : 0;
eventBus
.fire(PassCurrentLockInformationEvent(state.lockSetInfoData.value));
eventBus.fire(LockSetChangeSetRefreshLockDetailWithType(
2,
state.lockSetInfoData.value.lockSettingInfo!.passageMode!
.toString()));
Get.back();
});
}
}
//
late StreamSubscription<Reply> _replySubscription;
void _initReplySubscription() {
_replySubscription = EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
if(reply is SetSupportFunctionsWithParametersReply) {
_replySubscription =
EventBusManager().eventBus!.on<Reply>().listen((Reply reply) {
if (reply is SetSupportFunctionsWithParametersReply) {
_replySetSupportFunctionsWithParameters(reply);
}
@ -93,7 +100,7 @@ class NormallyOpenModeLogic extends BaseGetXController{
//
Future<void> _replySetSupportFunctionsWithParameters(Reply reply) async {
final int status = reply.data[2];
switch(status){
switch (status) {
case 0x00:
//
state.sureBtnState.value = 0;
@ -111,38 +118,44 @@ class NormallyOpenModeLogic extends BaseGetXController{
// ()
Future<void> sendAutoLock() async {
if(state.sureBtnState.value == 1){
if (state.sureBtnState.value == 1) {
return;
}
state.sureBtnState.value = 1;
showEasyLoading();
showBlueConnetctToastTimer(action: (){
showBlueConnetctToastTimer(action: () {
dismissEasyLoading();
state.sureBtnState.value = 0;
});
BlueManage().blueSendData(BlueManage().connectDeviceName, (BluetoothConnectionState connectionState) async {
BlueManage().blueSendData(BlueManage().connectDeviceName,
(BluetoothConnectionState connectionState) async {
if (connectionState == BluetoothConnectionState.connected) {
final List<String>? privateKey = await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList = changeStringListToIntList(privateKey!);
final List<String>? privateKey =
await Storage.getStringList(saveBluePrivateKey);
final List<int> getPrivateKeyList =
changeStringListToIntList(privateKey!);
final List<String>? token = await Storage.getStringList(saveBlueToken);
final List<int> getTokenList = changeStringListToIntList(token!);
final List<String>? publicKey = await Storage.getStringList(saveBluePublicKey);
final List<int> getPublicKeyList = changeStringListToIntList(publicKey!);
final List<String>? publicKey =
await Storage.getStringList(saveBluePublicKey);
final List<int> getPublicKeyList =
changeStringListToIntList(publicKey!);
String weekStr = '00000000';
for (var day in state.weekDays.value) {
final int index = day % 7; // 0
weekStr = '${weekStr.substring(0, index)}1${weekStr.substring(index + 1)}';
final int index = day % 7; // 0
weekStr =
'${weekStr.substring(0, index)}1${weekStr.substring(index + 1)}';
}
// weekStr
weekStr = weekStr.split('').reversed.join('');
final int number = int.parse(weekStr, radix: 2);
final List<int> list = <int>[];
list.add(state.isOpenNormallyOpenMode.value == true ? 1:0);
list.add(state.isOpenNormallyOpenMode.value == true ? 1 : 0);
final int bieginTime = state.beginTimeMinute.value;
final double bieginDouble = bieginTime / 256;
@ -159,7 +172,7 @@ class NormallyOpenModeLogic extends BaseGetXController{
list.add(end1);
list.add(end2);
list.add(state.isAllDay.value == 1 ? 1:0);
list.add(state.isAllDay.value == 1 ? 1 : 0);
list.add(number);
list.add(0);
@ -177,7 +190,7 @@ class NormallyOpenModeLogic extends BaseGetXController{
dismissEasyLoading();
cancelBlueConnetctToastTimer();
state.sureBtnState.value = 0;
if(state.ifCurrentScreen.value == true){
if (state.ifCurrentScreen.value == true) {
showBlueConnetctToast();
}
}

View File

@ -1,7 +1,19 @@
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
/// Shared timing and media-negotiation constants for the talk subsystem.
class TalkConstant {
  /// TalkPing timeout in seconds.
  static const int talkePingOverTime = 10;

  /// Talk-data timeout in seconds.
  static const int talkeDataOverTime = 10;

  /// TalkRequest timeout in seconds.
  static const int talkeRequestOverTime = 30;

  /// Expectation for still-image video with G.711 audio.
  ///
  /// Implemented as a getter that returns a fresh request each time:
  /// [TalkExpectReq] is a mutable protobuf message, so exposing a single
  /// shared static instance would let one caller's mutation silently
  /// corrupt every other user of the "constant".
  // NOTE(review): names kept UpperCamelCase for source compatibility with
  // existing call sites, although Dart convention is lowerCamelCase.
  static TalkExpectReq get ImageExpect => TalkExpectReq(
        videoType: [VideoTypeE.IMAGE],
        audioType: [AudioTypeE.G711],
      );

  /// Expectation for H.264 video with G.711 audio.
  static TalkExpectReq get H264Expect => TalkExpectReq(
        videoType: [VideoTypeE.H264],
        audioType: [AudioTypeE.G711],
      );
}

View File

@ -4,6 +4,7 @@ import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@ -14,62 +15,6 @@ import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
// implements ScpMessageHandler {
class UdpTalkDataHandler extends ScpMessageBaseHandle
implements ScpMessageHandler {
factory UdpTalkDataHandler() {
return _instance;
}
UdpTalkDataHandler._internal();
static final UdpTalkDataHandler _instance = UdpTalkDataHandler._internal();
int _recentRecvDataRate = 0;
int _recentRecvPacketCount = 0;
int _recentSendDataRate = 0;
int _recentSendPacketCount = 0;
int _lastRecvDataRate = 0;
int _lastRecvPacketCount = 0;
int _lastSendDataRate = 0;
int _lastSendPacketCount = 0;
void updateRecvDataRate(int dataSize) {
_recentRecvDataRate += dataSize;
_recentRecvPacketCount++;
}
void updateSendDataRate(int dataSize) {
_recentSendDataRate += dataSize;
_recentSendPacketCount++;
}
void resetDataRates() {
_lastRecvDataRate = _recentRecvDataRate;
_lastRecvPacketCount = _recentRecvPacketCount;
_lastSendDataRate = _recentSendDataRate;
_lastSendPacketCount = _recentSendPacketCount;
_recentRecvDataRate = 0;
_recentRecvPacketCount = 0;
_recentSendDataRate = 0;
_recentSendPacketCount = 0;
}
int getLastRecvDataRate() {
return _lastRecvDataRate;
}
int getLastRecvPacketCount() {
return _lastRecvPacketCount;
}
int getLastSendDataRate() {
return _lastSendDataRate;
}
int getLastSendPacketCount() {
return _lastSendPacketCount;
}
@override
void handleReq(ScpMessage scpMessage) {}
@ -95,6 +40,17 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
return buffer.map((byte) => byte.toRadixString(16).padLeft(2, '0')).join();
}
//
// Fire-and-forget logging helper.
//
// Schedules [message] onto a microtask so the (potentially slow) logging
// call cannot block the UDP packet-handling path this handler runs on.
void _asyncLog(String message) {
Future.microtask(() {
try {
AppLog.log(message);
} catch (e) {
// Deliberately swallowed: logging must never break data handling.
}
});
}
@override
deserializePayload(
{required int payloadType,
@ -105,10 +61,12 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
int? spTotal,
int? spIndex,
int? messageId}) {
// AppLog.log(
// '没有组包之前的每一个包的数据:${byte.length} messageId:$messageId spTotal:$spTotal spIndex:$spIndex PayloadLength:$PayloadLength,byte:${bufferToHexString(byte)}');
//
final stats = PacketLossStatistics().getStatistics();
_asyncLog('丢包统计: $stats');
// _asyncLog(
// '分包数据:messageId:$messageId [$spIndex/$spTotal] PayloadLength:$PayloadLength');
if (messageType == MessageTypeConstant.RealTimeData) {
//
if (spTotal != null &&
spTotal > 1 &&
messageId != null &&
@ -161,8 +119,6 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame();
talkDataH264Frame.mergeFromBuffer(talkData.content);
frameHandler.handleFrame(talkDataH264Frame);
// AppLog.log(
// "帧:${talkDataH264Frame.frameType},帧序号:${talkDataH264Frame.frameSeq},对应I帧序号${talkDataH264Frame.frameSeqI}");
}
///

View File

@ -1,105 +1,17 @@
import 'dart:collection';
import 'dart:typed_data';
import 'package:flutter/services.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import '../../proto/talk_data_h264_frame.pb.dart';
class H264FrameHandler {
final LinkedHashMap<int, TalkDataH264Frame> _frameBuffer = LinkedHashMap();
final void Function(List<int> frameData) onCompleteFrame;
final LinkedHashMap<int, TalkDataH264Frame_FrameTypeE> _frameTypeIndex = LinkedHashMap();
final void Function(List<int> frameData) onCompleteFrame;
H264FrameHandler({required this.onCompleteFrame});
void handleFrame(TalkDataH264Frame frame) {
//
_frameBuffer[frame.frameSeq] = frame;
_frameTypeIndex[frame.frameSeq] = frame.frameType;
// GOP (Group of Pictures)
_tryAssembleFrames(frame.frameSeq);
}
void _tryAssembleFrames(int currentSeq) {
final List<int> framesToProcess = [];
int? startFrameSeq;
// I P
for (int seq = currentSeq; seq >= 0; seq--) {
final frameType = _frameTypeIndex[seq];
if (frameType == null) continue;
if (frameType == TalkDataH264Frame_FrameTypeE.I) {
startFrameSeq = seq;
break;
} else if (frameType == TalkDataH264Frame_FrameTypeE.P) {
if (_frameBuffer.containsKey(_frameBuffer[seq]!.frameSeqI)) {
startFrameSeq = seq;
break;
} else {
_frameBuffer.remove(seq);
_frameTypeIndex.remove(seq);
}
}
}
if (startFrameSeq != null) {
for (int seq = startFrameSeq; _frameBuffer.containsKey(seq); seq++) {
framesToProcess.add(seq);
}
if (framesToProcess.isNotEmpty) {
_processFrames(framesToProcess);
}
} else {
_clearOldFrames(currentSeq);
}
}
void _processFrames(List<int> frameSeqs) {
//
// final List<int> assembledData = [];
//
// for (var seq in frameSeqs) {
// final frame = _frameBuffer[seq]!;
// assembledData.addAll(frame.frameData);
//
// //
// _frameBuffer.remove(seq);
// }
//
// //
// onCompleteFrame(assembledData);
// Calculate the total length of the assembled data
int totalLength = frameSeqs.fold(
0, (sum, seq) => sum + _frameBuffer[seq]!.frameData.length);
// Allocate a buffer for the assembled data
final assembledData = Uint8List(totalLength);
int offset = 0;
for (var seq in frameSeqs) {
final frame = _frameBuffer[seq]!;
assembledData.setRange(
offset, offset + frame.frameData.length, frame.frameData);
offset += frame.frameData.length;
// Remove the frame from the buffer after processing
_frameBuffer.remove(seq);
_frameTypeIndex.remove(seq);
}
// Callback with the complete frame data
onCompleteFrame(assembledData);
}
void clear() {
_frameBuffer.clear();
}
void _clearOldFrames(int currentSeq) {
//
_frameBuffer.removeWhere((seq, frame) => seq < currentSeq - 200); //
_frameTypeIndex.removeWhere((seq, frameType) => seq < currentSeq - 200);
onCompleteFrame(frame.frameData);
}
}

View File

@ -0,0 +1,94 @@
import 'dart:collection';
/// Tracks per-message packet arrival to estimate UDP talk-data loss rates.
///
/// Singleton: every call site shares one accumulator. Call [reset] when a
/// session ends so stale state does not leak into the next session.
class PacketLossStatistics {
  factory PacketLossStatistics() => _instance;
  PacketLossStatistics._internal();
  static final PacketLossStatistics _instance =
      PacketLossStatistics._internal();

  /// Cap on concurrently tracked messages. An entry whose final packet
  /// never arrives would otherwise stay in [_packetsMap] forever (a slow
  /// leak, and its losses would never be counted); once the cap is hit
  /// the oldest in-flight message is finalized as lost.
  static const int _maxTrackedMessages = 512;

  // In-flight messages, keyed by messageId. A Dart map literal is a
  // LinkedHashMap, so iteration order == insertion order, which lets
  // [_evictOldestIfNeeded] drop the stalest entry first.
  final Map<int, PacketInfo> _packetsMap = <int, PacketInfo>{};

  // Cumulative counters since the last reset().
  int _totalMessages = 0; // messages seen (first packet observed)
  int _lostMessages = 0; // messages finalized with missing packets
  int _totalPackets = 0; // packets expected across all seen messages
  int _lostPackets = 0; // packets never received

  /// Records one received sub-packet of [messageId].
  ///
  /// [currentIndex] is the packet's index within the message and
  /// [totalPackets] the announced packet count for that message.
  // NOTE(review): finalization triggers when currentIndex == totalPackets,
  // which assumes 1-based indices and that the last packet arrives last —
  // confirm against the sender's SpIndex convention.
  void recordPacket(int messageId, int currentIndex, int totalPackets) {
    if (!_packetsMap.containsKey(messageId)) {
      _evictOldestIfNeeded();
      _packetsMap[messageId] = PacketInfo(totalPackets);
      _totalMessages++;
      _totalPackets += totalPackets;
    }
    // Set semantics make duplicate deliveries harmless.
    _packetsMap[messageId]!.receivedPackets.add(currentIndex);
    if (currentIndex == totalPackets) {
      _checkPacketLoss(messageId);
    }
  }

  // Finalizes the oldest in-flight messages while the tracker is full,
  // counting their missing packets as lost. Prevents unbounded growth
  // when a message's final packet is itself dropped.
  void _evictOldestIfNeeded() {
    while (_packetsMap.length >= _maxTrackedMessages) {
      _checkPacketLoss(_packetsMap.keys.first);
    }
  }

  // Finalizes [messageId]: folds its missing packets into the loss
  // counters and forgets its tracking entry.
  void _checkPacketLoss(int messageId) {
    final PacketInfo info = _packetsMap[messageId]!;
    final int received = info.receivedPackets.length;
    if (received < info.totalPackets) {
      _lostMessages++;
      _lostPackets += info.totalPackets - received;
    }
    _packetsMap.remove(messageId);
  }

  /// Returns the loss rates (percentages) accumulated since the last
  /// [reset]. Both rates are 0.0 before any complete message is seen.
  PacketLossInfo getStatistics() {
    if (_totalMessages == 0 || _totalPackets == 0) {
      return PacketLossInfo(0.0, 0.0);
    }
    final double messageLossRate = (_lostMessages / _totalMessages) * 100;
    final double packetLossRate = (_lostPackets / _totalPackets) * 100;
    return PacketLossInfo(messageLossRate, packetLossRate);
  }

  /// Clears all tracking state and counters (call on session teardown).
  void reset() {
    _packetsMap.clear();
    _totalMessages = 0;
    _lostMessages = 0;
    _totalPackets = 0;
    _lostPackets = 0;
  }
}

/// Per-message bookkeeping: announced packet count plus received indices.
class PacketInfo {
  PacketInfo(this.totalPackets);
  final int totalPackets;
  final Set<int> receivedPackets = HashSet<int>();
}

/// Immutable snapshot of loss rates, both expressed as percentages.
class PacketLossInfo {
  PacketLossInfo(this.messageLossRate, this.packetLossRate);
  final double messageLossRate; // % of messages with >= 1 missing packet
  final double packetLossRate; // % of expected packets never received

  @override
  String toString() {
    return 'Message Loss Rate: ${messageLossRate.toStringAsFixed(2)}%, Packet Loss Rate: ${packetLossRate.toStringAsFixed(2)}%';
  }
}

View File

@ -22,6 +22,7 @@ import 'package:star_lock/talk/starChart/constant/ip_constant.dart';
import 'package:star_lock/talk/starChart/constant/listen_addr_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/payload_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/talk_constant.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/entity/relay_info_entity.dart';
import 'package:star_lock/talk/starChart/entity/report_information_data.dart';
@ -31,6 +32,7 @@ import 'package:star_lock/talk/starChart/exception/start_chart_message_exception
import 'package:star_lock/talk/starChart/handle/impl/debug_Info_model.dart';
import 'package:star_lock/talk/starChart/handle/impl/udp_talk_data_handler.dart';
import 'package:star_lock/talk/starChart/handle/other/do_sign.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talke_data_over_time_timer_manager.dart';
import 'package:star_lock/talk/starChart/handle/other/talke_ping_over_time_timer_manager.dart';
import 'package:star_lock/talk/starChart/handle/other/talke_request_over_time_timer_manager.dart';
@ -111,10 +113,7 @@ class StartChartManage {
final int _maxPayloadSize = 8 * 1024; //
//
TalkExpectReq _defaultTalkExpect = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
TalkExpectReq _defaultTalkExpect = TalkConstant.H264Expect;
String relayPeerId = ''; // peerId
@ -342,7 +341,7 @@ class StartChartManage {
}
}
// RbcuConfirm
//
void _sendRbcuConfirmMessage() async {
RbcuConfirm(
sessionId: _rbcuSessionId,
@ -596,10 +595,18 @@ class StartChartManage {
//
void startTalkRejectMessageTimer() async {
try {
int count = 0;
final int maxCount = 10; // 10
talkRejectTimer ??= Timer.periodic(
Duration(seconds: _defaultIntervalTime),
(Timer timer) async {
_sendTalkRejectMessage();
count++;
if (count >= maxCount) {
timer.cancel();
talkRejectTimer = null;
}
},
);
} catch (e) {
@ -1035,6 +1042,14 @@ class StartChartManage {
final int payloadType = scpMessage.PayloadType ?? 0;
final int messageType = scpMessage.MessageType ?? 0;
try {
//
if (scpMessage.SpIndex != null &&
scpMessage.SpTotal != null &&
scpMessage.MessageId != null) {
PacketLossStatistics().recordPacket(
scpMessage.MessageId!, scpMessage.SpIndex!, scpMessage.SpTotal!);
}
final ScpMessageHandler handler =
ScpMessageHandlerFactory.createHandler(payloadType);
if (messageType == MessageTypeConstant.Req) {
@ -1130,10 +1145,7 @@ class StartChartManage {
}
void reSetDefaultTalkExpect() {
_defaultTalkExpect = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
_defaultTalkExpect = TalkConstant.H264Expect;
}
TalkExpectReq getDefaultTalkExpect() {
@ -1152,10 +1164,7 @@ class StartChartManage {
///
void sendImageVideoAndG711AudioTalkExpectData() {
final talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
final talkExpectReq = TalkConstant.H264Expect;
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
talkExpect: talkExpectReq);
}
@ -1216,6 +1225,7 @@ class StartChartManage {
await Storage.removerStarChartRegisterNodeInfo();
// udp服务
closeUdpSocket();
PacketLossStatistics().reset();
}
///

View File

@ -615,7 +615,7 @@ class _TalkViewPageState extends State<TalkViewPage>
state.videoBuffer.clear();
state.listData.value = Uint8List(0);
CallTalk().finishAVData();
UdpTalkDataHandler().resetDataRates();
// UdpTalkDataHandler().resetDataRates();
super.dispose();
}
}

View File

@ -1,4 +1,5 @@
import 'dart:async';
import 'dart:collection';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
@ -23,11 +24,13 @@ import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:webview_flutter/webview_flutter.dart';
@ -38,6 +41,14 @@ class H264WebViewLogic extends BaseGetXController {
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
//
static const int CHUNK_SIZE = 4096;
Timer? _mockDataTimer;
//
final List<int> _bufferedAudioFrames = <int>[];
final Queue<List<int>> _frameBuffer = Queue<List<int>>();
static const int FRAME_BUFFER_SIZE = 25;
@override
void onInit() {
super.onInit();
@ -57,7 +68,7 @@ class H264WebViewLogic extends BaseGetXController {
_loadLocalHtml();
//
_createFramesStreamListen();
// playLocalTestVideo();
_startListenTalkStatus();
state.talkStatus.value = state.startChartTalkStatus.status;
//
@ -86,11 +97,43 @@ class H264WebViewLogic extends BaseGetXController {
void _createFramesStreamListen() async {
state.talkDataRepository.talkDataStream.listen((TalkData event) async {
// js处理
_sendBufferedData(event.content);
//
_frameBuffer.add(event.content);
// ,
while (_frameBuffer.length > FRAME_BUFFER_SIZE) {
if (_frameBuffer.isNotEmpty) {
final frame = _frameBuffer.removeFirst();
await _sendBufferedData(frame);
}
}
});
}
///
// Future<void> playLocalTestVideo() async {
// try {
// ByteData data = await rootBundle.load('assets/html/demo.h264');
// List<int> bytes = data.buffer.asUint8List();
//
// int offset = 0;
// _mockDataTimer = Timer.periodic(Duration(milliseconds: 40), (timer) {
// if (offset >= bytes.length) {
// timer.cancel();
// return;
// }
//
// int end = min(offset + CHUNK_SIZE, bytes.length);
// List<int> chunk = bytes.sublist(offset, end);
// _sendBufferedData(chunk);
//
// offset += CHUNK_SIZE;
// });
// } catch (e) {
// AppLog.log('加载测试视频文件失败: $e');
// }
// }
/// html文件
Future<void> _loadLocalHtml() async {
// HTML
@ -226,15 +269,17 @@ class H264WebViewLogic extends BaseGetXController {
//
Future<void> startProcessingAudio() async {
//
state.voiceProcessor?.addFrameListener(_onFrame);
state.voiceProcessor?.addErrorListener(_onError);
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.startRecordingAudioTime.value = DateTime.now();
//
state.voiceProcessor
?.addFrameListeners(<VoiceProcessorFrameListener>[_onFrame]);
state.voiceProcessor?.addErrorListener(_onError);
} else {
// state.errorMessage.value = 'Recording permission not granted';
}
@ -254,8 +299,8 @@ class H264WebViewLogic extends BaseGetXController {
state.endRecordingAudioTime.value = DateTime.now();
//
final duration = state.endRecordingAudioTime.value!
.difference(state.startRecordingAudioTime.value!);
final Duration duration = state.endRecordingAudioTime.value
.difference(state.startRecordingAudioTime.value);
state.recordingAudioTime.value = duration.inSeconds;
} on PlatformException catch (ex) {
@ -267,25 +312,71 @@ class H264WebViewLogic extends BaseGetXController {
}
}
//
//
Future<void> _onFrame(List<int> frame) async {
// 线
// final processedFrame = await compute(preprocessAudio, frame);
// final list = listLinearToALaw(processedFrame);
final List<int> processedFrame = preprocessAudio(frame);
final List<int> list = listLinearToALaw(processedFrame);
//
if (_bufferedAudioFrames.length > state.frameLength * 3) {
_bufferedAudioFrames.clear(); //
return;
}
final int ms = DateTime.now().millisecondsSinceEpoch -
state.startRecordingAudioTime.value.millisecondsSinceEpoch;
//
List<int> amplifiedFrame = _applyGain(frame, 1.6);
// G711数据
List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0A-law
_bufferedAudioFrames.addAll(encodedData);
// 使
final int ms = DateTime.now().millisecondsSinceEpoch % 1000000; // 使
int getFrameLength = state.frameLength;
if (Platform.isIOS) {
getFrameLength = state.frameLength * 2;
}
// UDP
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: list,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
//
if (_bufferedAudioFrames.length >= state.frameLength) {
try {
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: _bufferedAudioFrames,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
} finally {
_bufferedAudioFrames.clear(); //
}
} else {
_bufferedAudioFrames.addAll(encodedData);
}
}
//
void _onError(VoiceProcessorException error) {
AppLog.log(error.message!);
}
//
/// Returns a copy of [pcmData] with every sample multiplied by
/// [gainFactor].
///
/// Each scaled sample is clamped to the signed 16-bit range
/// [-32768, 32767] so amplified audio cannot wrap around, then truncated
/// toward zero when converted back to an integer.
// NOTE(review): assumes the input holds signed 16-bit PCM samples —
// confirm against the recorder's output format.
List<int> _applyGain(List<int> pcmData, double gainFactor) {
  final amplified = List<int>.filled(pcmData.length, 0);
  for (var i = 0; i < pcmData.length; i++) {
    final scaled = (pcmData[i] * gainFactor).clamp(-32768.0, 32767.0);
    amplified[i] = scaled.toInt();
  }
  return amplified;
}
///
@ -297,6 +388,9 @@ class H264WebViewLogic extends BaseGetXController {
//
StartChartManage().startTalkRejectMessageTimer();
}
// _mockDataTimer?.cancel();
// _mockDataTimer = null;
PacketLossStatistics().reset();
Get.back();
}
@ -315,7 +409,7 @@ class H264WebViewLogic extends BaseGetXController {
});
final LockSetInfoEntity lockSetInfoEntity =
await ApiRepository.to.getLockSettingInfoData(
await ApiRepository.to.getLockSettingInfoData(
lockId: lockId.toString(),
);
if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
@ -333,93 +427,11 @@ class H264WebViewLogic extends BaseGetXController {
}
}
List<int> preprocessAudio(List<int> pcmList) {
//
final List<int> processedList = [];
for (int pcmVal in pcmList) {
// 0
if (pcmVal.abs() < 200) {
pcmVal = 0;
}
processedList.add(pcmVal);
}
return processedList;
}
List<int> listLinearToALaw(List<int> pcmList) {
final List<int> aLawList = [];
for (int pcmVal in pcmList) {
final int aLawVal = linearToALaw(pcmVal);
aLawList.add(aLawVal);
}
return aLawList;
}
int linearToALaw(int pcmVal) {
const int ALAW_MAX = 0x7FFF; // 32767
const int ALAW_BIAS = 0x84; // 132
int mask;
int seg;
int aLawVal;
// Handle sign
if (pcmVal < 0) {
pcmVal = -pcmVal;
mask = 0x7F; // 127 (sign bit is 1)
} else {
mask = 0xFF; // 255 (sign bit is 0)
}
// Add bias and clamp to ALAW_MAX
pcmVal += ALAW_BIAS;
if (pcmVal > ALAW_MAX) {
pcmVal = ALAW_MAX;
}
// Determine segment
seg = search(pcmVal);
// Calculate A-law value
if (seg >= 8) {
aLawVal = 0x7F ^ mask; // Clamp to maximum value
} else {
int quantized = (pcmVal >> (seg + 3)) & 0xF;
aLawVal = (seg << 4) | quantized;
aLawVal ^= 0xD5; // XOR with 0xD5 to match standard A-law table
}
return aLawVal;
}
int search(int val) {
final List<int> table = [
0xFF, // Segment 0
0x1FF, // Segment 1
0x3FF, // Segment 2
0x7FF, // Segment 3
0xFFF, // Segment 4
0x1FFF, // Segment 5
0x3FFF, // Segment 6
0x7FFF // Segment 7
];
const int size = 8;
for (int i = 0; i < size; i++) {
if (val <= table[i]) {
return i;
}
}
return size;
}
//
void _onError(VoiceProcessorException error) {
AppLog.log(error.message!);
}
@override
void dispose() {
// TODO: implement dispose
// _mockDataTimer?.cancel();
// _mockDataTimer = null;
super.dispose();
StartChartManage().startTalkHangupMessageTimer();
state.animationController.dispose();
@ -429,5 +441,6 @@ class H264WebViewLogic extends BaseGetXController {
state.oneMinuteTimeTimer = null;
stopProcessingAudio();
StartChartManage().reSetDefaultTalkExpect();
_frameBuffer.clear();
}
}