feat:增加原生插件解码的页面、增加h264、mjpeg切换的debug按钮

This commit is contained in:
liyi 2025-04-25 10:21:05 +08:00
parent 07aa71c679
commit 1784f75c47
16 changed files with 1672 additions and 38 deletions

View File

@ -60,6 +60,7 @@ import 'package:star_lock/mine/mineSet/transferSmartLock/transferSmartLockList/t
import 'package:star_lock/mine/valueAddedServices/advancedFeaturesWeb/advancedFeaturesWeb_page.dart';
import 'package:star_lock/mine/valueAddedServices/advancedFunctionRecord/advancedFunctionRecord_page.dart';
import 'package:star_lock/mine/valueAddedServices/valueAddedServicesRecord/value_added_services_record_page.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_page.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_page.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view.dart';
@ -1184,6 +1185,7 @@ abstract class AppRouters {
page: () => const DoubleLockLinkPage()),
GetPage<dynamic>(
name: Routers.starChartTalkView, page: () => const TalkViewPage()),
// GetPage<dynamic>(name: Routers.h264WebView, page: () => TalkViewNativeDecodePage()),
GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()),
];
}

View File

@ -15,6 +15,8 @@ import 'package:star_lock/main/lockDetail/lockDetail/device_network_info.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockTime/getServerDatetime_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/throttler.dart';
@ -564,7 +566,7 @@ class LockDetailLogic extends BaseGetXController {
// token
Future<void> getLockNetToken() async {
final LockNetTokenEntity entity = await ApiRepository.to
.getLockNetToken(lockId: state.keyInfos.value.lockId.toString());
.getLockNetToken(lockId: state.keyInfos.value.lockId!);
if (entity.errorCode!.codeIsSuccessful) {
state.lockNetToken = entity.data!.token!.toString();
// AppLog.log('从服务器获取联网token:${state.lockNetToken}');
@ -769,12 +771,12 @@ class LockDetailLogic extends BaseGetXController {
if (catEyeConfig.isNotEmpty &&
catEyeConfig.length > 0 &&
catEyeConfig[0].catEyeMode != 0) {
if (StartChartManage().lockNetworkInfo.wifiName == null ||
StartChartManage().lockNetworkInfo.wifiName == '') {
if ((StartChartManage().lockNetworkInfo.wifiName == null ||
StartChartManage().lockNetworkInfo.wifiName == '') ) {
showToast('设备未配网'.tr);
return;
}
PacketLossStatistics().reset();
// id
StartChartManage().startCallRequestMessageTimer(
ToPeerId: StartChartManage().lockNetworkInfo.peerId ?? '');
@ -795,6 +797,15 @@ class LockDetailLogic extends BaseGetXController {
@override
void onInit() {
super.onInit();
//
final currentTalkExpect = StartChartManage().getDefaultTalkExpect();
if (currentTalkExpect.videoType.contains(VideoTypeE.H264)) {
state.useH264Mode.value = true;
} else if (currentTalkExpect.videoType.contains(VideoTypeE.IMAGE)) {
state.useH264Mode.value = false;
}
state.LockSetChangeSetRefreshLockDetailWithTypeSubscription = eventBus
.on<LockSetChangeSetRefreshLockDetailWithType>()
.listen((LockSetChangeSetRefreshLockDetailWithType event) {

View File

@ -1,5 +1,6 @@
import 'dart:async';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
@ -88,7 +89,6 @@ class _LockDetailPageState extends State<LockDetailPage>
///
AppRouteObserver().routeObserver.subscribe(this, ModalRoute.of(context)!);
state.isOpenLockNeedOnline.refresh();
}
StreamSubscription? _lockRefreshLockDetailInfoDataEvent;
@ -507,6 +507,60 @@ class _LockDetailPageState extends State<LockDetailPage>
Widget skWidget() {
return ListView(
children: <Widget>[
// Container(
// padding: EdgeInsets.symmetric(vertical: 15, horizontal: 20),
// margin: EdgeInsets.only(top: 10, bottom: 10),
// decoration: BoxDecoration(
// color: Colors.white,
// borderRadius: BorderRadius.circular(10),
// boxShadow: [
// BoxShadow(
// color: Colors.black.withOpacity(0.05),
// blurRadius: 5,
// offset: Offset(0, 2),
// ),
// ],
// ),
// child: Row(
// mainAxisAlignment: MainAxisAlignment.spaceBetween,
// children: [
// Text('对讲视频模式'.tr,
// style: TextStyle(fontSize: 16, fontWeight: FontWeight.bold)),
// Row(
// children: [
// Text('mjpeg',
// style: TextStyle(
// fontSize: 14,
// color: !state.useH264Mode.value
// ? AppColors.mainColor
// : Colors.grey)),
// Obx(() => Switch(
// value: state.useH264Mode.value,
// activeColor: AppColors.mainColor,
// onChanged: (value) {
// state.useH264Mode.value = value;
// if (value) {
// // 使H264模式
// StartChartManage()
// .sendH264VideoAndG711AudioTalkExpectData();
// } else {
// // 使Image模式
// StartChartManage()
// .sendImageVideoAndG711AudioTalkExpectData();
// }
// },
// )),
// Text('H264'.tr,
// style: TextStyle(
// fontSize: 14,
// color: state.useH264Mode.value
// ? AppColors.mainColor
// : Colors.grey)),
// ],
// ),
// ],
// ),
// ),
Visibility(
visible:
(state.keyInfos.value.keyType == XSConstantMacro.keyTypeTime ||
@ -1467,7 +1521,7 @@ class _LockDetailPageState extends State<LockDetailPage>
state.iSOpenLock.value = true;
state.openLockBtnState.value = 1;
state.animationController!.forward();
// AppLog.log('点击开锁');
AppLog.log('点击开锁');
if (isOpenLockNeedOnline) {
//
state.openDoorModel = 0;

View File

@ -7,18 +7,18 @@ import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dar
import '../../../blue/io_reply.dart';
import '../../lockMian/entity/lockListInfo_entity.dart';
class LockDetailState {
Rx<LockListInfoItemEntity> keyInfos = LockListInfoItemEntity().obs;
final Rx<LockSetInfoData> lockSetInfoData = LockSetInfoData().obs;
late StreamSubscription<Reply> replySubscription;
StreamSubscription? lockSetOpenOrCloseCheckInRefreshLockDetailWithAttendanceEvent;
StreamSubscription?
lockSetOpenOrCloseCheckInRefreshLockDetailWithAttendanceEvent;
StreamSubscription? LockSetChangeSetRefreshLockDetailWithTypeSubscription;
StreamSubscription? DetailLockInfo;
StreamSubscription? SuccessfulDistributionNetworkEvent;
String lockNetToken = '0';
int differentialTime = 0;//
int differentialTime = 0; //
bool isHaveNetwork = true;
int lockUserNo = 0;
int senderUserId = 0;
@ -41,7 +41,7 @@ class LockDetailState {
RxBool bottomBtnisEable = true.obs; //
RxBool openDoorBtnisUneable = true.obs; // 使使,
int openDoorModel = 0;// 线0, 线2 线32 线34
int openDoorModel = 0; // 线0, 线2 线32 线34
//
AnimationController? animationController;
@ -58,6 +58,9 @@ class LockDetailState {
int logCountPage = 10; //
RxInt nextAuthTime = 0.obs; //
//
RxBool useH264Mode = true.obs; // true表示使用H264模式false表示使用Image模式
// LockDetailState() {
// Map map = Get.arguments;
// lockCount = map["lockCount"];

View File

@ -353,7 +353,7 @@ class ApiProvider extends BaseProvider {
);
// token
Future<Response> getLockNetToken(String lockId) => post(
Future<Response> getLockNetToken(int lockId) => post(
getLockNetTokenURL.toUrl,
jsonEncode({
'lockId': lockId,

View File

@ -325,7 +325,7 @@ class ApiRepository {
}
// token
// Fetches the lock's network token from the server.
//
// [lockId] is the numeric lock identifier; the provider now takes an int
// directly (the old String-based overload shown by the diff residue is
// removed — callers pass `keyInfos.value.lockId!`).
Future<LockNetTokenEntity> getLockNetToken({required int lockId}) async {
  final res = await apiProvider.getLockNetToken(lockId);
  return LockNetTokenEntity.fromJson(res.body);
}

View File

@ -62,9 +62,6 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
int? spTotal,
int? spIndex,
int? messageId}) {
//
final stats = PacketLossStatistics().getStatistics();
// _asyncLog('丢包统计: $stats');
// _asyncLog(
// '分包数据:messageId:$messageId [$spIndex/$spTotal] PayloadLength:$PayloadLength');
if (messageType == MessageTypeConstant.RealTimeData) {

View File

@ -10,9 +10,6 @@ import '../../proto/talk_data_h264_frame.pb.dart';
class H264FrameHandler {
final void Function(TalkDataModel frameData) onCompleteFrame;
// I帧的序号
int _lastProcessedIFrameSeq = -1;
H264FrameHandler({required this.onCompleteFrame});
void handleFrame(TalkDataH264Frame frame, TalkData talkData) {

View File

@ -10,6 +10,10 @@ class PacketLossStatistics {
// key: messageId, value: {totalPackets, receivedPackets}
final Map<int, PacketInfo> _packetsMap = HashMap();
//
int _maxCapacity = 300; // 300
int _timeoutMs = 30000; // 30
//
int _totalMessages = 0; //
int _lostMessages = 0; //
@ -18,10 +22,19 @@ class PacketLossStatistics {
//
void recordPacket(int messageId, int currentIndex, int totalPackets) {
//
_cleanupExpiredPackets();
//
_checkCapacityLimit();
if (!_packetsMap.containsKey(messageId)) {
_packetsMap[messageId] = PacketInfo(totalPackets);
_totalMessages++;
_totalPackets += totalPackets;
} else {
//
_packetsMap[messageId]!.timestamp = DateTime.now().millisecondsSinceEpoch;
}
_packetsMap[messageId]!.receivedPackets.add(currentIndex);
@ -32,6 +45,51 @@ class PacketLossStatistics {
}
}
//
// Drops records that have been idle longer than [_timeoutMs], booking
// each expired message (and its still-missing slices) as lost.
void _cleanupExpiredPackets() {
  final currentTime = DateTime.now().millisecondsSinceEpoch;
  final expiredMessageIds = <int>[];
  _packetsMap.forEach((messageId, info) {
    // A message untouched past the timeout is considered dead.
    if (currentTime - info.timestamp > _timeoutMs) {
      expiredMessageIds.add(messageId);
      // Book the whole message as lost, plus every slice not received.
      _lostMessages++;
      _lostPackets += (info.totalPackets - info.receivedPackets.length);
    }
  });
  // Remove outside the forEach to avoid concurrent modification.
  for (var messageId in expiredMessageIds) {
    _packetsMap.remove(messageId);
  }
}
//
// Evicts the oldest ~25% of records when the map exceeds [_maxCapacity],
// booking each evicted message's missing slices as lost.
void _checkCapacityLimit() {
  if (_packetsMap.length <= _maxCapacity) {
    return;
  }
  // Sort records oldest-first by last-touched timestamp.
  var entries = _packetsMap.entries.toList()
    ..sort((a, b) => a.value.timestamp.compareTo(b.value.timestamp));
  // Evict a quarter of the current records in one pass.
  int removeCount = (_packetsMap.length * 0.25).ceil();
  // Removing from the map is safe here: we iterate the snapshot list.
  for (int i = 0; i < removeCount && i < entries.length; i++) {
    var entry = entries[i];
    _lostMessages++;
    _lostPackets +=
        (entry.value.totalPackets - entry.value.receivedPackets.length);
    _packetsMap.remove(entry.key);
  }
}
//
void _checkPacketLoss(int messageId) {
final info = _packetsMap[messageId]!;
@ -62,6 +120,28 @@ class PacketLossStatistics {
return PacketLossInfo(messageLossRate, packetLossRate);
}
// Getter和Setter
// Maximum number of in-flight message records retained before eviction.
int get maxCapacity => _maxCapacity;

// Setting a smaller cap triggers an immediate eviction pass; non-positive
// values are ignored.
set maxCapacity(int value) {
  if (value > 0) {
    _maxCapacity = value;
    // Shrink right away if the map is already over the new cap.
    _checkCapacityLimit();
  }
}

// Idle timeout in milliseconds after which a record counts as lost.
int get timeoutMs => _timeoutMs;

// Setting a new timeout re-evaluates existing records immediately;
// non-positive values are ignored.
set timeoutMs(int value) {
  if (value > 0) {
    _timeoutMs = value;
    // Expire anything that is already past the new timeout.
    _cleanupExpiredPackets();
  }
}

// Number of messages currently being tracked.
int get pendingRecordsCount => _packetsMap.length;
//
void reset() {
_packetsMap.clear();
@ -76,8 +156,10 @@ class PacketLossStatistics {
// Receive-progress record for a single sliced message.
//
// The diff residue showed both the old `PacketInfo(this.totalPackets);`
// constructor and the new timestamped one; only the new one is kept.
class PacketInfo {
  // Total slice count the sender reported for this message.
  final int totalPackets;

  // Indices of slices received so far; a Set deduplicates retransmits.
  final Set<int> receivedPackets = HashSet<int>();

  // Last-touched wall-clock time in ms, used for timeout/LRU eviction.
  int timestamp;

  PacketInfo(this.totalPackets)
      : timestamp = DateTime.now().millisecondsSinceEpoch;
}
//

View File

@ -1226,7 +1226,7 @@ class StartChartManage {
await Storage.removerStarChartRegisterNodeInfo();
// udp服务
closeUdpSocket();
PacketLossStatistics().reset();
}
///

View File

@ -0,0 +1,798 @@
import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
import '../../../../tools/baseGetXController.dart';
class TalkViewNativeDecodeLogic extends BaseGetXController {
final TalkViewNativeDecodeState state = TalkViewNativeDecodeState();
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
int bufferSize = 25; //
int audioBufferSize = 2; // 2
//
final List<int> _bufferedAudioFrames = <int>[];
//
bool _isListening = false;
StreamSubscription? _streamSubscription;
Timer? _batchProcessTimer;
// I帧序号
final Set<int> _decodedIFrames = <int>{};
//
// Initializes the native H.264 decoder and wires up its callbacks.
//
// On failure it retries recursively after 2 s, but only while this
// controller is still registered with GetX (guards against leaking a
// retry loop after the page is closed).
Future<void> _initVideoDecoder() async {
  try {
    // Fixed decode dimensions — assumes the lock streams 864x480 @ 25fps.
    // TODO(review): confirm these match the device's actual stream.
    final config = VideoDecoderConfig(
      width: 864,
      height: 480,
      frameRate: 25,
      codecType: CodecType.h264,
      isDebug: true,
    );
    // A non-null textureId means the native decoder is ready to render.
    final textureId = await VideoDecodePlugin.initDecoder(config);
    if (textureId != null) {
      state.textureId.value = textureId;
      AppLog.log('视频解码器初始化成功textureId=$textureId');
      // Frame-available notifications (currently a no-op on the Dart side).
      VideoDecodePlugin.setFrameCallback(_onFrameAvailable);
      // Decoder lifecycle/statistics notifications for this texture.
      VideoDecodePlugin.setStateCallbackForTexture(
          textureId, _onDecoderStateChanged);
      // Start periodic performance/packet-loss monitoring.
      startFpsMonitoring();
    } else {
      AppLog.log('视频解码器初始化失败');
    }
  } catch (e) {
    AppLog.log('初始化视频解码器错误: $e');
    // Back off briefly, then retry unless the controller was disposed.
    await Future.delayed(const Duration(seconds: 2));
    if (!Get.isRegistered<TalkViewNativeDecodeLogic>()) {
      return; // Page already closed; abandon the retry.
    }
    _initVideoDecoder(); // Fire-and-forget retry.
  }
}
// Frame-available callback from the plugin; rendering happens natively
// via the Texture widget, so nothing is needed on the Dart side.
void _onFrameAvailable(int textureId) {}
// Decoder state callback: toggles the loading overlay and copies the
// plugin's per-texture statistics plus packet-loss figures into state.
void _onDecoderStateChanged(
    int textureId, DecoderState decoderState, Map<String, dynamic> stats) {
  // NOTE(review): stateText is computed but never read afterwards —
  // consider logging it or removing the variable.
  String stateText;
  switch (decoderState) {
    case DecoderState.initializing:
      state.isLoading.value = true;
      stateText = "初始化中";
      break;
    case DecoderState.ready:
      stateText = "准备就绪";
      break;
    case DecoderState.rendering:
      stateText = "渲染中";
      // First rendered output: hide the loading placeholder.
      state.isLoading.value = false;
      break;
    case DecoderState.error:
      stateText = "出错";
      // Surface the native error message when the plugin provides one.
      final errorMessage = stats['errorMessage'] as String?;
      if (errorMessage != null) {
        AppLog.log("解码器错误: $errorMessage");
      }
      break;
    case DecoderState.released:
      stateText = "已释放";
      break;
    default:
      stateText = "未知状态";
  }
  if (stats.isNotEmpty) {
    // Snapshot UDP packet-loss statistics alongside decoder stats.
    final PacketLossInfo packetLossInfo =
        PacketLossStatistics().getStatistics();
    // state.decoderFps.value = (stats['fps'] as num?)?.toDouble() ?? 0.0;
    // Defensive casts: any missing key falls back to a neutral default.
    state.renderedFrameCount.value = (stats['renderedFrames'] as int?) ?? 0;
    state.totalFrames.value = (stats['totalFrames'] as int?) ?? 0;
    state.droppedFrames.value = (stats['droppedFrames'] as int?) ?? 0;
    state.hasSentIDR.value = (stats['hasSentIDR'] as bool?) ?? false;
    state.hasSentSPS.value = (stats['hasSentSPS'] as bool?) ?? false;
    state.hasSentPPS.value = (stats['hasSentPPS'] as bool?) ?? false;
    state.keyFrameInterval.value = (stats['keyFrameInterval'] as int?) ?? 0;
    state.decodingJitterMs.value = (stats['decodingJitterMs'] as int?) ?? 0;
    // Publish packet-loss rates for the debug overlay.
    state.messageLossRate.value = packetLossInfo.messageLossRate;
    state.packetLossRate.value = packetLossInfo.packetLossRate;
    state.lastPacketStatsUpdateTime.value =
        DateTime.now().millisecondsSinceEpoch;
  }
}
/// Configures the PCM player for 8 kHz mono G.711 playback.
void _initFlutterPcmSound() {
  const int sampleRate = 8000;
  FlutterPcmSound.setLogLevel(LogLevel.none);
  FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
  // The feed threshold controls how eagerly the native player asks for
  // more PCM data; platform values were tuned empirically.
  if (Platform.isAndroid) {
    FlutterPcmSound.setFeedThreshold(1024); // Android
  } else {
    FlutterPcmSound.setFeedThreshold(2000); // iOS and other platforms
  }
}
/// Ends the current call and tears down all native decoder resources.
///
/// Sends a hang-up if the call was already answered, otherwise a reject,
/// then releases the decoder bound to our texture (plus any strays) and
/// pops the page. Returns `Future<void>` instead of the original
/// `void ... async` (avoid_void_async) so errors are observable.
Future<void> udpHangUpAction() async {
  if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
    // Call in progress: request a normal hang-up.
    StartChartManage().startTalkHangupMessageTimer();
  } else {
    // Not yet answered: reject the incoming call instead.
    StartChartManage().startTalkRejectMessageTimer();
  }
  if (state.textureId.value != null) {
    VideoDecodePlugin.releaseDecoderForTexture(state.textureId.value!);
  }
  // Belt-and-braces: release any other decoders the plugin still holds.
  VideoDecodePlugin.releaseAllDecoders();
  Get.back();
}
// Accepts the incoming call by starting the answer-message timer.
void initiateAnswerCommand() {
  StartChartManage().startTalkAcceptTimer();
}
// Subscribes to the talk-data stream and dispatches frames by content
// type: G.711 audio goes through a small jitter buffer, H.264 video goes
// to the native decoder. Guarded so only one subscription ever exists.
void _startListenTalkData() {
  // Avoid stacking duplicate listeners on re-entry.
  if (_isListening) {
    AppLog.log("已经存在数据流监听,避免重复监听");
    return;
  }
  AppLog.log("==== 启动新的数据流监听 ====");
  _isListening = true;
  _streamSubscription = state.talkDataRepository.talkDataStream
      .listen((TalkDataModel talkDataModel) async {
    final talkData = talkDataModel.talkData;
    final talkDataH264Frame = talkDataModel.talkDataH264Frame;
    // NOTE(review): talkData! and talkDataH264Frame! below assume the
    // producer always populates these for their content type — confirm.
    final contentType = talkData!.contentType;
    switch (contentType) {
      case TalkData_ContentTypeE.G711:
        // Bounded jitter buffer: drop the oldest frame when full.
        if (state.audioBuffer.length >= audioBufferSize) {
          state.audioBuffer.removeAt(0);
        }
        state.audioBuffer.add(talkData);
        // Try to drain one frame to the PCM player.
        _playAudioFrames();
        break;
      case TalkData_ContentTypeE.H264:
        // _processH264Frame(talkData, talkDataH264Frame!);
        // Forward the H.264 frame to the native decoder.
        _processH264Frame(talkData, talkDataH264Frame!);
        // Log only key frames to keep the log volume manageable.
        if (talkDataH264Frame!.frameType == TalkDataH264Frame_FrameTypeE.I) {
          AppLog.log(
              '帧序号${talkDataH264Frame.frameSeq};帧类型:${talkDataH264Frame.frameType.toString()};时间戳:${DateTime.now().millisecondsSinceEpoch}');
        }
        break;
    }
  });
}
// Feeds one H.264 frame to the native decoder.
//
// P-frames are dropped unless the I-frame they reference was decoded
// successfully (prevents smearing artifacts after loss). Successfully
// decoded I-frame sequence numbers are remembered in a bounded set.
Future<void> _processH264Frame(
    TalkData talkData, TalkDataH264Frame frameInfo) async {
  // Lazily (re)initialize the decoder if it is not ready yet.
  if (state.textureId.value == null) {
    AppLog.log('解码器尚未初始化,尝试重新初始化...');
    await _initVideoDecoder();
    // Still not ready — drop this frame rather than crash.
    if (state.textureId.value == null) {
      return;
    }
  }
  // Sequence number of the I-frame this frame depends on.
  final frameSeqI = frameInfo.frameSeqI;
  // Drop P-frames whose reference I-frame never decoded.
  if (frameInfo.frameType == TalkDataH264Frame_FrameTypeE.P &&
      !_decodedIFrames.contains(frameSeqI)) {
    AppLog.log('丢弃P帧: 依赖的I帧(${frameSeqI})尚未解码, P帧序号: ${frameInfo.frameSeq}');
    return;
  }
  // Raw NAL payload for the plugin.
  final Uint8List frameData = Uint8List.fromList(talkData.content);
  // Map the protobuf frame type onto the plugin's enum.
  final FrameType frameType =
      frameInfo.frameType == TalkDataH264Frame_FrameTypeE.I
          ? FrameType.iFrame
          : FrameType.pFrame;
  try {
    final bool result =
        await VideoDecodePlugin.decodeFrame(frameData, frameType);
    // Record successful I-frames so dependent P-frames are accepted.
    if (frameInfo.frameType == TalkDataH264Frame_FrameTypeE.I && result) {
      _decodedIFrames.add(frameInfo.frameSeq);
      // Bound the set; evict the oldest-inserted entry beyond 30.
      if (_decodedIFrames.length > 30) {
        _decodedIFrames.remove(_decodedIFrames.first);
      }
    }
  } catch (e) {
    AppLog.log('解码帧错误: $e, 帧序号: ${frameInfo.frameSeq}');
  }
}
// Drains one audio frame from the jitter buffer, oldest first.
//
// Waits until the buffer holds [audioBufferSize] frames before playing,
// trading a little latency for smoother playback.
void _playAudioFrames() {
  // Not enough buffered audio yet — keep accumulating.
  if (state.audioBuffer.isEmpty ||
      state.audioBuffer.length < audioBufferSize) {
    return;
  }
  // Linear scan for the frame with the smallest durationMs timestamp.
  TalkData? oldestFrame;
  int oldestIndex = -1;
  for (int i = 0; i < state.audioBuffer.length; i++) {
    if (oldestFrame == null ||
        state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
      oldestFrame = state.audioBuffer[i];
      oldestIndex = i;
    }
  }
  // Play (unless muted) and remove the frame either way so the buffer
  // keeps draining while muted.
  if (oldestFrame != null && oldestIndex != -1) {
    if (state.isOpenVoice.value) {
      _playAudioData(oldestFrame);
    }
    state.audioBuffer.removeAt(oldestIndex);
  }
}
/// Subscribes to call-status changes and reacts to terminal states.
///
/// Fix: the original used `state.oneMinuteTimeTimer ??= Timer.periodic(...)`
/// right after `cancel()`. `cancel()` does not null the field, so on any
/// second `answeredSuccessfully` event `??=` kept the cancelled timer and
/// the call-duration counter stopped ticking. The field is now cleared
/// before a fresh timer is assigned.
void _startListenTalkStatus() {
  state.startChartTalkStatus.statusStream.listen((talkStatus) {
    state.talkStatus.value = talkStatus;
    switch (talkStatus) {
      case TalkStatus.rejected:
      case TalkStatus.hangingUpDuring:
      case TalkStatus.notTalkData:
      case TalkStatus.notTalkPing:
      case TalkStatus.end:
        // Any terminal state stops audio playback and capture.
        _handleInvalidTalkStatus();
        break;
      case TalkStatus.answeredSuccessfully:
        // Restart the one-second call-duration ticker.
        state.oneMinuteTimeTimer?.cancel();
        state.oneMinuteTimeTimer = null;
        state.oneMinuteTimeTimer =
            Timer.periodic(const Duration(seconds: 1), (Timer t) {
          // Only count talk time once the first frame has rendered.
          if (state.isLoading.isFalse) {
            state.oneMinuteTime.value++;
          }
        });
        break;
      default:
        // Transitional states need no handling here.
        break;
    }
  });
}
/// Decodes one G.711 frame, denoises it, and feeds it to the PCM player.
///
/// Returns `Future<void>` instead of the original `void ... async`
/// (avoid_void_async) so asynchronous errors are not silently dropped.
Future<void> _playAudioData(TalkData talkData) async {
  if (state.isOpenVoice.value) {
    // G.711 -> PCM with denoising; 8000 Hz matches the player setup.
    // TODO(review): confirm the meaning of the 300/150 denoise params.
    final list =
        G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
    // Wrap raw PCM samples for the native feed API.
    final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
    FlutterPcmSound.feed(fromList);
    // Kick off playback on the first fed frame.
    if (!state.isPlaying.value) {
      FlutterPcmSound.play();
      state.isPlaying.value = true;
    }
  }
}
/// Stops PCM playback and discards any queued audio.
///
/// Returns `Future<void>` instead of the original `void ... async`
/// (avoid_void_async); existing fire-and-forget call sites still work.
Future<void> _stopPlayG711Data() async {
  await FlutterPcmSound.pause();
  await FlutterPcmSound.stop();
  await FlutterPcmSound.clear();
}
/// Returns whether microphone permission is currently granted.
///
/// When not granted, this kicks off a request or opens system settings
/// as a side effect, but still returns `false` immediately — the request
/// is fire-and-forget (not awaited), so callers must re-check later.
Future<bool> getPermissionStatus() async {
  final Permission permission = Permission.microphone;
  // Possible states: granted / denied / permanentlyDenied / restricted.
  final PermissionStatus status = await permission.status;
  if (status.isGranted) {
    return true;
  } else if (status.isDenied) {
    // NOTE(review): intentionally unawaited — result arrives later.
    requestPermission(permission);
  } else if (status.isPermanentlyDenied) {
    // User must flip the toggle in system settings.
    openAppSettings();
  } else if (status.isRestricted) {
    requestPermission(permission);
  } else {}
  return false;
}
/// Requests [permission]; opens system settings if permanently denied.
///
/// Returns `Future<void>` instead of the original `void ... async`
/// (avoid_void_async) so callers may await the user's decision.
Future<void> requestPermission(Permission permission) async {
  final PermissionStatus status = await permission.request();
  if (status.isPermanentlyDenied) {
    // Only system settings can lift a permanent denial.
    openAppSettings();
  }
}
/// Requests the storage and microphone permissions the talk page needs.
///
/// Consistency fix: logs via [AppLog.log] like the rest of this file
/// instead of `print` (avoid_print).
Future<void> requestPermissions() async {
  // Storage is needed for screenshots/recordings, microphone for talk.
  final storageStatus = await Permission.storage.request();
  final microphoneStatus = await Permission.microphone.request();
  if (storageStatus.isGranted && microphoneStatus.isGranted) {
    AppLog.log("Permissions granted");
  } else {
    AppLog.log("Permissions denied");
    // A permanent storage denial can only be fixed in system settings.
    if (await Permission.storage.isPermanentlyDenied) {
      openAppSettings();
    }
  }
}
// Recording-to-file is not implemented yet; these are placeholders so
// the UI can already bind to them. TODO(review): implement or remove.
Future<void> startRecording() async {}

Future<void> stopRecording() async {}
@override
void onReady() {
  // No post-render work needed; override kept for symmetry/future use.
  super.onReady();
}
@override
void onInit() {
  super.onInit();
  // Subscribe to incoming audio/video talk data.
  _startListenTalkData();
  // Subscribe to call-status transitions.
  _startListenTalkStatus();
  // Seed with the current status in case the call advanced before init.
  state.talkStatus.value = state.startChartTalkStatus.status;
  // Prepare G.711 playback (8 kHz mono PCM).
  _initFlutterPcmSound();
  // _startPlayback();
  // Prepare the microphone capture pipeline.
  _initAudioRecorder();
  // Fire-and-forget: storage + microphone permission requests.
  requestPermissions();
  // Fire-and-forget: native H.264 decoder setup (retries internally).
  _initVideoDecoder();
}
@override
void onClose() {
  // Stop audio playback and flush any queued frames.
  _stopPlayG711Data();
  state.audioBuffer.clear();
  // Stop the call-duration ticker and reset the counter.
  // (The original cancelled/nulled this timer twice; deduplicated.)
  state.oneMinuteTimeTimer?.cancel();
  state.oneMinuteTimeTimer = null;
  state.oneMinuteTime.value = 0;
  // Stop microphone capture.
  stopProcessingAudio();
  // Release the native decoder bound to our texture, if any.
  if (state.textureId.value != null) {
    VideoDecodePlugin.releaseDecoder();
    state.textureId.value = null;
  }
  // Tear down the talk-data subscription so a new page can re-listen.
  _streamSubscription?.cancel();
  _isListening = false;
  // Stop the periodic performance monitor.
  stopFpsMonitoring();
  // Restore the default talk expectation for the next session.
  StartChartManage().reSetDefaultTalkExpect();
  // Belt-and-braces: release any decoders the plugin still holds.
  VideoDecodePlugin.releaseAllDecoders();
  _batchProcessTimer?.cancel();
  _batchProcessTimer = null;
  // Forget decoded I-frame bookkeeping.
  _decodedIFrames.clear();
  super.onClose();
}
/// Common teardown for any terminal call state: stop playback and
/// microphone capture. Navigation/UI handling is left to the page.
void _handleInvalidTalkStatus() {
  _stopPlayG711Data();
  stopProcessingAudio();
}
/// Toggles speaker mute and tells the peer which media types we expect.
///
/// Muted: video only (no audio). Unmuted: video + G.711 audio.
/// NOTE(review): this native-H264 page still requests VideoTypeE.IMAGE
/// here, unlike the H264 expectation set in onInit — confirm intended.
void updateTalkExpect() {
  TalkExpectReq talkExpectReq = TalkExpectReq();
  // Flip the local mute flag first; the request mirrors the new state.
  state.isOpenVoice.value = !state.isOpenVoice.value;
  if (!state.isOpenVoice.value) {
    talkExpectReq = TalkExpectReq(
      videoType: [VideoTypeE.IMAGE],
      audioType: [],
    );
    showToast('已静音'.tr);
  } else {
    talkExpectReq = TalkExpectReq(
      videoType: [VideoTypeE.IMAGE],
      audioType: [AudioTypeE.G711],
    );
  }

  /// Push the new expectation to the peer and restart its resend timer.
  StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
      talkExpect: talkExpectReq);
}
/// Captures the video area as a PNG and saves it to the photo gallery.
///
/// Renders the RepaintBoundary identified by [state.globalKey], writes
/// the PNG to the app documents directory, then copies it to the gallery.
Future<void> captureAndSavePng() async {
  try {
    if (state.globalKey.currentContext == null) {
      AppLog.log('截图失败: 未找到当前上下文');
      return;
    }
    // Rasterize the boundary subtree into an image.
    final RenderRepaintBoundary boundary = state.globalKey.currentContext!
        .findRenderObject()! as RenderRepaintBoundary;
    final ui.Image image = await boundary.toImage();
    final ByteData? byteData =
        await image.toByteData(format: ui.ImageByteFormat.png);
    if (byteData == null) {
      AppLog.log('截图失败: 图像数据为空');
      return;
    }
    final Uint8List pngBytes = byteData.buffer.asUint8List();
    // Write to a fixed path in the documents directory (overwrites the
    // previous screenshot each time).
    final Directory directory = await getApplicationDocumentsDirectory();
    final String imagePath = '${directory.path}/screenshot.png';
    final File imgFile = File(imagePath);
    await imgFile.writeAsBytes(pngBytes);
    // Copy the file into the system photo gallery.
    await ImageGallerySaver.saveFile(imagePath);
    AppLog.log('截图保存路径: $imagePath');
    showToast('截图已保存到相册'.tr);
  } catch (e) {
    AppLog.log('截图失败: $e');
  }
}
// Remotely unlocks the lock associated with the current call.
//
// Resolves the lockId by matching the calling device's peerId against
// the known lock list (falling back to the detail page's lock), checks
// that remote unlock is enabled both as a feature and in settings, then
// issues the unlock request. Idiom fix: `for-in` replaces the original
// `forEach` with a function literal (avoid_function_literals_in_foreach_calls).
Future<void> remoteOpenLock() async {
  final lockPeerId = StartChartManage().lockPeerId;
  final lockListPeerId = StartChartManage().lockListPeerId;
  int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
  // Prefer the lock whose network peerId matches the calling device.
  for (final element in lockListPeerId) {
    if (element.network?.peerId == lockPeerId) {
      lockId = element.lockId ?? 0;
    }
  }
  final LockSetInfoEntity lockSetInfoEntity =
      await ApiRepository.to.getLockSettingInfoData(
    lockId: lockId.toString(),
  );
  if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
    // Remote unlock must be enabled as a device feature AND in settings.
    if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
        lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
      final LoginEntity entity = await ApiRepository.to
          .remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
      if (entity.errorCode!.codeIsSuccessful) {
        showToast('已开锁'.tr);
        // Clear the cached peer list so the next call re-resolves it.
        StartChartManage().lockListPeerId = [];
      }
    } else {
      showToast('该锁的远程开锁功能未启用'.tr);
    }
  }
}
/// Grabs the shared VoiceProcessor instance used for microphone capture.
void _initAudioRecorder() {
  state.voiceProcessor = VoiceProcessor.instance;
}
// Starts microphone capture and wires the frame/error listeners.
//
// On exit the speaker is muted (half-duplex talk: while we capture and
// send audio, the incoming audio is not played).
Future<void> startProcessingAudio() async {
  try {
    if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
      await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      state.isRecordingAudio.value = isRecording!;
      state.startRecordingAudioTime.value = DateTime.now();
      // Each captured PCM frame is delivered to _onFrame.
      state.voiceProcessor
          ?.addFrameListeners(<VoiceProcessorFrameListener>[_onFrame]);
      state.voiceProcessor?.addErrorListener(_onError);
    } else {
      // NOTE(review): permission failure is silently ignored here; the
      // commented-out errorMessage handling was never wired up.
      // state.errorMessage.value = 'Recording permission not granted';
    }
  } on PlatformException catch (ex) {
    // NOTE(review): exception deliberately swallowed (best-effort start).
    // state.errorMessage.value = 'Failed to start recorder: $ex';
  }
  state.isOpenVoice.value = false;
}
/// Stops microphone capture, records the capture duration, and unmutes
/// the speaker (resuming half-duplex playback of incoming audio).
Future<void> stopProcessingAudio() async {
  try {
    await state.voiceProcessor?.stop();
    state.voiceProcessor?.removeFrameListener(_onFrame);
    state.udpSendDataFrameNumber = 0;
    // Compute how long this capture session lasted.
    state.endRecordingAudioTime.value = DateTime.now();
    final Duration duration = state.endRecordingAudioTime.value
        .difference(state.startRecordingAudioTime.value);
    state.recordingAudioTime.value = duration.inSeconds;
  } on PlatformException catch (ex) {
    // NOTE(review): exception deliberately swallowed (best-effort stop).
    // state.errorMessage.value = 'Failed to stop recorder: $ex';
  } finally {
    // Refresh recording state and re-enable speaker output regardless.
    final bool? isRecording = await state.voiceProcessor?.isRecording();
    state.isRecordingAudio.value = isRecording!;
    state.isOpenVoice.value = true;
  }
}
// Handles one captured PCM frame: applies gain, G.711-encodes it, and
// sends the buffered bytes to the peer once a full frame accumulates.
//
// Fixes: (1) the original appended `encodedData` a second time in the
// below-threshold branch, duplicating audio bytes on the wire;
// (2) `getFrameLength` (doubled on iOS, where captures are half-size)
// was computed but never used — it is now applied as the flush
// threshold as apparently intended. TODO(review): verify on iOS.
Future<void> _onFrame(List<int> frame) async {
  // Guard against unbounded growth if sending stalls.
  if (_bufferedAudioFrames.length > state.frameLength * 3) {
    _bufferedAudioFrames.clear();
    return;
  }
  // Boost the microphone signal before encoding.
  final List<int> amplifiedFrame = _applyGain(frame, 1.6);
  // A-law G.711 encode (mode 0 selects A-law).
  final List<int> encodedData = G711Tool.encode(amplifiedFrame, 0);
  // Buffer exactly once (the duplicate addAll in the else-branch is gone).
  _bufferedAudioFrames.addAll(encodedData);
  // Millisecond timestamp, wrapped to keep the value small; the receiver
  // uses it only for relative ordering.
  final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
  // iOS delivers half-size frames, so two captures make one wire frame.
  int sendThreshold = state.frameLength;
  if (Platform.isIOS) {
    sendThreshold = state.frameLength * 2;
  }
  if (_bufferedAudioFrames.length >= sendThreshold) {
    try {
      await StartChartManage().sendTalkDataMessage(
        talkData: TalkData(
          content: _bufferedAudioFrames,
          contentType: TalkData_ContentTypeE.G711,
          durationMs: ms,
        ),
      );
    } finally {
      // Always reset the buffer, even if sending threw.
      _bufferedAudioFrames.clear();
    }
  }
}
// Error callback from the voice processor; just logs the message.
void _onError(VoiceProcessorException error) {
  AppLog.log(error.message!);
}
// Amplifies 16-bit signed PCM samples by [gainFactor], clamping each
// result into the int16 range to avoid wrap-around distortion.
//
// Idiom fix: uses `num.clamp` instead of the original manual if/else
// saturation; truncation via `toInt()` matches the original behavior.
List<int> _applyGain(List<int> pcmData, double gainFactor) {
  final List<int> result = List<int>.filled(pcmData.length, 0);
  for (int i = 0; i < pcmData.length; i++) {
    // Scale, saturate to [-32768, 32767], then truncate toward zero.
    final double amplified = pcmData[i] * gainFactor;
    result[i] = amplified.clamp(-32768.0, 32767.0).toInt();
  }
  return result;
}
// Starts the 1 Hz monitoring timer that refreshes packet-loss stats and
// runs the rendering-performance analysis. Idempotent: any running
// timer is stopped first.
void startFpsMonitoring() {
  // Ensure only one monitoring timer exists.
  stopFpsMonitoring();
  state.lastFpsUpdateTime.value = DateTime.now().millisecondsSinceEpoch;
  state.fpsTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
    // Refresh packet-loss figures (currently a stub).
    updatePacketLossStats();
    // Log FPS / drop-rate / backlog analysis.
    _analyzePerformance();
  });
}
// Stops the monitoring timer and clears the handle.
void stopFpsMonitoring() {
  state.fpsTimer?.cancel();
  state.fpsTimer = null;
}
/// Forwards [message] to the application logger.
void logMessage(String message) => AppLog.log(message);
// Placeholder for refreshing packet-loss statistics.
// NOTE(review): the try-body is empty, so this is currently a no-op —
// either implement it or remove the call from startFpsMonitoring.
void updatePacketLossStats() async {
  try {} catch (e) {
    logMessage('获取丢包率数据失败: $e');
  }
}
// Computes and logs per-second rendering statistics: actual FPS, drop
// rate, backlog sizes, and a coarse health verdict.
//
// Idiom fix: the log message now uses adjacent string literals instead
// of `+` concatenation (prefer_adjacent_string_concatenation); the
// logged text is byte-identical.
void _analyzePerformance() {
  final int now = DateTime.now().millisecondsSinceEpoch;
  // First invocation only seeds the baseline.
  if (state.lastPerformanceCheck == 0) {
    state.lastPerformanceCheck = now;
    state.lastFrameCount = state.renderedFrameCount.value;
    return;
  }
  // Evaluate roughly once per second.
  if (now - state.lastPerformanceCheck >= 1000) {
    // Frames rendered since the last check, normalized to per-second.
    final int frameRendered =
        state.renderedFrameCount.value - state.lastFrameCount;
    final double actualFPS =
        frameRendered * 1000 / (now - state.lastPerformanceCheck);
    // Drop rate as a percentage of all frames seen (guarded divide).
    final double dropRate = state.droppedFrames.value /
        (state.totalFrames.value > 0 ? state.totalFrames.value : 1) *
        100;
    // Frames received but not yet rendered.
    final int pendingFrames =
        state.totalFrames.value - state.renderedFrameCount.value;
    // Frames currently tracked as in-flight by the frame tracker.
    final int processingFrames = state.frameTracker.length;
    // Coarse classification of rendering health.
    String performanceStatus = "正常";
    if (actualFPS < 15 && dropRate > 10) {
      performanceStatus = "严重渲染瓶颈";
    } else if (actualFPS < 20 && dropRate > 5) {
      performanceStatus = "轻微渲染瓶颈";
    }
    AppLog.log("性能分析: 实际帧率=${actualFPS.toStringAsFixed(1)}fps, "
        "丢帧率=${dropRate.toStringAsFixed(1)}%, "
        "待处理帧数=$pendingFrames, "
        "处理中帧数=$processingFrames, "
        "状态=$performanceStatus");
    // Roll the baseline forward.
    state.lastPerformanceCheck = now;
    state.lastFrameCount = state.renderedFrameCount.value;
  }
}
}

View File

@ -0,0 +1,557 @@
import 'dart:async';
import 'dart:math';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:flutter/services.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:http/http.dart' as http;
import 'package:provider/provider.dart';
import 'package:star_lock/flavors.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/impl/debug_Info_model.dart';
import 'package:star_lock/talk/starChart/handle/impl/udp_talk_data_handler.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_logic.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_logic.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
import '../../../../app_settings/app_colors.dart';
import '../../../../tools/showTFView.dart';
/// Talk page that renders the video stream via the native H.264 decoder
/// (Texture widget) instead of the web/mjpeg view.
class TalkViewNativeDecodePage extends StatefulWidget {
  const TalkViewNativeDecodePage({Key? key}) : super(key: key);

  @override
  State<TalkViewNativeDecodePage> createState() =>
      _TalkViewNativeDecodePageState();
}
class _TalkViewNativeDecodePageState extends State<TalkViewNativeDecodePage>
with TickerProviderStateMixin {
final TalkViewNativeDecodeLogic logic = Get.put(TalkViewNativeDecodeLogic());
final TalkViewNativeDecodeState state =
Get.find<TalkViewNativeDecodeLogic>().state;
@override
void initState() {
  super.initState();
  // One-second looping animation used by the page's indicators.
  state.animationController = AnimationController(
    vsync: this, // this State is the TickerProvider
    duration: const Duration(seconds: 1),
  );
  state.animationController.repeat();
  // NOTE(review): repeat() already loops the controller; this listener
  // that resets/forwards on completed/dismissed looks redundant — confirm
  // before removing.
  state.animationController.addStatusListener((AnimationStatus status) {
    if (status == AnimationStatus.completed) {
      state.animationController.reset();
      state.animationController.forward();
    } else if (status == AnimationStatus.dismissed) {
      state.animationController.reset();
      state.animationController.forward();
    }
  });
}
@override
Widget build(BuildContext context) {
return WillPopScope(
onWillPop: () async {
// false 退
return false;
},
child: SizedBox(
width: 1.sw,
height: 1.sh,
child: Stack(
alignment: Alignment.center,
children: <Widget>[
Obx(
() {
final double screenWidth = MediaQuery.of(context).size.width;
final double screenHeight = MediaQuery.of(context).size.height;
final double logicalWidth = MediaQuery.of(context).size.width;
final double logicalHeight = MediaQuery.of(context).size.height;
final double devicePixelRatio =
MediaQuery.of(context).devicePixelRatio;
//
final double physicalWidth = logicalWidth * devicePixelRatio;
final double physicalHeight = logicalHeight * devicePixelRatio;
//
const int rotatedImageWidth = 480; //
const int rotatedImageHeight = 864; //
//
final double scaleWidth = physicalWidth / rotatedImageWidth;
final double scaleHeight = physicalHeight / rotatedImageHeight;
max(scaleWidth, scaleHeight); //
return state.isLoading.isTrue
? Image.asset(
'images/main/monitorBg.png',
width: screenWidth,
height: screenHeight,
fit: BoxFit.cover,
)
: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: SizedBox.expand(
child: RotatedBox(
// 使RotatedBox
quarterTurns: -1,
child: Texture(
textureId: state.textureId.value!,
filterQuality: FilterQuality.medium,
),
),
),
),
);
},
),
Obx(() => state.isLoading.isTrue
? Positioned(
bottom: 310.h,
child: Text(
'正在创建安全连接...'.tr,
style: TextStyle(color: Colors.black, fontSize: 26.sp),
))
: Container()),
Obx(() => state.textureId.value != null && state.showFps.value
? Positioned(
top: ScreenUtil().statusBarHeight + 10.h,
right: 20.w,
child: Container(
padding:
EdgeInsets.symmetric(horizontal: 10.w, vertical: 5.h),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.5),
borderRadius: BorderRadius.circular(5.h),
),
child: Column(
crossAxisAlignment: CrossAxisAlignment.end,
children: <Widget>[
// Text(
// 'FPS: ${state.decoderFps.value.toStringAsFixed(1)}',
// style: TextStyle(
// color: _getPacketLossColor(
// state.packetLossRate.value),
// fontSize: 20.sp,
// ),
// ),
Text(
'丢包率: ${state.packetLossRate.value.toStringAsFixed(1)}%',
style: TextStyle(
color: _getPacketLossColor(
state.packetLossRate.value),
fontSize: 20.sp,
),
),
Text(
'消息丢失: ${state.messageLossRate.value.toStringAsFixed(1)}%',
style: TextStyle(
color: _getPacketLossColor(
state.messageLossRate.value),
fontSize: 20.sp,
),
),
Divider(
color: Colors.white30,
height: 10.h,
thickness: 1),
Text(
'已渲染帧: ${state.renderedFrameCount.value}',
style:
TextStyle(color: Colors.white, fontSize: 18.sp),
),
Text(
'总帧数: ${state.totalFrames.value}',
style:
TextStyle(color: Colors.white, fontSize: 18.sp),
),
Text(
'丢弃帧: ${state.droppedFrames.value}',
style:
TextStyle(color: Colors.white, fontSize: 18.sp),
),
Text(
'IDR帧: ${state.hasSentIDR.value ? "已发送" : "未发送"}',
style: TextStyle(
color: state.hasSentIDR.value
? Colors.green
: Colors.red,
fontSize: 18.sp),
),
Text(
'SPS: ${state.hasSentSPS.value ? "已发送" : "未发送"}',
style: TextStyle(
color: state.hasSentSPS.value
? Colors.green
: Colors.red,
fontSize: 18.sp),
),
Text(
'PPS: ${state.hasSentPPS.value ? "已发送" : "未发送"}',
style: TextStyle(
color: state.hasSentPPS.value
? Colors.green
: Colors.red,
fontSize: 18.sp),
),
Text(
'keyFrameInterval: ${state.keyFrameInterval.value}',
style:
TextStyle(color: Colors.green, fontSize: 18.sp),
),
Text(
'decodingJitterMs: ${state.decodingJitterMs.value}',
style:
TextStyle(color: Colors.green, fontSize: 18.sp),
),
],
),
),
)
: Container()),
Obx(() => state.isLoading.isFalse && state.oneMinuteTime.value > 0
? Positioned(
top: ScreenUtil().statusBarHeight + 75.h,
width: 1.sw,
child: Obx(
() {
final String sec = (state.oneMinuteTime.value % 60)
.toString()
.padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60)
.toString()
.padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text(
'$min:$sec',
style: TextStyle(
fontSize: 26.sp, color: Colors.white),
),
],
);
},
),
)
: Container()),
Positioned(
bottom: 10.w,
child: Container(
width: 1.sw - 30.w * 2,
// height: 300.h,
margin: EdgeInsets.all(30.w),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.2),
borderRadius: BorderRadius.circular(20.h)),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
bottomTopBtnWidget(),
SizedBox(height: 20.h),
bottomBottomBtnWidget(),
SizedBox(height: 20.h),
],
),
),
),
Obx(() => state.isLoading.isTrue
? buildRotationTransition()
: Container()),
Obx(() => state.isLongPressing.value
? Positioned(
top: 80.h,
left: 0,
right: 0,
child: Center(
child: Container(
padding: EdgeInsets.all(10.w),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.7),
borderRadius: BorderRadius.circular(10.w),
),
child: Row(
mainAxisSize: MainAxisSize.min,
children: <Widget>[
Icon(Icons.mic, color: Colors.white, size: 24.w),
SizedBox(width: 10.w),
Text(
'正在说话...'.tr,
style: TextStyle(
fontSize: 20.sp, color: Colors.white),
),
],
),
),
),
)
: Container()),
],
),
),
);
}
Widget bottomTopBtnWidget() {
return Row(mainAxisAlignment: MainAxisAlignment.center, children: <Widget>[
//
GestureDetector(
onTap: () {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
//
logic.updateTalkExpect();
}
},
child: Container(
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Obx(() => Image(
width: 40.w,
height: 40.w,
image: state.isOpenVoice.value
? const AssetImage(
'images/main/icon_lockDetail_monitoringOpenVoice.png')
: const AssetImage(
'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
),
),
SizedBox(width: 50.w),
//
GestureDetector(
onTap: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
await logic.captureAndSavePng();
}
},
child: Container(
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenshot.png')),
),
),
SizedBox(width: 50.w),
//
GestureDetector(
onTap: () async {
logic.showToast('功能暂未开放'.tr);
// if (
// state.talkStatus.value == TalkStatus.answeredSuccessfully) {
// if (state.isRecordingScreen.value) {
// await logic.stopRecording();
// } else {
// await logic.startRecording();
// }
// }
},
child: Container(
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
fit: BoxFit.fill,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenRecording.png'),
),
),
),
]);
}
Widget bottomBottomBtnWidget() {
return Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
//
Obx(
() => bottomBtnItemWidget(
getAnswerBtnImg(),
getAnswerBtnName(),
Colors.white,
longPress: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
//
logic.startProcessingAudio();
state.isLongPressing.value = true;
}
},
longPressUp: () async {
//
logic.stopProcessingAudio();
state.isLongPressing.value = false;
},
onClick: () async {
if (state.talkStatus.value ==
TalkStatus.passiveCallWaitingAnswer) {
//
logic.initiateAnswerCommand();
}
},
),
),
bottomBtnItemWidget(
'images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red,
onClick: () {
//
logic.udpHangUpAction();
}),
bottomBtnItemWidget(
'images/main/icon_lockDetail_monitoringUnlock.png',
'开锁'.tr,
AppColors.mainColor,
onClick: () {
// if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
// state.listData.value.length > 0) {
// logic.udpOpenDoorAction();
// }
// if (UDPManage().remoteUnlock == 1) {
// logic.udpOpenDoorAction();
// showDeletPasswordAlertDialog(context);
// } else {
// logic.showToast('请在锁设置中开启远程开锁'.tr);
// }
logic.remoteOpenLock();
},
)
]);
}
String getAnswerBtnImg() {
switch (state.talkStatus.value) {
case TalkStatus.passiveCallWaitingAnswer:
return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
case TalkStatus.answeredSuccessfully:
case TalkStatus.proactivelyCallWaitingAnswer:
return 'images/main/icon_lockDetail_monitoringUnTalkback.png';
default:
return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
}
}
String getAnswerBtnName() {
switch (state.talkStatus.value) {
case TalkStatus.passiveCallWaitingAnswer:
return '接听'.tr;
case TalkStatus.proactivelyCallWaitingAnswer:
case TalkStatus.answeredSuccessfully:
return '长按说话'.tr;
default:
return '接听'.tr;
}
}
Widget bottomBtnItemWidget(
String iconUrl,
String name,
Color backgroundColor, {
required Function() onClick,
Function()? longPress,
Function()? longPressUp,
}) {
double wh = 80.w;
return GestureDetector(
onTap: onClick,
onLongPress: longPress,
onLongPressUp: longPressUp,
child: SizedBox(
height: 160.w,
width: 140.w,
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: <Widget>[
Container(
width: wh,
height: wh,
constraints: BoxConstraints(
minWidth: wh,
),
decoration: BoxDecoration(
color: backgroundColor,
borderRadius: BorderRadius.circular((wh + 10.w * 2) / 2),
),
padding: EdgeInsets.all(20.w),
child: Image.asset(iconUrl, fit: BoxFit.fitWidth),
),
SizedBox(height: 20.w),
Text(
name,
style: TextStyle(fontSize: 20.sp, color: Colors.white),
textAlign: TextAlign.center, // 使
maxLines: 2, // 1
)
],
),
),
);
}
//
Color _getPacketLossColor(double lossRate) {
if (lossRate < 1.0) {
return Colors.green; // 1%绿
} else if (lossRate < 5.0) {
return Colors.yellow; // 1%-5%
} else if (lossRate < 10.0) {
return Colors.orange; // 5%-10%
} else {
return Colors.red; // 10%
}
}
//
Widget buildRotationTransition() {
return Positioned(
left: ScreenUtil().screenWidth / 2 - 220.w / 2,
top: ScreenUtil().screenHeight / 2 - 220.w / 2 - 150.h,
child: GestureDetector(
child: RotationTransition(
//
alignment: Alignment.center,
//
turns: state.animationController,
//view
child: AnimatedOpacity(
opacity: 0.5,
duration: const Duration(seconds: 2),
child: Image.asset(
'images/main/realTime_connecting.png',
width: 220.w,
height: 220.w,
),
),
),
onTap: () {
state.animationController.forward();
},
),
);
}
@override
void dispose() {
state.animationController.dispose();
CallTalk().finishAVData();
super.dispose();
}
}

View File

@ -0,0 +1,109 @@
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:get/get_rx/get_rx.dart';
import 'package:get/get_rx/src/rx_types/rx_types.dart';
import 'package:get/state_manager.dart';
import 'package:network_info_plus/network_info_plus.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
import '../../../../tools/storage.dart';
/// Coarse network quality levels for the live talk stream.
///
/// The trailing numbers mirror the raw status codes reported by the
/// transport layer — TODO confirm against the sender's protocol.
enum NetworkStatus {
  normal, // 0: healthy connection
  lagging, // 1: stream is stuttering
  delayed, // 2: noticeable latency
  packetLoss // 3: significant packet loss
}
/// Mutable/reactive state for the native-decode talk page.
///
/// Owned by `TalkViewNativeDecodeLogic` and read by
/// `TalkViewNativeDecodePage`; fields are mostly GetX observables so the
/// UI rebuilds automatically.
class TalkViewNativeDecodeState {
  // Upper bound of the source frame rate used for pacing/statistics.
  static const int maxSourceFps = 25; // assumes a 25fps camera source
  int udpSendDataFrameNumber = 0; // sequence number for outgoing UDP frames
  // var isSenderAudioData = false.obs;// (disabled: mic-sending flag)
  // Local Wi-Fi IP of this phone — resolved lazily.
  Future<String?> userMobileIP = NetworkInfo().getWifiIP();
  // Cached user id from local storage — resolved lazily.
  Future<String?> userUid = Storage.getUid();
  // Raw UDP session status code (observed values 0-9) —
  // TODO confirm the meaning of each code against the protocol docs.
  RxInt udpStatus =
      0.obs; //0 1 2 3 4 5 6 8 9
  TextEditingController passwordTF = TextEditingController();
  RxList<int> listAudioData = <int>[].obs; // raw audio samples to send
  // Wraps the video widget for RepaintBoundary screenshots.
  GlobalKey globalKey = GlobalKey();
  Timer? oneMinuteTimeTimer; // countdown ticker for the call time limit
  RxInt oneMinuteTime = 0.obs; // remaining call time in seconds
  // Command-retry timers (answer / hang up / open door) — created on demand.
  late Timer answerTimer;
  late Timer hangUpTimer;
  late Timer openDoorTimer;
  Timer? fpsTimer;
  // Drives the "connecting" spinner; created in the page's initState.
  late AnimationController animationController;
  RxInt elapsedSeconds = 0.obs;
  // Buffered incoming audio frames awaiting playback.
  List<TalkData> audioBuffer = <TalkData>[].obs;
  RxBool isLoading = true.obs; // true until the stream is ready
  RxBool isPlaying = false.obs; // whether audio playback is running
  Rx<TalkStatus> talkStatus = TalkStatus.none.obs; // current call state
  // Shared singleton tracking the global call status.
  final StartChartTalkStatus startChartTalkStatus =
      StartChartTalkStatus.instance;
  // Shared repository for incoming talk data.
  final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
  RxBool isOpenVoice = true.obs; // remote-audio (speaker) toggle
  RxBool isRecordingScreen = false.obs; // screen recording in progress
  RxBool isRecordingAudio = false.obs; // mic capture in progress
  Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // capture start
  Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; // capture end
  RxInt recordingAudioTime = 0.obs; // capture duration
  late VoiceProcessor? voiceProcessor; // platform mic-capture plugin
  // Audio capture parameters: 320 samples per frame at 8 kHz (G.711-style).
  final int frameLength = 320; //640
  final int sampleRate = 8000; //8000
  RxBool isLongPressing = false.obs; // push-to-talk button held down
  // Texture id registered by the native decoder; null until available.
  Rx<int?> textureId = Rx<int?>(null);
  // FPS overlay bookkeeping.
  RxInt lastFpsUpdateTime = 0.obs; // last FPS refresh timestamp (ms)
  RxBool showFps = true.obs; // whether the debug stats overlay is shown
  // Stream statistics shown in the debug overlay.
  RxDouble decoderFps = 0.0.obs; // decoder output frame rate
  RxDouble messageLossRate = 0.0.obs; // message loss, percent
  RxDouble packetLossRate = 0.0.obs; // packet loss, percent
  RxInt lastPacketStatsUpdateTime = 0.obs; // last stats refresh (ms)
  // Decoder frame counters.
  RxInt renderedFrameCount = 0.obs; // frames actually rendered
  RxInt totalFrames = 0.obs; // frames received
  RxInt droppedFrames = 0.obs; // frames discarded
  RxBool hasSentIDR = false.obs; // IDR frame fed to decoder
  RxBool hasSentSPS = false.obs; // SPS fed to decoder
  RxBool hasSentPPS = false.obs; // PPS fed to decoder
  RxInt keyFrameInterval = 0.obs; // interval between key frames (ms)
  RxInt decodingJitterMs = 0.obs; // decoding jitter (ms)
  // Performance-check bookkeeping (plain ints, not observables).
  int lastPerformanceCheck = 0;
  int lastFrameCount = 0;
  // Per-frame tracking; key format is "textureId_frameSeq" —
  // TODO confirm the value schema against where frames are recorded.
  Map<String, Map<String, dynamic>> frameTracker = {};
}

View File

@ -45,12 +45,12 @@ class TalkViewLogic extends BaseGetXController {
final int minAudioBufferSize = 1; // 1
final int maxAudioBufferSize = 3; // 3
int audioBufferSize = 2; // 2
bool _isFirstAudioFrame = true; //
//
int _startTime = 0; //
int _startAudioTime = 0; //
bool _isFirstFrame = true; //
bool _isFirstAudioFrame = true; //
//
final List<int> _bufferedAudioFrames = <int>[];
@ -106,6 +106,24 @@ class TalkViewLogic extends BaseGetXController {
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
// //
if (_isFirstAudioFrame) {
_startAudioTime = currentTime;
_isFirstAudioFrame = false;
}
//
final expectedTime = _startAudioTime + talkData.durationMs;
final audioDelay = currentTime - expectedTime;
//
if (audioDelay > 500) {
state.audioBuffer.clear();
if (state.isOpenVoice.value) {
_playAudioFrames();
}
return;
}
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); //
}
@ -118,7 +136,7 @@ class TalkViewLogic extends BaseGetXController {
if (_isFirstFrame) {
_startTime = currentTime;
_isFirstFrame = false;
AppLog.log('第一帧帧的时间戳:${talkData.durationMs}');
// AppLog.log('第一帧帧的时间戳:${talkData.durationMs}');
}
// AppLog.log('其他帧的时间戳:${talkData.durationMs}');
//
@ -366,19 +384,6 @@ class TalkViewLogic extends BaseGetXController {
}
}
// token
Future<void> _getLockNetToken() async {
final LockNetTokenEntity entity = await ApiRepository.to.getLockNetToken(
lockId: lockDetailState.keyInfos.value.lockId.toString());
if (entity.errorCode!.codeIsSuccessful) {
lockDetailState.lockNetToken = entity.data!.token!.toString();
AppLog.log('从服务器获取联网token:${lockDetailState.lockNetToken}');
} else {
BuglyTool.uploadException(
message: '点击了需要联网开锁', detail: '点击了需要联网开锁 获取连网token失败', upload: true);
showToast('网络访问失败,请检查网络是否正常'.tr, something: () {});
}
}
///
Future<bool> getPermissionStatus() async {

View File

@ -48,7 +48,7 @@ class H264WebViewLogic extends BaseGetXController {
Timer? _mockDataTimer;
int _startAudioTime = 0; //
int audioBufferSize = 2; // 2
bool _isFirstAudioFrame = true; //
//
final List<int> _bufferedAudioFrames = <int>[];
final Queue<List<int>> _frameBuffer = Queue<List<int>>();
@ -131,6 +131,24 @@ class H264WebViewLogic extends BaseGetXController {
//
switch (contentType) {
case TalkData_ContentTypeE.G711:
// //
if (_isFirstAudioFrame) {
_startAudioTime = currentTime;
_isFirstAudioFrame = false;
}
//
final expectedTime = _startAudioTime + talkData.durationMs;
final audioDelay = currentTime - expectedTime;
//
if (audioDelay > 500) {
state.audioBuffer.clear();
if (state.isOpenVoice.value) {
_playAudioFrames();
}
return;
}
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); //
}

View File

@ -127,7 +127,8 @@ dependencies:
sdk: flutter
aliyun_face_plugin:
path: aliyun_face_plugin
video_decode_plugin:
path: ../video_decode_plugin
flutter_localizations:
sdk: flutter