fix: adjust and optimize the H264 playback logic and add audio

This commit is contained in:
liyi 2025-04-22 15:17:42 +08:00
parent a106fe6bbf
commit 07aa71c679
15 changed files with 244 additions and 90 deletions

View File

@ -774,9 +774,10 @@ class LockDetailLogic extends BaseGetXController {
showToast('设备未配网'.tr);
return;
}
// Start the call request timer, targeting the lock's peer id
StartChartManage().startCallRequestMessageTimer(
ToPeerId: StartChartManage().lockPeerId ?? '');
ToPeerId: StartChartManage().lockNetworkInfo.peerId ?? '');
} else {
showToast('猫眼设置为省电模式时无法进行监控,请在猫眼设置中切换为其他模式'.tr);
}

View File

@ -374,6 +374,9 @@ class LockFeature {
this.isNoSupportedBlueBroadcast,
this.wifiLockType,
this.wifi,
this.isH264,
this.isH265,
this.isMJpeg,
});
LockFeature.fromJson(Map<String, dynamic> json) {
@ -391,6 +394,9 @@ class LockFeature {
isNoSupportedBlueBroadcast = json['isNoSupportedBlueBroadcast'];
wifiLockType = json['wifiLockType'];
wifi = json['wifi'];
isH264 = json['isH264'];
isH265 = json['isH265'];
isMJpeg = json['isMJpeg'];
}
int? password;
@ -407,6 +413,9 @@ class LockFeature {
int? isNoSupportedBlueBroadcast;
int? wifiLockType;
int? wifi;
int? isH264;
int? isH265;
int? isMJpeg;
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = <String, dynamic>{};
@ -424,6 +433,9 @@ class LockFeature {
data['isNoSupportedBlueBroadcast'] = isNoSupportedBlueBroadcast;
data['wifiLockType'] = wifiLockType;
data['wifi'] = wifi;
data['isH264'] = isH264;
data['isH265'] = isH265;
data['isMJpeg'] = isMJpeg;
return data;
}
}
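
The new flags are plain int fields where 1 means the codec is supported. A minimal sketch, assuming LockFeature is in scope; the extension name LockFeatureCodecs is hypothetical and not part of this commit, it only illustrates how the repeated `== 1` checks in the handlers below could read:

// Hypothetical sketch: convenience getters over the new capability flags.
extension LockFeatureCodecs on LockFeature {
  bool get supportsH264 => isH264 == 1;
  bool get supportsH265 => isH265 == 1;
  bool get supportsMJpeg => isMJpeg == 1;
}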

View File

@ -4,6 +4,7 @@ import 'dart:typed_data';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:get/get.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@ -22,7 +23,7 @@ class UdpEchoTestHandler extends ScpMessageBaseHandle
EasyLoading.showToast(scpMessage.Payload, duration: 2000.milliseconds);
} else {
talkDataRepository.addTalkData(
TalkData(content: payload, contentType: TalkData_ContentTypeE.Image));
TalkDataModel(talkData: TalkData(content: payload, contentType: TalkData_ContentTypeE.Image)));
}
}

View File

@ -4,6 +4,7 @@ import 'dart:typed_data';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:get/get.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
@ -13,6 +14,7 @@ import 'package:star_lock/talk/starChart/proto/gateway_reset.pb.dart';
import 'package:star_lock/talk/starChart/proto/generic.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_accept.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import '../../star_chart_manage.dart';
@ -77,7 +79,24 @@ class UdpTalkAcceptHandler extends ScpMessageBaseHandle
}
void _handleSendExpect() {
// Choose the talk-expect format based on the lock's capabilities
startChartManage.sendImageVideoAndG711AudioTalkExpectData();
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
final isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
// Prefer H264 when the lock supports it, otherwise fall back to MJPEG
if (isH264) {
// Lock supports H264: expect H264 video and G711 audio
startChartManage.sendH264VideoAndG711AudioTalkExpectData();
print('锁支持H264发送H264视频格式期望数据');
} else if (isMJpeg) {
// Lock supports MJPEG only: expect MJPEG image video and G711 audio
startChartManage.sendImageVideoAndG711AudioTalkExpectData();
print('锁不支持H264支持MJPEG发送MJPEG视频格式期望数据');
} else {
// Neither flag set: fall back to the default image video expect data
startChartManage.sendImageVideoAndG711AudioTalkExpectData();
print('锁不支持H264和MJPEG默认发送图像视频格式期望数据');
}
}
}

View File

@ -5,6 +5,7 @@ import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@ -63,7 +64,7 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
int? messageId}) {
// Collect packet loss statistics
final stats = PacketLossStatistics().getStatistics();
_asyncLog('丢包统计: $stats');
// _asyncLog('丢包统计: $stats');
// _asyncLog(
// '分包数据:messageId:$messageId [$spIndex/$spTotal] PayloadLength:$PayloadLength');
if (messageType == MessageTypeConstant.RealTimeData) {
@ -118,7 +119,7 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
void _handleVideoH264(TalkData talkData) {
final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame();
talkDataH264Frame.mergeFromBuffer(talkData.content);
frameHandler.handleFrame(talkDataH264Frame);
frameHandler.handleFrame(talkDataH264Frame, talkData);
}
/// Handle MJPEG image video data
@ -127,7 +128,11 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
await _processCompletePayload(Uint8List.fromList(talkData.content));
processCompletePayload.forEach((element) {
talkData.content = element;
talkDataRepository.addTalkData(talkData);
talkDataRepository.addTalkData(
TalkDataModel(
talkData: talkData,
),
);
});
}
@ -138,7 +143,11 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
// // Convert the G711 data to PCM
// List<int> pcmBytes = G711().convertList(g711Data);
// talkData.content = pcmBytes;
talkDataRepository.addTalkData(talkData);
talkDataRepository.addTalkData(
TalkDataModel(
talkData: talkData,
),
);
} catch (e) {
print('Error decoding G.711 to PCM: $e');
}

View File

@ -20,7 +20,7 @@ import '../../star_chart_manage.dart';
class UdpTalkExpectHandler extends ScpMessageBaseHandle
implements ScpMessageHandler {
final TalkViewState talkViewState = Get.put(TalkViewLogic()).state;
// final TalkViewState talkViewState = Get.put(TalkViewLogic()).state;
@override
void handleReq(ScpMessage scpMessage) {
@ -40,7 +40,7 @@ class UdpTalkExpectHandler extends ScpMessageBaseHandle
startChartManage.stopTalkExpectMessageTimer();
// Stop the call request timer
startChartManage.stopCallRequestMessageTimer();
talkViewState.rotateAngle.value = talkExpectResp.rotate ?? 0;
// talkViewState.rotateAngle.value = talkExpectResp.rotate ?? 0;
// Start the keep-alive timeout timer:
// the timeout action runs if no talk keep-alive is received within x seconds;
talkePingOverTimeTimerManager.start();

View File

@ -7,7 +7,9 @@ import 'package:flutter_local_notifications/flutter_local_notifications.dart';
import 'package:get/get.dart';
import 'package:star_lock/appRouters.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/talk_constant.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
@ -16,6 +18,7 @@ import 'package:star_lock/talk/starChart/proto/gateway_reset.pb.dart';
import 'package:star_lock/talk/starChart/proto/generic.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_request.pb.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/push/xs_jPhush.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:star_lock/translations/current_locale_tool.dart';
@ -30,7 +33,6 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
@override
void handleReq(ScpMessage scpMessage) async {
final currentTime = DateTime.now().millisecondsSinceEpoch;
// Ignore repeated requests arriving within 1 second
if (currentTime - _lastRequestTime < 1000) {
@ -105,18 +107,16 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
_showTalkRequestNotification(talkObjectName: talkObjectName);
// Mark the call as waiting for the user to answer
talkStatus.setPassiveCallWaitingAnswer();
// Open the H264 web view when the default expect includes H264, otherwise the MJPEG talk view
if (startChartManage
.getDefaultTalkExpect()
.videoType
.indexOf(VideoTypeE.H264) ==
-1) {
.getDefaultTalkExpect()
.videoType
.contains(VideoTypeE.H264)) {
Get.toNamed(
Routers.starChartTalkView,
Routers.h264WebView,
);
} else {
Get.toNamed(
Routers.h264WebView,
Routers.starChartTalkView,
);
}
}
@ -189,7 +189,24 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
}
void _handleSendExpect() {
// Choose the talk-expect format based on the lock's capabilities
startChartManage.sendOnlyImageVideoTalkExpectData();
final LockListInfoItemEntity currentKeyInfo =
CommonDataManage().currentKeyInfo;
final isH264 = currentKeyInfo.lockFeature?.isH264 == 1;
final isMJpeg = currentKeyInfo.lockFeature?.isMJpeg == 1;
// Prefer H264 when the lock supports it, otherwise fall back to MJPEG
if (isH264) {
// Lock supports H264: expect H264 video only
startChartManage.sendOnlyH264VideoTalkExpectData();
print('锁支持H264发送H264视频格式期望数据');
} else if (isMJpeg) {
// Lock supports MJPEG only: expect MJPEG image video only
startChartManage.sendOnlyImageVideoTalkExpectData();
print('锁不支持H264支持MJPEG发送MJPEG视频格式期望数据');
} else {
// Neither flag set: fall back to the default image video expect data
startChartManage.sendOnlyImageVideoTalkExpectData();
print('锁不支持H264和MJPEG默认发送图像视频格式期望数据');
}
}
}
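
Both UdpTalkAcceptHandler and UdpTalkRequestHandler now repeat the same capability check before choosing which talk-expect data to send; the only difference is whether G711 audio is requested alongside the video. A minimal consolidation sketch; the helper name sendTalkExpectForCurrentLock is an assumption, not part of this commit:

// Sketch only: one shared selection routine, parameterized on whether
// audio should be requested alongside the video stream.
void sendTalkExpectForCurrentLock({required bool withAudio}) {
  final feature = CommonDataManage().currentKeyInfo.lockFeature;
  final manage = StartChartManage();
  if (feature?.isH264 == 1) {
    if (withAudio) {
      manage.sendH264VideoAndG711AudioTalkExpectData();
    } else {
      manage.sendOnlyH264VideoTalkExpectData();
    }
  } else {
    // MJPEG and the no-flag fallback currently send the same expect data.
    if (withAudio) {
      manage.sendImageVideoAndG711AudioTalkExpectData();
    } else {
      manage.sendOnlyImageVideoTalkExpectData();
    }
  }
}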

View File

@ -3,15 +3,20 @@ import 'dart:typed_data';
import 'package:flutter/services.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import '../../proto/talk_data_h264_frame.pb.dart';
class H264FrameHandler {
final void Function(TalkDataModel frameData) onCompleteFrame;
final void Function(List<int> frameData) onCompleteFrame;
// Sequence number of the last processed I-frame
int _lastProcessedIFrameSeq = -1;
H264FrameHandler({required this.onCompleteFrame});
void handleFrame(TalkDataH264Frame frame) {
onCompleteFrame(frame.frameData);
void handleFrame(TalkDataH264Frame frame, TalkData talkData) {
onCompleteFrame(
TalkDataModel(talkData: talkData, talkDataH264Frame: frame));
}
}

View File

@ -0,0 +1,9 @@
import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
class TalkDataModel {
TalkData? talkData;
TalkDataH264Frame? talkDataH264Frame;
TalkDataModel({required this.talkData, this.talkDataH264Frame});
}

View File

@ -1,9 +1,10 @@
import 'dart:async';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
class TalkDataRepository {
TalkDataRepository._() {
_talkDataStreamController = StreamController<TalkData>.broadcast(
_talkDataStreamController = StreamController<TalkDataModel>.broadcast(
onListen: () {
_isListening = true;
},
@ -18,13 +19,13 @@ class TalkDataRepository {
static TalkDataRepository get instance => _instance;
late final StreamController<TalkData> _talkDataStreamController;
late final StreamController<TalkDataModel> _talkDataStreamController;
bool _isListening = false;
// Broadcast stream of incoming talk data
Stream<TalkData> get talkDataStream => _talkDataStreamController.stream;
Stream<TalkDataModel> get talkDataStream => _talkDataStreamController.stream;
void addTalkData(TalkData talkData) {
void addTalkData(TalkDataModel talkData) {
if (_isListening) {
_talkDataStreamController.add(talkData);
}
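
With the repository now emitting TalkDataModel instead of raw TalkData, a listener can tell assembled H264 frames apart from MJPEG or G711 payloads by which field is populated. A minimal consumer sketch; renderH264Frame and handleImageOrAudio are hypothetical placeholders, not part of this commit:

// Hypothetical sinks, for illustration only.
void renderH264Frame(List<int> frameData) {}
void handleImageOrAudio(TalkData data) {}

void listenExample() {
  TalkDataRepository.instance.talkDataStream.listen((TalkDataModel model) {
    if (model.talkDataH264Frame != null) {
      // A complete frame assembled by H264FrameHandler.
      renderH264Frame(model.talkDataH264Frame!.frameData);
    } else if (model.talkData != null) {
      // MJPEG image or G711 audio payload.
      handleImageOrAudio(model.talkData!);
    }
  });
}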

View File

@ -15,6 +15,7 @@ import 'package:star_lock/talk/starChart/constant/payload_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/udp_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/handle/other/talke_data_over_time_timer_manager.dart';
@ -55,10 +56,10 @@ class ScpMessageBaseHandle {
// Assembles complete H264 frames from incoming packet data
final H264FrameHandler frameHandler =
H264FrameHandler(onCompleteFrame: (frameData) {
H264FrameHandler(onCompleteFrame: (TalkDataModel talkDataModel) {
// Forward the assembled frame to the talk data repository
TalkDataRepository.instance.addTalkData(
TalkData(contentType: TalkData_ContentTypeE.H264, content: frameData),
talkDataModel,
);
});
@ -71,6 +72,7 @@ class ScpMessageBaseHandle {
messageId: scpMessage.MessageId!,
);
}
// Reply with a generic error response
void replyErrorMessage(ScpMessage scpMessage) {
startChartManage.sendGenericRespErrorMessage(

View File

@ -113,7 +113,7 @@ class StartChartManage {
final int _maxPayloadSize = 8 * 1024; // maximum payload size per packet (8 KB)
// Default talk-expect request
TalkExpectReq _defaultTalkExpect = TalkConstant.ImageExpect;
TalkExpectReq _defaultTalkExpect = TalkConstant.H264Expect;
String relayPeerId = ''; // peerId of the relay server
@ -227,20 +227,6 @@ class StartChartManage {
/// Start receiving data on the UDP socket
_onReceiveData(_udpSocket!, Get.context!);
// //ToDo:
// //
// Timer.periodic(Duration(seconds: 1), (Timer t) {
// UdpTalkDataHandler().resetDataRates();
// //
// Provider.of<DebugInfoModel>(Get.context!, listen: false)
// .updateDebugInfo(
// UdpTalkDataHandler().getLastRecvDataRate() ~/ 1024, // KB
// UdpTalkDataHandler().getLastRecvPacketCount(),
// UdpTalkDataHandler().getLastSendDataRate() ~/ 1024, // KB
// UdpTalkDataHandler().getLastSendPacketCount(),
// );
// });
}).catchError((error) {
_log(text: 'Failed to bind UDP socket: $error');
});
@ -1145,7 +1131,7 @@ class StartChartManage {
}
void reSetDefaultTalkExpect() {
_defaultTalkExpect = TalkConstant.ImageExpect;
_defaultTalkExpect = TalkConstant.H264Expect;
}
TalkExpectReq getDefaultTalkExpect() {
@ -1163,12 +1149,27 @@ class StartChartManage {
}
/// Send talk-expect data requesting only H264 video
void sendImageVideoAndG711AudioTalkExpectData() {
final talkExpectReq = TalkConstant.ImageExpect;
void sendOnlyH264VideoTalkExpectData() {
final talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.H264],
audioType: [],
);
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
talkExpect: talkExpectReq);
}
/// Send talk-expect data for MJPEG image video and G711 audio
void sendImageVideoAndG711AudioTalkExpectData() {
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
talkExpect: TalkConstant.ImageExpect);
}
/// Send talk-expect data for H264 video and G711 audio
void sendH264VideoAndG711AudioTalkExpectData() {
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
talkExpect: TalkConstant.H264Expect);
}
/// Send a remote unlock message
void sendRemoteUnLockMessage({
required String bluetoothDeviceName,

View File

@ -23,6 +23,7 @@ import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
@ -96,31 +97,15 @@ class TalkViewLogic extends BaseGetXController {
// Listen to the talk data stream
void _startListenTalkData() {
state.talkDataRepository.talkDataStream.listen((TalkData talkData) async {
final contentType = talkData.contentType;
state.talkDataRepository.talkDataStream
.listen((TalkDataModel talkDataModel) async {
final talkData = talkDataModel.talkData;
final contentType = talkData!.contentType;
final currentTime = DateTime.now().millisecondsSinceEpoch;
// Dispatch by content type
switch (contentType) {
case TalkData_ContentTypeE.G711:
// //
// if (_isFirstAudioFrame) {
// _startAudioTime = currentTime;
// _isFirstAudioFrame = false;
// }
// Expected playback time for this frame
final expectedTime = _startAudioTime + talkData.durationMs;
final audioDelay = currentTime - expectedTime;
// Clear the buffer when audio lags by more than 500 ms
if (audioDelay > 500) {
state.audioBuffer.clear();
if (state.isOpenVoice.value) {
_playAudioFrames();
}
return;
}
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); // drop the oldest audio frame
}
@ -195,7 +180,6 @@ class TalkViewLogic extends BaseGetXController {
// _frameCount = 0;
// _lastFpsUpdateTime = currentTime;
// }
} else {
// AppLog.log('⚠️ 帧未找到缓存 - Key: $cacheKey');
state.videoBuffer.removeAt(oldestIndex); // discard the frame with no cached image
@ -497,7 +481,6 @@ class TalkViewLogic extends BaseGetXController {
_initAudioRecorder();
requestPermissions();
}
@override

View File

@ -25,7 +25,9 @@ import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
@ -44,6 +46,9 @@ class H264WebViewLogic extends BaseGetXController {
// Chunk size for sending data to the WebView
static const int CHUNK_SIZE = 4096;
Timer? _mockDataTimer;
int _startAudioTime = 0; // start time of audio playback
int audioBufferSize = 2; // buffer up to 2 audio frames
// Encoded microphone frames buffered before sending
final List<int> _bufferedAudioFrames = <int>[];
final Queue<List<int>> _frameBuffer = Queue<List<int>>();
@ -51,7 +56,6 @@ class H264WebViewLogic extends BaseGetXController {
@override
void onInit() {
super.onInit();
// Initialize the WebView controller
state.webViewController = WebViewController()
..setJavaScriptMode(JavaScriptMode.unrestricted)
@ -63,18 +67,40 @@ class H264WebViewLogic extends BaseGetXController {
},
);
state.isShowLoading.value = true;
// Load the local HTML page
_loadLocalHtml();
super.onInit();
// Listen for incoming frame data
_createFramesStreamListen();
// playLocalTestVideo();
_startListenTalkStatus();
state.talkStatus.value = state.startChartTalkStatus.status;
// Initialize PCM audio playback
_initFlutterPcmSound();
// Initialize the audio recorder
_initAudioRecorder();
// Load the local HTML page
_loadLocalHtml();
// playLocalTestVideo();
requestPermissions();
}
Future<void> requestPermissions() async {
// Request storage permission
var storageStatus = await Permission.storage.request();
// Request microphone permission
var microphoneStatus = await Permission.microphone.request();
if (storageStatus.isGranted && microphoneStatus.isGranted) {
print("Permissions granted");
} else {
print("Permissions denied");
// If permanently denied, guide the user to the app settings
if (await Permission.storage.isPermanentlyDenied) {
openAppSettings(); // open the system settings page
}
}
}
///
@ -96,16 +122,37 @@ class H264WebViewLogic extends BaseGetXController {
}
void _createFramesStreamListen() async {
state.talkDataRepository.talkDataStream.listen((TalkData event) async {
// Buffer the incoming frame
_frameBuffer.add(event.content);
state.talkDataRepository.talkDataStream
.listen((TalkDataModel talkDataModel) async {
final talkData = talkDataModel.talkData;
final contentType = talkData!.contentType;
final currentTime = DateTime.now().millisecondsSinceEpoch;
// When the buffer exceeds its limit, flush frames to the player
while (_frameBuffer.length > FRAME_BUFFER_SIZE) {
if (_frameBuffer.isNotEmpty) {
final frame = _frameBuffer.removeFirst();
await _sendBufferedData(frame);
}
// Dispatch by content type
switch (contentType) {
case TalkData_ContentTypeE.G711:
if (state.audioBuffer.length >= audioBufferSize) {
state.audioBuffer.removeAt(0); // drop the oldest audio frame
}
state.audioBuffer.add(talkData); // buffer the new audio frame
// Play buffered audio frames
_playAudioFrames();
break;
case TalkData_ContentTypeE.H264:
// Buffer the H264 frame data
_frameBuffer.add(talkData.content);
// When the buffer exceeds its limit, flush frames to the WebView player
while (_frameBuffer.length > FRAME_BUFFER_SIZE) {
if (_frameBuffer.isNotEmpty) {
final frame = _frameBuffer.removeFirst();
await _sendBufferedData(frame);
}
if (state.isShowLoading.isTrue) {
state.isShowLoading.value = false;
}
}
break;
}
});
}
@ -134,6 +181,51 @@ class H264WebViewLogic extends BaseGetXController {
// }
// }
//
void _playAudioFrames() {
// Do not start playback until the buffer
// holds at least audioBufferSize frames.
if (state.audioBuffer.isEmpty ||
state.audioBuffer.length < audioBufferSize) {
return;
}
// Find the oldest frame by its duration timestamp
TalkData? oldestFrame;
int oldestIndex = -1;
for (int i = 0; i < state.audioBuffer.length; i++) {
if (oldestFrame == null ||
state.audioBuffer[i].durationMs < oldestFrame.durationMs) {
oldestFrame = state.audioBuffer[i];
oldestIndex = i;
}
}
// Play and remove the oldest frame
if (oldestFrame != null && oldestIndex != -1) {
if (state.isOpenVoice.value) {
// Play the decoded audio frame
_playAudioData(oldestFrame);
}
state.audioBuffer.removeAt(oldestIndex);
}
}
/// Decode a G711 frame and feed it to the PCM player
void _playAudioData(TalkData talkData) async {
if (state.isOpenVoice.value) {
final list =
G711().decodeAndDenoise(talkData.content, true, 8000, 300, 150);
// Convert the PCM data to a PcmArrayInt16
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
FlutterPcmSound.feed(fromList);
if (!state.isPlaying.value) {
FlutterPcmSound.play();
state.isPlaying.value = true;
}
}
}
/// Load the local HTML file
Future<void> _loadLocalHtml() async {
// Load the bundled HTML asset into the WebView
@ -186,10 +278,10 @@ class H264WebViewLogic extends BaseGetXController {
Timer.periodic(const Duration(seconds: 1), (Timer t) {
if (state.isShowLoading.isFalse) {
state.oneMinuteTime.value++;
if (state.oneMinuteTime.value >= 60) {
t.cancel(); //
state.oneMinuteTime.value = 0;
}
// if (state.oneMinuteTime.value >= 60) {
// t.cancel(); //
// state.oneMinuteTime.value = 0;
// }
}
});
break;
@ -321,7 +413,7 @@ class H264WebViewLogic extends BaseGetXController {
}
// Amplify the recorded audio frame
List<int> amplifiedFrame = _applyGain(frame, 1.6);
List<int> amplifiedFrame = _applyGain(frame, 1.8);
// Encode the amplified frame as G711 data
List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0 = A-law
_bufferedAudioFrames.addAll(encodedData);
@ -409,7 +501,7 @@ class H264WebViewLogic extends BaseGetXController {
});
final LockSetInfoEntity lockSetInfoEntity =
await ApiRepository.to.getLockSettingInfoData(
await ApiRepository.to.getLockSettingInfoData(
lockId: lockId.toString(),
);
if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
@ -427,7 +519,6 @@ class H264WebViewLogic extends BaseGetXController {
}
}
@override
void dispose() {
// _mockDataTimer?.cancel();

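The microphone path amplifies each recorded frame before G711 encoding (the gain factor was raised from 1.6 to 1.8). The body of _applyGain is not part of this diff; a minimal sketch of what such a gain helper typically looks like, clamping to the 16-bit PCM range:

// Sketch only: scale 16-bit PCM samples by `gain` and clamp to avoid overflow.
List<int> applyGain(List<int> pcm16, double gain) {
  return pcm16
      .map((sample) => (sample * gain).round().clamp(-32768, 32767).toInt())
      .toList();
}
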
View File

@ -5,6 +5,7 @@ import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:webview_flutter/webview_flutter.dart';
@ -49,4 +50,6 @@ class H264WebViewState {
RxInt rotateAngle = 0.obs; // video rotation angle
RxBool hasAudioData = false.obs; // whether any audio data has been received
RxInt lastAudioTimestamp = 0.obs; // timestamp of the last audio frame
List<TalkData> audioBuffer = <TalkData>[].obs;
RxBool isPlaying = false.obs; // whether PCM playback has started
}