Merge branch 'develop_liyi' of code-internal.star-lock.cn:StarlockTeam/app-starlock into develop_liyi

DaisyWu 2025-02-24 09:16:31 +08:00
commit d0e66e1694
21 changed files with 1151 additions and 2843 deletions

View File

@@ -171,3 +171,14 @@ java -jar android/bundletool.jar build-apks --bundle=build/app/outputs/bundle/sk
```bash
java -jar android/bundletool.jar install-apks --apks=build/app/outputs/bundle/skyRelease/app-sky-release.aab.apks
```
## JPush integration
The app currently relies on JPush only for its pass-through (transparent message) capability. Push delivery works by intercepting the vendor push tokens that JPush obtains, reporting those tokens to our own business server, and having the server call each vendor's push channel directly. The JPush Flutter SDK was therefore privately customized; the changes are:
* On Android and iOS, native code intercepts the vendor push token obtained by JPush and passes it back to the Flutter application layer
* Android: the token is obtained uniformly through the vendor push SDKs integrated by JPush
* iOS: the token is obtained through the native token callback
* The Flutter side reports the obtained vendor token and vendor identifier to the business server
Customized jpush_flutter: http://code-internal.star-lock.cn/StarlockTeam/jpush_flutter
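A minimal sketch of the Flutter-side reporting described above. The callback and uploader names here (`onVendorToken`, `uploadToken`) are illustrative assumptions, not the customized plugin's actual API:

```dart
/// Hypothetical wiring: the customized jpush_flutter forwards the natively
/// intercepted vendor token to Dart, and we report it to the business server.
typedef TokenUploader = Future<void> Function(String vendor, String token);

class VendorTokenReporter {
  VendorTokenReporter(this.uploadToken);

  /// Uploads the vendor identifier and its push token to the business server.
  final TokenUploader uploadToken;

  /// Register this as the plugin's token callback (name assumed).
  Future<void> onVendorToken(String vendor, String token) =>
      uploadToken(vendor, token);
}
```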

File diff suppressed because it is too large

assets/html/jmuxer.min.js (vendored)

File diff suppressed because one or more lines are too long

View File

@@ -76,12 +76,25 @@
/*
* deviceTokenCloudPush
*/
- (void)application:(UIApplication *)application didRegisterForRemoteNotificationsWithDeviceToken:(NSData *)deviceToken {
    /// Required - DeviceToken
    [JPUSHService registerDeviceToken:deviceToken];
}
//- (void)application:(UIApplication *)application didRegisterForRemoteNotificationsWithDeviceToken:(NSData *)deviceToken {
//    NSString *tokenString = [self hexStringFromData:deviceToken];
//    NSLog(@"starlock didRegisterForRemoteNotificationsWithDeviceToken token: %@", tokenString);
//    /// Required - DeviceToken
//    [JPUSHService registerDeviceToken:deviceToken];
//
//}
// Convert the APNs device token bytes to a lowercase hex string
- (NSString *)hexStringFromData:(NSData *)data {
const unsigned char *dataBuffer = (const unsigned char *)[data bytes];
NSMutableString *hexString = [NSMutableString stringWithCapacity:data.length * 2];
for (NSInteger i = 0; i < data.length; i++) {
[hexString appendFormat:@"%02x", dataBuffer[i]];
}
return [hexString copy];
}
/*
*
*/

View File

@@ -61,6 +61,7 @@ import 'package:star_lock/mine/valueAddedServices/advancedFeaturesWeb/advancedFe
import 'package:star_lock/mine/valueAddedServices/advancedFunctionRecord/advancedFunctionRecord_page.dart';
import 'package:star_lock/mine/valueAddedServices/valueAddedServicesRecord/value_added_services_record_page.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_page.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view.dart';
import 'common/safetyVerification/safetyVerification_page.dart';
import 'login/forgetPassword/starLock_forgetPassword_page.dart';
@@ -515,6 +516,7 @@ abstract class Routers {
static const String doubleLockLinkPage = '/doubleLockLinkPage'; //
static const String starChartPage = '/starChartPage'; //
static const String starChartTalkView = '/starChartTalkView'; //
static const String h264WebView = '/h264WebView'; //
}
abstract class AppRouters {
@@ -1195,5 +1197,6 @@ abstract class AppRouters {
page: () => const DoubleLockLinkPage()),
GetPage<dynamic>(
name: Routers.starChartTalkView, page: () => const TalkViewPage()),
GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()),
];
}

View File

@@ -267,26 +267,31 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
color: Colors.white,
borderRadius: BorderRadius.circular(16.w),
),
child: Obx(
() => state.lockLogItemList.isNotEmpty
? Timeline.tileBuilder(
builder: _timelineBuilderWidget(),
theme: TimelineThemeData(
nodePosition: 0.04, //
connectorTheme: const ConnectorThemeData(
thickness: 1.0,
color: AppColors.greyLineColor,
indent: 0.5,
child: Obx(() => EasyRefreshTool(
onRefresh: () async {
logic.mockNetworkDataRequest(isRefresh: true);
},
onLoad: () async {
logic.mockNetworkDataRequest(isRefresh: false);
},
child: state.lockLogItemList.isNotEmpty
? Timeline.tileBuilder(
builder: _timelineBuilderWidget(),
theme: TimelineThemeData(
nodePosition: 0.04, //
connectorTheme: const ConnectorThemeData(
thickness: 1.0,
color: AppColors.greyLineColor,
indent: 0.5,
),
indicatorTheme: const IndicatorThemeData(
size: 8.0,
color: AppColors.greyLineColor,
position: 0.4,
),
),
indicatorTheme: const IndicatorThemeData(
size: 8.0,
color: AppColors.greyLineColor,
position: 0.45,
),
),
)
: NoData(),
),
)
: NoData())),
);
}
@@ -347,6 +352,9 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
if (recordData.videoUrl != null && recordData.videoUrl!.isNotEmpty) {
final lockLogItemList = state.lockLogItemList.value;
final list = lockLogItemList
.where((e) =>
(e.videoUrl != null && e.videoUrl!.isNotEmpty) ||
(e.imagesUrl != null && e.imagesUrl!.isNotEmpty))
.map(
(e) => RecordListData(
videoUrl: e.videoUrl,

View File

@@ -2,6 +2,7 @@ import 'dart:typed_data';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@@ -158,16 +159,13 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
void _handleVideoH264(TalkData talkData) {
final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame();
talkDataH264Frame.mergeFromBuffer(talkData.content);
// AppLog.log('H264 TalkData :$talkDataH264Frame');
// talkDataRepository.addTalkData(talkData);
frameHandler.handleFrame(talkDataH264Frame);
}
  /// Handle image-type video payloads
void _handleVideoImage(TalkData talkData) async {
final List<Uint8List> processCompletePayload =
await _processCompletePayload(Uint8List.fromList(talkData.content));
    // AppLog.log('Complete frames: ${processCompletePayload.length}');
processCompletePayload.forEach((element) {
talkData.content = element;
talkDataRepository.addTalkData(talkData);
@@ -181,7 +179,7 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
// // pcm数据
// List<int> pcmBytes = G711().convertList(g711Data);
// talkData.content = pcmBytes;
talkDataRepository.addTalkData(talkData);
// talkDataRepository.addTalkData(talkData);
} catch (e) {
print('Error decoding G.711 to PCM: $e');
}

View File

@@ -73,9 +73,19 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
      // Start the talk-request timeout timer
      talkeRequestOverTimeTimerManager.start();
      // Route to the matching talk page based on the expected video type
Get.toNamed(
Routers.starChartTalkView,
);
    if (!startChartManage
        .getDefaultTalkExpect()
        .videoType
        .contains(VideoTypeE.H264)) {
      Get.toNamed(
        Routers.starChartTalkView,
      );
    } else {
      Get.toNamed(
        Routers.h264WebView,
      );
    }
}
//

View File

@@ -0,0 +1,22 @@
import 'dart:typed_data';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
class H264FrameBuffer {
List<TalkDataH264Frame> frames = [];
void addFrame(TalkDataH264Frame frame) {
frames.add(frame);
}
Uint8List getCompleteStream() {
final List<int> completeStream = [];
for (final frame in frames) {
      // Prepend the Annex-B start code 0x00 0x00 0x01
      completeStream.addAll([0x00, 0x00, 0x01]);
      // Append the raw frame payload
      completeStream.addAll(frame.frameData);
}
return Uint8List.fromList(completeStream);
}
}
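A brief usage sketch of the buffer above (hedged; `receivedFrames` stands in for frames already collected in decode order):

```dart
// Sketch: assemble received frames into a single Annex-B byte stream.
final h264Buffer = H264FrameBuffer();
for (final TalkDataH264Frame frame in receivedFrames) {
  h264Buffer.addFrame(frame);
}
// Each frame is prefixed with 0x00 0x00 0x01, so the result can be fed
// directly to an Annex-B consumer such as the jmuxer-based player page.
final Uint8List annexB = h264Buffer.getCompleteStream();
```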

View File

@@ -0,0 +1,84 @@
import 'package:star_lock/app_settings/app_settings.dart';
import '../../proto/talk_data_h264_frame.pb.dart';
class H264FrameHandler {
final Map<int, TalkDataH264Frame> _frameBuffer = {};
final void Function(List<int> frameData) onCompleteFrame;
int _lastProcessedSeq = -1;
H264FrameHandler({required this.onCompleteFrame});
  void handleFrame(TalkDataH264Frame frame) {
    // Buffer the frame by its sequence number
    _frameBuffer[frame.frameSeq] = frame;
    // Try to assemble a decodable run of frames (GOP, Group of Pictures)
    _tryAssembleFrames(frame.frameSeq);
  }
  void _tryAssembleFrames(int currentSeq) {
    // Sort the buffered sequence numbers
    final List<int> sortedSeqs = _frameBuffer.keys.toList()..sort();
    final List<int> framesToProcess = [];
    // Scan from newest to oldest for a decodable anchor: an I frame, or a
    // P frame whose reference I frame is still buffered
    int? startFrameSeq;
    for (var seq in sortedSeqs.reversed) {
      final frame = _frameBuffer[seq];
      if (frame?.frameType == TalkDataH264Frame_FrameTypeE.I) {
        startFrameSeq = seq;
        break;
      } else if (frame?.frameType == TalkDataH264Frame_FrameTypeE.P) {
        // A P frame is only usable while its reference I frame is buffered
        if (_frameBuffer.containsKey(frame?.frameSeqI)) {
          startFrameSeq = seq;
          break;
        } else {
          // Reference I frame is gone; drop the undecodable P frame
          _frameBuffer.remove(seq);
        }
      }
    }
    if (startFrameSeq != null) {
      // Collect a contiguous run of sequence numbers from the anchor frame
      int expectedSeq = startFrameSeq;
      for (var seq in sortedSeqs.where((s) => s >= startFrameSeq!)) {
        if (seq != expectedSeq) break;
        framesToProcess.add(seq);
        expectedSeq++;
      }
      if (framesToProcess.isNotEmpty) {
        _processFrames(framesToProcess);
      }
    } else {
      _clearOldFrames(currentSeq);
    }
  }
  void _clearOldFrames(int currentSeq) {
    // Evict frames that lag too far behind the current sequence number
    _frameBuffer.removeWhere((seq, frame) => seq < currentSeq - 200);
  }
  void _processFrames(List<int> frameSeqs) {
    // Concatenate the frame payloads in sequence order
    final List<int> assembledData = [];
    for (var seq in frameSeqs) {
      final frame = _frameBuffer[seq]!;
      assembledData.addAll(frame.frameData);
      // Processed frames leave the buffer
      _frameBuffer.remove(seq);
    }
    // Hand the assembled data to the consumer
    onCompleteFrame(assembledData);
  }
void clear() {
_frameBuffer.clear();
}
}

View File

@@ -1,5 +1,4 @@
import 'dart:async';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
class TalkDataRepository {
@@ -27,6 +26,9 @@ class TalkDataRepository {
bool _isListening = false;
  // Buffered talk data
  final List<TalkData> _buffer = [];
  // Stream of talk data events
Stream<TalkData> get talkDataStream =>
_talkDataStreamController.stream.transform(
@@ -41,14 +43,11 @@
},
),
);
final List<TalkData> _buffer = []; //
// TalkData Stream
void addTalkData(TalkData talkData) async {
void addTalkData(TalkData talkData) {
if (_isListening) {
Future.microtask(() {
_talkDataStreamController.add(talkData);
});
_talkDataStreamController.add(talkData);
}
}
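For context, a short sketch of how a consumer subscribes to this stream; it mirrors the listener set up in h264_web_logic.dart later in this diff:

```dart
// Sketch: listen for talk data and route H264 payloads to the player.
final subscription =
    TalkDataRepository.instance.talkDataStream.listen((TalkData data) {
  if (data.contentType == TalkData_ContentTypeE.H264) {
    // e.g. forward data.content to the jmuxer-backed web view
  }
});
```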

View File

@@ -14,6 +14,7 @@ import 'package:star_lock/talk/starChart/constant/payload_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/udp_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/handle/other/talke_data_over_time_timer_manager.dart';
@@ -52,6 +53,15 @@ class ScpMessageBaseHandle {
final audioManager = AudioPlayerManager();
  // Reassembles H264 frames; completed runs are republished as TalkData
  final H264FrameHandler frameHandler =
      H264FrameHandler(onCompleteFrame: (frameData) {
    // Push the assembled stream to the shared repository
    TalkDataRepository.instance.addTalkData(
      TalkData(contentType: TalkData_ContentTypeE.H264, content: frameData),
    );
  });
  // Reply with a generic success response
void replySuccessMessage(ScpMessage scpMessage) {
startChartManage.sendGenericRespSuccessMessage(

View File

@@ -22,6 +22,7 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
$core.int? frameSeq,
TalkDataH264Frame_FrameTypeE? frameType,
$core.List<$core.int>? frameData,
$core.int? frameSeqI,
}) {
final $result = create();
if (frameSeq != null) {
@@ -33,6 +34,9 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
if (frameData != null) {
$result.frameData = frameData;
}
if (frameSeqI != null) {
$result.frameSeqI = frameSeqI;
}
return $result;
}
TalkDataH264Frame._() : super();
@@ -43,6 +47,7 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
..a<$core.int>(1, _omitFieldNames ? '' : 'FrameSeq', $pb.PbFieldType.OU3, protoName: 'FrameSeq')
..e<TalkDataH264Frame_FrameTypeE>(2, _omitFieldNames ? '' : 'FrameType', $pb.PbFieldType.OE, protoName: 'FrameType', defaultOrMaker: TalkDataH264Frame_FrameTypeE.NONE, valueOf: TalkDataH264Frame_FrameTypeE.valueOf, enumValues: TalkDataH264Frame_FrameTypeE.values)
..a<$core.List<$core.int>>(3, _omitFieldNames ? '' : 'FrameData', $pb.PbFieldType.OY, protoName: 'FrameData')
..a<$core.int>(4, _omitFieldNames ? '' : 'FrameSeqI', $pb.PbFieldType.OU3, protoName: 'FrameSeqI')
..hasRequiredFields = false
;
@@ -95,6 +100,16 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
$core.bool hasFrameData() => $_has(2);
@$pb.TagNumber(3)
void clearFrameData() => clearField(3);
  /// Sequence number of the reference I frame
@$pb.TagNumber(4)
$core.int get frameSeqI => $_getIZ(3);
@$pb.TagNumber(4)
set frameSeqI($core.int v) { $_setUnsignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasFrameSeqI() => $_has(3);
@$pb.TagNumber(4)
void clearFrameSeqI() => clearField(4);
}

View File

@@ -20,6 +20,7 @@ const TalkDataH264Frame$json = {
{'1': 'FrameSeq', '3': 1, '4': 1, '5': 13, '10': 'FrameSeq'},
{'1': 'FrameType', '3': 2, '4': 1, '5': 14, '6': '.main.TalkDataH264Frame.FrameTypeE', '10': 'FrameType'},
{'1': 'FrameData', '3': 3, '4': 1, '5': 12, '10': 'FrameData'},
{'1': 'FrameSeqI', '3': 4, '4': 1, '5': 13, '10': 'FrameSeqI'},
],
'4': [TalkDataH264Frame_FrameTypeE$json],
};
@@ -38,6 +39,6 @@ const TalkDataH264Frame_FrameTypeE$json = {
final $typed_data.Uint8List talkDataH264FrameDescriptor = $convert.base64Decode(
'ChFUYWxrRGF0YUgyNjRGcmFtZRIaCghGcmFtZVNlcRgBIAEoDVIIRnJhbWVTZXESQAoJRnJhbW'
'VUeXBlGAIgASgOMiIubWFpbi5UYWxrRGF0YUgyNjRGcmFtZS5GcmFtZVR5cGVFUglGcmFtZVR5'
'cGUSHAoJRnJhbWVEYXRhGAMgASgMUglGcmFtZURhdGEiJAoKRnJhbWVUeXBlRRIICgROT05FEA'
'ASBQoBSRABEgUKAVAQAg==');
'cGUSHAoJRnJhbWVEYXRhGAMgASgMUglGcmFtZURhdGESHAoJRnJhbWVTZXFJGAQgASgNUglGcm'
'FtZVNlcUkiJAoKRnJhbWVUeXBlRRIICgROT05FEAASBQoBSRABEgUKAVAQAg==');

View File

@@ -15,4 +15,6 @@ message TalkDataH264Frame {
  FrameTypeE FrameType = 2;
  // Raw frame payload
  bytes FrameData = 3;
  // Sequence number of the reference I frame
  uint32 FrameSeqI = 4;
}
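For orientation, the generated Dart factory shown earlier accepts the new field directly; a sketch (with `nalBytes` standing in for a real payload):

```dart
// A P frame carries the sequence number of the I frame it depends on.
final pFrame = TalkDataH264Frame(
  frameSeq: 12,
  frameType: TalkDataH264Frame_FrameTypeE.P,
  frameData: nalBytes, // nalBytes: assumed raw H.264 frame payload
  frameSeqI: 10, // FrameSeq of the reference I frame
);
```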

View File

@@ -112,7 +112,7 @@ class StartChartManage {
  // Default talk expectation: which media types to request from the lock
TalkExpectReq _defaultTalkExpect = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
videoType: [VideoTypeE.H264],
audioType: [AudioTypeE.G711],
);
@@ -419,9 +419,15 @@ class StartChartManage {
if (talkStatus.status != TalkStatus.proactivelyCallWaitingAnswer) {
//
// AudioPlayerManager().playRingtone();
Get.toNamed(
Routers.starChartTalkView,
);
if (_defaultTalkExpect.videoType.contains(VideoTypeE.H264)) {
Get.toNamed(
Routers.h264WebView,
);
} else {
Get.toNamed(
Routers.starChartTalkView,
);
}
}
talkRequestTimer ??= Timer.periodic(
Duration(
@@ -1113,15 +1119,19 @@ class StartChartManage {
void reSetDefaultTalkExpect() {
_defaultTalkExpect = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
videoType: [VideoTypeE.H264],
audioType: [AudioTypeE.G711],
);
}
TalkExpectReq getDefaultTalkExpect() {
return _defaultTalkExpect;
}
  /// Request video only (mute: no audio)
void sendOnlyImageVideoTalkExpectData() {
final talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
videoType: [VideoTypeE.H264],
audioType: [],
);
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
@@ -1131,7 +1141,7 @@ class StartChartManage {
  /// Request video plus G.711 audio
void sendImageVideoAndG711AudioTalkExpectData() {
final talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
videoType: [VideoTypeE.H264],
audioType: [AudioTypeE.G711],
);
changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(

View File

@@ -0,0 +1,434 @@
import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:math'; // Import the math package to use sqrt
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:webview_flutter/webview_flutter.dart';
import '../../../../tools/baseGetXController.dart';
class H264WebViewLogic extends BaseGetXController {
final H264WebViewState state = H264WebViewState();
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
@override
void onInit() {
super.onInit();
// WebView
state.webViewController = WebViewController()
..setJavaScriptMode(JavaScriptMode.unrestricted)
..enableZoom(false)
..addJavaScriptChannel(
'Flutter',
onMessageReceived: (message) {
print("来自 HTML 的消息: ${message.message}");
},
);
    state.isShowLoading.value = true;
    // Load the local HTML player page
    _loadLocalHtml();
    // Listen for decoded frame data
    _createFramesStreamListen();
    _startListenTalkStatus();
    state.talkStatus.value = state.startChartTalkStatus.status;
    // Set up PCM audio playback
    _initFlutterPcmSound();
    // Set up the audio recorder
    _initAudioRecorder();
}
  /// Initialize the audio recorder
  void _initAudioRecorder() {
    state.voiceProcessor = VoiceProcessor.instance;
  }
  /// Initialize PCM audio playback
  void _initFlutterPcmSound() {
    const int sampleRate = 8000;
    FlutterPcmSound.setLogLevel(LogLevel.none);
    FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
    // Feed threshold: queue level at which more PCM data is requested
    if (Platform.isAndroid) {
      FlutterPcmSound.setFeedThreshold(1024); // Android
    } else {
      FlutterPcmSound.setFeedThreshold(2000); // iOS
    }
}
  void _createFramesStreamListen() async {
    state.talkDataRepository.talkDataStream.listen((TalkData event) async {
      // Hand the data to the JS player for processing
      _sendBufferedData(event.content);
    });
  }
  /// Load the local HTML player file
  Future<void> _loadLocalHtml() async {
    // Read the HTML shell
    final String fileHtmlContent =
        await rootBundle.loadString('assets/html/h264.html');
    // Read the bundled jmuxer JS
    final String jsContent =
        await rootBundle.loadString('assets/html/jmuxer.min.js');
    // Inline the JS into the HTML so no external resource is needed
    final String htmlWithJs = fileHtmlContent.replaceAll(
        '<script src="jmuxer.min.js"></script>', // external reference
        '<script>$jsContent</script>' // inlined script
        );
    // Load the merged HTML into the WebView; no baseUrl is required
    state.webViewController.loadHtmlString(htmlWithJs);
  }
  // Forward buffered bytes to the JS player
  Future<void> _sendBufferedData(List<int> buffer) async {
    // List<int>.toString() renders a JS array literal, e.g. [1, 2, 3]
    final String jsCode = "feedDataFromFlutter($buffer);";
    await state.webViewController.runJavaScript(jsCode);
    if (state.isShowLoading.isTrue) {
      await Future.delayed(Duration(seconds: 1));
      state.isShowLoading.value = false;
    }
  }
  /// Listen for talk status changes
void _startListenTalkStatus() {
state.startChartTalkStatus.statusStream.listen((talkStatus) {
state.talkStatus.value = talkStatus;
switch (talkStatus) {
case TalkStatus.rejected:
case TalkStatus.hangingUpDuring:
case TalkStatus.notTalkData:
case TalkStatus.notTalkPing:
case TalkStatus.end:
_handleInvalidTalkStatus();
break;
        case TalkStatus.answeredSuccessfully:
          state.oneMinuteTimeTimer?.cancel(); // restart the one-minute timer
          state.oneMinuteTimeTimer = null;
          state.oneMinuteTimeTimer ??=
              Timer.periodic(const Duration(seconds: 1), (Timer t) {
            if (state.isShowLoading.isFalse) {
              state.oneMinuteTime.value++;
              if (state.oneMinuteTime.value >= 60) {
                t.cancel(); // stop counting after 60 seconds
                state.oneMinuteTime.value = 0;
              }
            }
          });
          break;
        default:
          // Other statuses require no handling here
          break;
}
});
}
  /// Toggle audio on/off by updating the talk expectation
  void updateTalkExpect() {
TalkExpectReq talkExpectReq = TalkExpectReq();
state.isOpenVoice.value = !state.isOpenVoice.value;
if (!state.isOpenVoice.value) {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [],
);
showToast('已静音'.tr);
} else {
talkExpectReq = TalkExpectReq(
videoType: [VideoTypeE.IMAGE],
audioType: [AudioTypeE.G711],
);
}
    // Apply the new expectation and restart the expect-message timer
    StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
        talkExpect: talkExpectReq);
}
///
void _handleInvalidTalkStatus() {}
  /// Capture the current view and save it to the photo gallery as a PNG
  Future<void> captureAndSavePng() async {
    try {
      if (state.globalKey.currentContext == null) {
        AppLog.log('Screenshot failed: no current context');
        return;
      }
      final RenderRepaintBoundary boundary = state.globalKey.currentContext!
          .findRenderObject()! as RenderRepaintBoundary;
      final ui.Image image = await boundary.toImage();
      final ByteData? byteData =
          await image.toByteData(format: ui.ImageByteFormat.png);
      if (byteData == null) {
        AppLog.log('Screenshot failed: image data is null');
        return;
      }
      final Uint8List pngBytes = byteData.buffer.asUint8List();
      // Write the PNG into the app documents directory
      final Directory directory = await getApplicationDocumentsDirectory();
      final String imagePath = '${directory.path}/screenshot.png';
      final File imgFile = File(imagePath);
      await imgFile.writeAsBytes(pngBytes);
      // Export to the system photo gallery
      await ImageGallerySaver.saveFile(imagePath);
      AppLog.log('Screenshot saved to: $imagePath');
      showToast('截图已保存到相册'.tr);
    } catch (e) {
      AppLog.log('Screenshot failed: $e');
    }
  }
  // Send the answer command
void initiateAnswerCommand() {
StartChartManage().startTalkAcceptTimer();
}
  // Start capturing microphone audio
  Future<void> startProcessingAudio() async {
    // Register frame and error listeners
    state.voiceProcessor?.addFrameListener(_onFrame);
state.voiceProcessor?.addErrorListener(_onError);
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.startRecordingAudioTime.value = DateTime.now();
} else {
// state.errorMessage.value = 'Recording permission not granted';
}
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to start recorder: $ex';
}
state.isOpenVoice.value = false;
}
  /// Stop capturing microphone audio
  Future<void> stopProcessingAudio() async {
    try {
      await state.voiceProcessor?.stop();
      state.voiceProcessor?.removeFrameListener(_onFrame);
      state.udpSendDataFrameNumber = 0;
      // Record when capture stopped
      state.endRecordingAudioTime.value = DateTime.now();
      // Compute the recording duration
      final duration = state.endRecordingAudioTime.value!
          .difference(state.startRecordingAudioTime.value!);
      state.recordingAudioTime.value = duration.inSeconds;
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to stop recorder: $ex';
} finally {
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.isOpenVoice.value = true;
}
}
  // Encode one microphone frame and send it
  Future<void> _onFrame(List<int> frame) async {
    // Could run preprocessing on a background isolate instead:
    // final processedFrame = await compute(preprocessAudio, frame);
    // final list = listLinearToALaw(processedFrame);
    final List<int> processedFrame = preprocessAudio(frame);
    final List<int> list = listLinearToALaw(processedFrame);
    final int ms = DateTime.now().millisecondsSinceEpoch -
        state.startRecordingAudioTime.value.millisecondsSinceEpoch;
    // Send the G.711 frame over the UDP talk channel
    await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: list,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
}
  /// Hang up the call, or reject it if not yet answered
  void udpHangUpAction() async {
    if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
      // In a call: send hang-up
      StartChartManage().startTalkHangupMessageTimer();
    } else {
      // Not yet answered: send reject
      StartChartManage().startTalkRejectMessageTimer();
    }
    Get.back();
  }
  // Remotely unlock the lock
  Future<void> remoteOpenLock() async {
    final lockPeerId = StartChartManage().lockPeerId;
    final lockListPeerId = StartChartManage().lockListPeerId;
    int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
    // Match the talking peerId against the lock list to resolve the lockId;
    // fall back to the lock-detail lockId when no match is found
    lockListPeerId.forEach((element) {
      if (element.network?.peerId == lockPeerId) {
        lockId = element.lockId ?? 0;
      }
    });
final LockSetInfoEntity lockSetInfoEntity =
await ApiRepository.to.getLockSettingInfoData(
lockId: lockId.toString(),
);
if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
final LoginEntity entity = await ApiRepository.to
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
if (entity.errorCode!.codeIsSuccessful) {
showToast('已开锁'.tr);
StartChartManage().lockListPeerId = [];
}
} else {
showToast('该锁的远程开锁功能未启用'.tr);
}
}
}
  List<int> preprocessAudio(List<int> pcmList) {
    // Simple noise gate: zero out low-amplitude samples
    final List<int> processedList = [];
    for (int pcmVal in pcmList) {
      // Treat samples below the threshold as silence
      if (pcmVal.abs() < 200) {
        pcmVal = 0;
      }
      processedList.add(pcmVal);
    }
    return processedList;
  }
List<int> listLinearToALaw(List<int> pcmList) {
final List<int> aLawList = [];
for (int pcmVal in pcmList) {
final int aLawVal = linearToALaw(pcmVal);
aLawList.add(aLawVal);
}
return aLawList;
}
int linearToALaw(int pcmVal) {
const int ALAW_MAX = 0x7FFF; // 32767
const int ALAW_BIAS = 0x84; // 132
int mask;
int seg;
int aLawVal;
  // Handle sign: fold it into the XOR mask applied to the output byte
  if (pcmVal < 0) {
    pcmVal = -pcmVal;
    mask = 0x7F; // negative: XOR pattern becomes 0x55, output sign bit 0
  } else {
    mask = 0xFF; // positive: XOR pattern stays 0xD5, output sign bit 1
  }
  // Add bias and clamp to ALAW_MAX
  pcmVal += ALAW_BIAS;
  if (pcmVal > ALAW_MAX) {
    pcmVal = ALAW_MAX;
  }
  // Determine segment
  seg = search(pcmVal);
  // Calculate A-law value
  if (seg >= 8) {
    aLawVal = 0x7F ^ mask; // Clamp to maximum magnitude
  } else {
    int quantized = (pcmVal >> (seg + 3)) & 0xF;
    aLawVal = (seg << 4) | quantized;
    aLawVal ^= mask & 0xD5; // sign-dependent toggle: 0xD5 positive, 0x55 negative
  }
return aLawVal;
}
int search(int val) {
final List<int> table = [
0xFF, // Segment 0
0x1FF, // Segment 1
0x3FF, // Segment 2
0x7FF, // Segment 3
0xFFF, // Segment 4
0x1FFF, // Segment 5
0x3FFF, // Segment 6
0x7FFF // Segment 7
];
const int size = 8;
for (int i = 0; i < size; i++) {
if (val <= table[i]) {
return i;
}
}
return size;
}
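A quick worked check of the encoder above; the values follow directly from the code as written (a verification sketch, not part of the diff):

```dart
void main() {
  // 1000 + bias 132 = 1132 -> segment 3; quantized = (1132 >> 6) & 0xF = 1
  // byte = (3 << 4) | 1 = 0x31; positive XOR pattern 0xD5 -> 0xE4
  assert(linearToALaw(1000) == 0xE4);
  // Same magnitude, negative sign: XOR pattern 0x55 -> 0x64
  assert(linearToALaw(-1000) == 0x64);
}
```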
  // Voice processor error callback
void _onError(VoiceProcessorException error) {
AppLog.log(error.message!);
}
  @override
  void dispose() {
    super.dispose();
    StartChartManage().startTalkHangupMessageTimer();
    state.animationController.dispose();
    state.webViewController.clearCache();
    state.webViewController.reload();
    state.oneMinuteTimeTimer?.cancel();
    state.oneMinuteTimeTimer = null;
    stopProcessingAudio();
    StartChartManage().reSetDefaultTalkExpect();
  }
}

View File

@@ -1,11 +1,18 @@
import 'dart:async';
import 'dart:convert';
import 'dart:math';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart' show ByteData, Uint8List, rootBundle;
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_logic.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
import 'package:star_lock/tools/titleAppBar.dart';
import 'package:webview_flutter/webview_flutter.dart';
@@ -14,169 +21,400 @@ class H264WebView extends StatefulWidget {
_H264WebViewState createState() => _H264WebViewState();
}
class _H264WebViewState extends State<H264WebView> {
late final WebViewController _controller;
Timer? timer;
Timer? _sendTimer;
  // Bytes buffered between sends to the HTML player
  final List<int> _buffer = [];
  // Interval (ms) between batches sent to the HTML player
  final int sendDataToHtmlIntervalTime = 820;
  // Repository supplying the decoded talk data
  final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
class _H264WebViewState extends State<H264WebView>
with TickerProviderStateMixin {
final H264WebViewLogic logic = Get.put(H264WebViewLogic());
final H264WebViewState state = Get.find<H264WebViewLogic>().state;
  @override
  void initState() {
    super.initState();
    state.animationController = AnimationController(
      vsync: this, // the current widget acts as the TickerProvider
      duration: const Duration(seconds: 1),
    );
    _controller = WebViewController()
      ..setJavaScriptMode(JavaScriptMode.unrestricted)
      ..enableZoom(false)
      ..addJavaScriptChannel(
        'Flutter',
        onMessageReceived: (message) {
          print("Message from HTML: ${message.message}");
        },
      );
    // Load the local HTML player page
    _loadLocalHtml();
    simulateStreamFromAsset();
    _sendFramesToHtml();
  }
  void simulateStreamFromAsset() async {
    // Load a test H.264 stream from assets
    final ByteData data = await rootBundle.load('assets/talk.h264');
    final List<int> byteData = data.buffer.asUint8List();
    int current = 0;
    int start = 0;
    int end = 0;
    final List<int> chunks = extractChunks(byteData);
    // Emit one chunk every 10 ms
    timer ??= Timer.periodic(Duration(milliseconds: 10), (timer) {
      if (current >= chunks.length) {
        print('All data sent; restarting');
        start = 0;
        end = 0;
        current = 0;
        timer.cancel();
        return;
      }
      // Slice the next group of NALUs out of the byte stream
      end = chunks[current];
      current++;
      final List<int> frameData = byteData.sublist(start, end);
      if (frameData.isEmpty) timer.cancel();
      talkDataRepository.addTalkData(TalkData(
          contentType: TalkData_ContentTypeE.H264, content: frameData));
      start = end;
    });
  }
void _sendFramesToHtml() async {
    // Accumulate decoded frame bytes (listener currently disabled):
// talkDataRepository.talkDataStream.listen((TalkData event) async {
// _buffer.addAll(event.content);
// });
    // Flush the buffered data roughly every 800 ms
_sendTimer ??= Timer.periodic(
Duration(milliseconds: sendDataToHtmlIntervalTime), (timer) async {
      // Only send when there is buffered data
if (_buffer.isNotEmpty) {
await _sendBufferedData(_buffer);
        _buffer.clear(); // sent; reset the buffer
state.animationController.repeat();
    // Restart the rotation via a status listener
state.animationController.addStatusListener((AnimationStatus status) {
if (status == AnimationStatus.completed) {
state.animationController.reset();
state.animationController.forward();
} else if (status == AnimationStatus.dismissed) {
state.animationController.reset();
state.animationController.forward();
}
});
}
  // Scan for NALU start codes and return chunk boundary offsets
List<int> extractChunks(List<int> byteData) {
int i = 0;
int length = byteData.length;
int naluCount = 0;
int value;
int state = 0;
int lastIndex = 0;
List<int> result = [];
    const minNaluPerChunk = 22; // minimum number of NALUs per chunk
while (i < length) {
value = byteData[i++];
// finding 3 or 4-byte start codes (00 00 01 OR 00 00 00 01)
switch (state) {
case 0:
if (value == 0) {
state = 1;
}
break;
case 1:
if (value == 0) {
state = 2;
} else {
state = 0;
}
break;
case 2:
case 3:
if (value == 0) {
state = 3;
} else if (value == 1 && i < length) {
if (lastIndex > 0) {
naluCount++;
}
if (naluCount >= minNaluPerChunk) {
result.add(lastIndex - state - 1);
naluCount = 0;
}
state = 0;
lastIndex = i;
} else {
state = 0;
}
break;
default:
break;
}
}
if (naluCount > 0) {
result.add(lastIndex);
}
return result;
}
  /// Load the local HTML player file
Future<void> _loadLocalHtml() async {
final String fileHtmlContent =
await rootBundle.loadString('assets/html/h264.html');
_controller.loadHtmlString(fileHtmlContent);
}
  // Hand the buffered data to the JS player
_sendBufferedData(List<int> buffer) async {
String jsCode = "feedDataFromFlutter(${buffer});";
await _controller.runJavaScript(jsCode);
}
@override
Widget build(BuildContext context) {
return WebViewWidget(controller: _controller);
return WillPopScope(
onWillPop: () async {
        // Returning false disables the system back gesture
return false;
},
child: SizedBox(
width: 1.sw,
height: 1.sh,
child: Stack(
alignment: Alignment.center,
children: [
Obx(() {
final double screenWidth = MediaQuery.of(context).size.width;
final double screenHeight = MediaQuery.of(context).size.height;
return state.isShowLoading.value
? Image.asset(
'images/main/monitorBg.png',
width: screenWidth,
height: screenHeight,
fit: BoxFit.cover,
)
: WebViewWidget(
controller: state.webViewController,
);
}),
Obx(
() => state.isShowLoading.value
? Positioned(
bottom: 310.h,
child: Text(
'正在创建安全连接...'.tr,
style: TextStyle(color: Colors.black, fontSize: 26.sp),
),
)
: Container(),
),
Obx(
() => state.isShowLoading.isFalse
? Positioned(
top: ScreenUtil().statusBarHeight + 75.h,
width: 1.sw,
child: Obx(
() {
final String sec = (state.oneMinuteTime.value % 60)
.toString()
.padLeft(2, '0');
final String min = (state.oneMinuteTime.value ~/ 60)
.toString()
.padLeft(2, '0');
return Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text(
'$min:$sec',
style: TextStyle(
fontSize: 26.sp, color: Colors.white),
),
],
);
},
),
)
: Container(),
),
Positioned(
bottom: 10.w,
child: Container(
width: 1.sw - 30.w * 2,
// height: 300.h,
margin: EdgeInsets.all(30.w),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.2),
borderRadius: BorderRadius.circular(20.h)),
child: Column(
children: <Widget>[
SizedBox(height: 20.h),
bottomTopBtnWidget(),
SizedBox(height: 20.h),
bottomBottomBtnWidget(),
SizedBox(height: 20.h),
],
),
),
),
Obx(() => state.isShowLoading.isTrue
? buildRotationTransition()
: Container()),
Obx(() => state.isLongPressing.value
? Positioned(
top: 80.h,
left: 0,
right: 0,
child: Center(
child: Container(
padding: EdgeInsets.all(10.w),
decoration: BoxDecoration(
color: Colors.black.withOpacity(0.7),
borderRadius: BorderRadius.circular(10.w),
),
child: Row(
mainAxisSize: MainAxisSize.min,
children: <Widget>[
Icon(Icons.mic, color: Colors.white, size: 24.w),
SizedBox(width: 10.w),
Text(
'正在说话...'.tr,
style: TextStyle(
fontSize: 20.sp, color: Colors.white),
),
],
),
),
),
)
: Container()),
],
),
),
);
}
Widget bottomTopBtnWidget() {
return Row(mainAxisAlignment: MainAxisAlignment.center, children: <Widget>[
      // Mute / unmute toggle
GestureDetector(
onTap: () {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
//
logic.updateTalkExpect();
}
},
child: Container(
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Obx(() => Image(
width: 40.w,
height: 40.w,
image: state.isOpenVoice.value
? const AssetImage(
'images/main/icon_lockDetail_monitoringOpenVoice.png')
: const AssetImage(
'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
),
),
SizedBox(width: 50.w),
      // Screenshot
GestureDetector(
onTap: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
await logic.captureAndSavePng();
}
},
child: Container(
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenshot.png')),
),
),
SizedBox(width: 50.w),
      // Screen recording (not yet available)
GestureDetector(
onTap: () async {
logic.showToast('功能暂未开放'.tr);
// if (
// state.talkStatus.value == TalkStatus.answeredSuccessfully) {
// if (state.isRecordingScreen.value) {
// await logic.stopRecording();
// } else {
// await logic.startRecording();
// }
// }
},
child: Container(
width: 50.w,
height: 50.w,
padding: EdgeInsets.all(5.w),
child: Image(
width: 40.w,
height: 40.w,
fit: BoxFit.fill,
image: const AssetImage(
'images/main/icon_lockDetail_monitoringScreenRecording.png'),
),
),
),
SizedBox(width: 50.w),
GestureDetector(
onTap: () {
logic.showToast('功能暂未开放'.tr);
},
child: Image(
width: 28.w,
height: 28.w,
fit: BoxFit.fill,
image: const AssetImage('images/main/icon_lockDetail_rectangle.png'),
),
),
]);
}
Widget bottomBottomBtnWidget() {
return Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
        // Answer / push-to-talk button
Obx(
() => bottomBtnItemWidget(
getAnswerBtnImg(),
getAnswerBtnName(),
Colors.white,
longPress: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
              // Start push-to-talk
logic.startProcessingAudio();
state.isLongPressing.value = true;
}
},
longPressUp: () async {
            // Stop push-to-talk
logic.stopProcessingAudio();
state.isLongPressing.value = false;
},
onClick: () async {
if (state.talkStatus.value ==
TalkStatus.passiveCallWaitingAnswer) {
                // Answer the incoming call
logic.initiateAnswerCommand();
}
},
),
),
bottomBtnItemWidget(
'images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red,
onClick: () {
          // Hang up
logic.udpHangUpAction();
}),
bottomBtnItemWidget(
'images/main/icon_lockDetail_monitoringUnlock.png',
'开锁'.tr,
AppColors.mainColor,
onClick: () {
// if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
// state.listData.value.length > 0) {
// logic.udpOpenDoorAction();
logic.remoteOpenLock();
// }
// if (UDPManage().remoteUnlock == 1) {
// logic.udpOpenDoorAction();
// showDeletPasswordAlertDialog(context);
// } else {
// logic.showToast('请在锁设置中开启远程开锁'.tr);
// }
},
)
]);
}
String getAnswerBtnImg() {
switch (state.talkStatus.value) {
case TalkStatus.passiveCallWaitingAnswer:
return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
case TalkStatus.answeredSuccessfully:
case TalkStatus.proactivelyCallWaitingAnswer:
return 'images/main/icon_lockDetail_monitoringUnTalkback.png';
default:
return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
}
}
String getAnswerBtnName() {
switch (state.talkStatus.value) {
case TalkStatus.passiveCallWaitingAnswer:
return '接听'.tr;
case TalkStatus.proactivelyCallWaitingAnswer:
case TalkStatus.answeredSuccessfully:
return '长按说话'.tr;
default:
return '接听'.tr;
}
}
Widget bottomBtnItemWidget(
String iconUrl,
String name,
Color backgroundColor, {
required Function() onClick,
Function()? longPress,
Function()? longPressUp,
}) {
double wh = 80.w;
return GestureDetector(
onTap: onClick,
onLongPress: longPress,
onLongPressUp: longPressUp,
child: SizedBox(
height: 160.w,
width: 140.w,
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: <Widget>[
Container(
width: wh,
height: wh,
constraints: BoxConstraints(
minWidth: wh,
),
decoration: BoxDecoration(
color: backgroundColor,
borderRadius: BorderRadius.circular((wh + 10.w * 2) / 2),
),
padding: EdgeInsets.all(20.w),
child: Image.asset(iconUrl, fit: BoxFit.fitWidth),
),
SizedBox(height: 20.w),
            Text(
              name,
              style: TextStyle(fontSize: 20.sp, color: Colors.white),
              textAlign: TextAlign.center, // center the label
              maxLines: 2, // allow up to two lines
            )
],
),
),
);
}
  // Spinning "connecting" indicator
Widget buildRotationTransition() {
return Positioned(
left: ScreenUtil().screenWidth / 2 - 220.w / 2,
top: ScreenUtil().screenHeight / 2 - 220.w / 2 - 150.h,
child: GestureDetector(
        child: RotationTransition(
          // Rotate around the center
          alignment: Alignment.center,
          // Driven by the animation controller
          turns: state.animationController,
          // The spinning connecting image
          child: AnimatedOpacity(
opacity: 0.5,
duration: const Duration(seconds: 2),
child: Image.asset(
'images/main/realTime_connecting.png',
width: 220.w,
height: 220.w,
),
),
),
onTap: () {
state.animationController.forward();
},
),
);
}
  @override
  void dispose() {
    timer?.cancel();
    timer = null;
    _sendTimer?.cancel();
    _sendTimer = null;
    // talkDataRepository.dispose();
    state.animationController.dispose(); // release the animation controller
    super.dispose();
  }
}

View File

@@ -0,0 +1,52 @@
import 'dart:async';
import 'package:flutter/cupertino.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:webview_flutter/webview_flutter.dart';
class H264WebViewState {
  GlobalKey globalKey = GlobalKey();
  int udpSendDataFrameNumber = 0; // number of UDP data frames sent
  late AnimationController animationController;
  // WebView controller hosting the H264 player page
  late final WebViewController webViewController;
  // Shared talk status singleton
  final StartChartTalkStatus startChartTalkStatus =
      StartChartTalkStatus.instance;
  Rx<TalkStatus> talkStatus = TalkStatus.none.obs; // current talk status
  RxBool isShowLoading = true.obs;
  Timer? oneMinuteTimeTimer; // one-minute call timer
  RxInt oneMinuteTime = 0.obs; // elapsed seconds
  RxBool isLongPressing = false.obs; // push-to-talk held down
  final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
  RxInt lastFrameTimestamp = 0.obs; // timestamp of the last video frame
  Rx<NetworkStatus> networkStatus =
      NetworkStatus.normal.obs; // current network quality
  RxInt alertCount = 0.obs; // alerts shown so far
  RxInt maxAlertNumber = 3.obs; // maximum number of alerts
  RxBool isOpenVoice = true.obs; // audio playback enabled
  RxBool isRecordingScreen = false.obs; // screen recording active
  RxBool isRecordingAudio = false.obs; // audio recording active
  Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // recording start
  Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; // recording end
  RxInt recordingAudioTime = 0.obs; // recording duration in seconds
  RxDouble fps = 0.0.obs; // current FPS
  late VoiceProcessor? voiceProcessor; // microphone capture
  final int frameLength = 320; // samples per capture frame (alt: 640)
  final int sampleRate = 8000; // capture sample rate in Hz
  List<int> recordingAudioAllFrames = <int>[]; // captured audio frames
  List<int> lockRecordingAudioAllFrames = <int>[]; // lock-side audio frames
  RxInt rotateAngle = 0.obs; // video rotation angle
  RxBool hasAudioData = false.obs; // whether audio data has arrived
  RxInt lastAudioTimestamp = 0.obs; // timestamp of the last audio data
}

View File

@@ -992,8 +992,8 @@ packages:
dependency: "direct main"
description:
path: "."
ref: main
resolved-ref: aa93729f48762421658675800be68aee27b6d8fb
ref: "807ddb8e396c2dce16919df84efe795072404dde"
resolved-ref: "807ddb8e396c2dce16919df84efe795072404dde"
url: "git@code-internal.star-lock.cn:StarlockTeam/jpush_flutter.git"
source: git
version: "2.5.8"

View File

@@ -214,7 +214,7 @@ dependencies:
jpush_flutter:
git:
url: git@code-internal.star-lock.cn:StarlockTeam/jpush_flutter.git
ref: main
ref: 807ddb8e396c2dce16919df84efe795072404dde
#video player
video_player: ^2.9.2
@@ -316,6 +316,7 @@ flutter:
- images/lockType/
- assets/
- assets/html/h264.html
- assets/html/jmuxer.min.js
- lan/
# An image asset can refer to one or more resolution-specific "variants", see
# https://flutter.dev/assets-and-images/#resolution-aware