Merge branch 'develop_liyi' of code-internal.star-lock.cn:StarlockTeam/app-starlock into develop_liyi
commit d0e66e1694

11  README.md
@@ -171,3 +171,14 @@ java -jar android/bundletool.jar build-apks --bundle=build/app/outputs/bundle/sk
```bash
java -jar android/bundletool.jar install-apks --apks=build/app/outputs/bundle/skyRelease/app-sky-release.aab.apks
```

## JPush

JPush (Aurora Push): the app currently depends on JPush only for its pass-through (transparent message) capability. For actual push delivery, the app intercepts the vendor push tokens that JPush obtains, reports those tokens to our own business server, and the server then calls each vendor's push channel directly to deliver messages. For this reason the JPush Flutter SDK was privately customized; the changes are:

* In the Android and iOS native code, intercept the vendor push token obtained by JPush and pass it back to the Flutter application layer
* On Android, obtain the token uniformly through the vendor push SDKs integrated by JPush
* On iOS, obtain the token through the native token registration callback
* On the Flutter side, report the obtained vendor token and vendor identifier to the business server

Customized jpush_flutter: http://code-internal.star-lock.cn/StarlockTeam/jpush_flutter
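A minimal sketch of the Flutter-side reporting step is shown below; the channel name, method name, and argument keys are illustrative assumptions, not the actual interfaces of the customized SDK.

```dart
// Sketch only: 'starlock/push_token', 'onVendorToken', and the argument keys are assumed names.
import 'package:flutter/services.dart';

const MethodChannel _pushTokenChannel = MethodChannel('starlock/push_token');

/// Listens for the vendor token forwarded by the native layer and hands it to
/// [report], which is expected to call the business server's upload API.
void listenForVendorPushToken(
    Future<void> Function(String vendor, String token) report) {
  _pushTokenChannel.setMethodCallHandler((MethodCall call) async {
    if (call.method == 'onVendorToken') {
      final Map<dynamic, dynamic> args = call.arguments as Map<dynamic, dynamic>;
      // vendor: e.g. huawei / xiaomi / oppo / vivo / apns; token: the raw vendor push token.
      await report(args['vendor'] as String, args['token'] as String);
    }
  });
}
```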
File diff suppressed because it is too large

1  assets/html/jmuxer.min.js (vendored, new file)
File diff suppressed because one or more lines are too long
@@ -76,12 +76,25 @@
/*
 * Apple push registration success callback: upload the deviceToken returned by Apple to the CloudPush server.
 */
- (void)application:(UIApplication *)application didRegisterForRemoteNotificationsWithDeviceToken:(NSData *)deviceToken {
//- (void)application:(UIApplication *)application didRegisterForRemoteNotificationsWithDeviceToken:(NSData *)deviceToken {
//    NSString *tokenString = [self hexStringFromData:deviceToken];
//    NSLog(@"starlock didRegisterForRemoteNotificationsWithDeviceToken token: %@", tokenString);
//    /// Required - register DeviceToken
//    [JPUSHService registerDeviceToken:deviceToken];
//
//}

    /// Required - register DeviceToken
    [JPUSHService registerDeviceToken:deviceToken];
- (NSString *)hexStringFromData:(NSData *)data {
    const unsigned char *dataBuffer = (const unsigned char *)[data bytes];
    NSMutableString *hexString = [NSMutableString stringWithCapacity:data.length * 2];

    for (NSInteger i = 0; i < data.length; i++) {
        [hexString appendFormat:@"%02x", dataBuffer[i]];
    }

    return [hexString copy];
}

/*
 * Apple push registration failure callback
 */
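For reference, the conversion performed by the native hexStringFromData: helper looks like this on the Dart side; this is an illustrative sketch, not code from this commit.

```dart
// Illustrative Dart equivalent of the native hexStringFromData: helper.
String hexStringFromBytes(List<int> bytes) {
  final StringBuffer buffer = StringBuffer();
  for (final int b in bytes) {
    // Two lowercase hex digits per byte, matching the native %02x format.
    buffer.write(b.toRadixString(16).padLeft(2, '0'));
  }
  return buffer.toString();
}
```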
@@ -61,6 +61,7 @@ import 'package:star_lock/mine/valueAddedServices/advancedFeaturesWeb/advancedFe
import 'package:star_lock/mine/valueAddedServices/advancedFunctionRecord/advancedFunctionRecord_page.dart';
import 'package:star_lock/mine/valueAddedServices/valueAddedServicesRecord/value_added_services_record_page.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_page.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view.dart';

import 'common/safetyVerification/safetyVerification_page.dart';
import 'login/forgetPassword/starLock_forgetPassword_page.dart';
@@ -515,6 +516,7 @@ abstract class Routers {
  static const String doubleLockLinkPage = '/doubleLockLinkPage'; // double-lock linkage
  static const String starChartPage = '/starChartPage'; // Star Chart
  static const String starChartTalkView = '/starChartTalkView'; // Star Chart talk page
  static const String h264WebView = '/h264WebView'; // Star Chart talk page (H264 WebView)
}

abstract class AppRouters {
@@ -1195,5 +1197,6 @@ abstract class AppRouters {
        page: () => const DoubleLockLinkPage()),
    GetPage<dynamic>(
        name: Routers.starChartTalkView, page: () => const TalkViewPage()),
    GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()),
  ];
}
@ -267,26 +267,31 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
|
||||
color: Colors.white,
|
||||
borderRadius: BorderRadius.circular(16.w),
|
||||
),
|
||||
child: Obx(
|
||||
() => state.lockLogItemList.isNotEmpty
|
||||
? Timeline.tileBuilder(
|
||||
builder: _timelineBuilderWidget(),
|
||||
theme: TimelineThemeData(
|
||||
nodePosition: 0.04, // distance from the left edge
|
||||
connectorTheme: const ConnectorThemeData(
|
||||
thickness: 1.0,
|
||||
color: AppColors.greyLineColor,
|
||||
indent: 0.5,
|
||||
child: Obx(() => EasyRefreshTool(
|
||||
onRefresh: () async {
|
||||
logic.mockNetworkDataRequest(isRefresh: true);
|
||||
},
|
||||
onLoad: () async {
|
||||
logic.mockNetworkDataRequest(isRefresh: false);
|
||||
},
|
||||
child: state.lockLogItemList.isNotEmpty
|
||||
? Timeline.tileBuilder(
|
||||
builder: _timelineBuilderWidget(),
|
||||
theme: TimelineThemeData(
|
||||
nodePosition: 0.04, // distance from the left edge
|
||||
connectorTheme: const ConnectorThemeData(
|
||||
thickness: 1.0,
|
||||
color: AppColors.greyLineColor,
|
||||
indent: 0.5,
|
||||
),
|
||||
indicatorTheme: const IndicatorThemeData(
|
||||
size: 8.0,
|
||||
color: AppColors.greyLineColor,
|
||||
position: 0.4,
|
||||
),
|
||||
),
|
||||
indicatorTheme: const IndicatorThemeData(
|
||||
size: 8.0,
|
||||
color: AppColors.greyLineColor,
|
||||
position: 0.45,
|
||||
),
|
||||
),
|
||||
)
|
||||
: NoData(),
|
||||
),
|
||||
)
|
||||
: NoData())),
|
||||
);
|
||||
}
|
||||
|
||||
@ -347,6 +352,9 @@ class _DoorLockLogPageState extends State<DoorLockLogPage> with RouteAware {
|
||||
if (recordData.videoUrl != null && recordData.videoUrl!.isNotEmpty) {
|
||||
final lockLogItemList = state.lockLogItemList.value;
|
||||
final list = lockLogItemList
|
||||
.where((e) =>
|
||||
(e.videoUrl != null && e.videoUrl!.isNotEmpty) ||
|
||||
(e.imagesUrl != null && e.imagesUrl!.isNotEmpty))
|
||||
.map(
|
||||
(e) => RecordListData(
|
||||
videoUrl: e.videoUrl,
|
||||
|
||||
@@ -2,6 +2,7 @@ import 'dart:typed_data';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@@ -158,16 +159,13 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
  void _handleVideoH264(TalkData talkData) {
    final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame();
    talkDataH264Frame.mergeFromBuffer(talkData.content);
    // AppLog.log('H264 TalkData :$talkDataH264Frame');
    // talkDataRepository.addTalkData(talkData);
    frameHandler.handleFrame(talkDataH264Frame);
  }

  /// Handle image data
  void _handleVideoImage(TalkData talkData) async {
    final List<Uint8List> processCompletePayload =
        await _processCompletePayload(Uint8List.fromList(talkData.content));
    // AppLog.log('Complete frames received: ${processCompletePayload.length}'); // send each frame in turn

    processCompletePayload.forEach((element) {
      talkData.content = element;
      talkDataRepository.addTalkData(talkData);
@@ -181,7 +179,7 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
        // // Convert to PCM data
        // List<int> pcmBytes = G711().convertList(g711Data);
        // talkData.content = pcmBytes;
        talkDataRepository.addTalkData(talkData);
        // talkDataRepository.addTalkData(talkData);
      } catch (e) {
        print('Error decoding G.711 to PCM: $e');
      }
@@ -73,9 +73,19 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
    // Start the talk-request timeout timer
    talkeRequestOverTimeTimerManager.start();
    // A call request was received; navigate to the answering page
    Get.toNamed(
      Routers.starChartTalkView,
    );
    if (startChartManage
            .getDefaultTalkExpect()
            .videoType
            .indexOf(VideoTypeE.H264) ==
        -1) {
      Get.toNamed(
        Routers.starChartTalkView,
      );
    } else {
      Get.toNamed(
        Routers.h264WebView,
      );
    }
  }

  // Show a local notification when an incoming call request is received
22  lib/talk/starChart/handle/other/h264_frame_buffer.dart (new file)
@@ -0,0 +1,22 @@
import 'dart:typed_data';

import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';

class H264FrameBuffer {
  List<TalkDataH264Frame> frames = [];

  void addFrame(TalkDataH264Frame frame) {
    frames.add(frame);
  }

  Uint8List getCompleteStream() {
    final List<int> completeStream = [];
    for (final frame in frames) {
      // Prepend the Annex-B start code (assumed here to be 0x00 0x00 0x01)
      completeStream.addAll([0x00, 0x00, 0x01]);
      // Append the frame data
      completeStream.addAll(frame.frameData);
    }
    return Uint8List.fromList(completeStream);
  }
}
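A minimal usage sketch for H264FrameBuffer; the flush threshold is an illustrative assumption, not part of the commit.

```dart
// Illustrative only: collect frames, then hand the Annex-B byte stream to the player.
final H264FrameBuffer frameBuffer = H264FrameBuffer();

void onFrame(TalkDataH264Frame frame) {
  frameBuffer.addFrame(frame);
  if (frameBuffer.frames.length >= 22) { // assumed flush threshold
    final Uint8List annexB = frameBuffer.getCompleteStream();
    // e.g. forward `annexB` to the WebView/jmuxer side, then start a new group.
    frameBuffer.frames.clear();
  }
}
```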
84  lib/talk/starChart/handle/other/h264_frame_handler.dart (new file)
@@ -0,0 +1,84 @@
import 'package:star_lock/app_settings/app_settings.dart';
import '../../proto/talk_data_h264_frame.pb.dart';

class H264FrameHandler {
  final Map<int, TalkDataH264Frame> _frameBuffer = {};
  final void Function(List<int> frameData) onCompleteFrame;
  int _lastProcessedSeq = -1;

  H264FrameHandler({required this.onCompleteFrame});

  void handleFrame(TalkDataH264Frame frame) {
    // Buffer the frame
    _frameBuffer[frame.frameSeq] = frame;

    // Check whether a complete GOP (Group of Pictures) can be assembled
    _tryAssembleFrames(frame.frameSeq);
  }

  void _tryAssembleFrames(int currentSeq) {
    // Look for a contiguous run of frame sequence numbers
    final List<int> sortedSeqs = _frameBuffer.keys.toList()..sort();
    final List<int> framesToProcess = [];

    // Walk backwards from the newest frame to the most recent usable I frame or P frame
    int? startFrameSeq;
    for (var seq in sortedSeqs.reversed) {
      final frame = _frameBuffer[seq];
      if (frame?.frameType == TalkDataH264Frame_FrameTypeE.I) {
        startFrameSeq = seq;
        break;
      } else if (frame?.frameType == TalkDataH264Frame_FrameTypeE.P) {
        // Check whether this P frame's reference I frame is buffered
        if (_frameBuffer.containsKey(frame?.frameSeqI)) {
          startFrameSeq = seq;
          break;
        } else {
          // Drop P frames whose reference I frame is missing
          _frameBuffer.remove(seq);
        }
      }
    }

    if (startFrameSeq != null) {
      // Collect the consecutive frames starting from that I frame or P frame
      int expectedSeq = startFrameSeq;
      for (var seq in sortedSeqs.where((s) => s >= startFrameSeq!)) {
        if (seq != expectedSeq) break;
        framesToProcess.add(seq);
        expectedSeq++;
      }

      if (framesToProcess.isNotEmpty) {
        _processFrames(framesToProcess);
      }
    } else {
      _clearOldFrames(currentSeq);
    }
  }

  void _clearOldFrames(int currentSeq) {
    // Evict frames that are much older than the current sequence number
    _frameBuffer.removeWhere((seq, frame) => seq < currentSeq - 200); // tune this threshold as needed
  }

  void _processFrames(List<int> frameSeqs) {
    // Assemble the frame data in sequence order
    final List<int> assembledData = [];

    for (var seq in frameSeqs) {
      final frame = _frameBuffer[seq]!;
      assembledData.addAll(frame.frameData);

      // Remove the frame from the buffer once it has been processed
      _frameBuffer.remove(seq);
    }

    // Hand the assembled frame data to the callback
    onCompleteFrame(assembledData);
  }

  void clear() {
    _frameBuffer.clear();
  }
}
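A small wiring sketch (the function name is an illustrative assumption) showing how protobuf-encoded frames are decoded and fed into the handler, mirroring UdpTalkDataHandler._handleVideoH264:

```dart
// Illustrative only: decode incoming protobuf bytes and feed them to the handler.
final H264FrameHandler handler = H264FrameHandler(onCompleteFrame: (List<int> data) {
  // `data` is the concatenated FrameData of a contiguous run of frames that starts
  // at an I frame (or at a P frame whose reference I frame is still buffered).
  print('assembled ${data.length} bytes of H264');
});

void onH264Payload(List<int> protobufBytes) {
  final TalkDataH264Frame frame = TalkDataH264Frame();
  frame.mergeFromBuffer(protobufBytes); // same decode step as in _handleVideoH264
  handler.handleFrame(frame);
}
```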
@@ -1,5 +1,4 @@
import 'dart:async';

import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';

class TalkDataRepository {
@@ -27,6 +26,9 @@ class TalkDataRepository {

  bool _isListening = false;

  // Buffer used to store incoming data
  final List<TalkData> _buffer = [];

  // Expose the talk-data stream
  Stream<TalkData> get talkDataStream =>
      _talkDataStreamController.stream.transform(
@@ -41,14 +43,11 @@ class TalkDataRepository {
        },
      ),
    );
  final List<TalkData> _buffer = []; // Buffer used to store incoming data

  // Add a TalkData item to the stream
  void addTalkData(TalkData talkData) async {
  void addTalkData(TalkData talkData) {
    if (_isListening) {
      Future.microtask(() {
        _talkDataStreamController.add(talkData);
      });
      _talkDataStreamController.add(talkData);
    }
  }

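A minimal sketch of consuming the repository stream; the dispatch shown is an illustrative assumption, not part of the commit.

```dart
// Illustrative only: subscribe to the shared repository and dispatch by content type.
import 'dart:async';

import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';

StreamSubscription<TalkData> listenForTalkData() {
  return TalkDataRepository.instance.talkDataStream.listen((TalkData data) {
    if (data.contentType == TalkData_ContentTypeE.H264) {
      // e.g. feed the H264 bytes to the WebView/jmuxer player
    } else if (data.contentType == TalkData_ContentTypeE.G711) {
      // e.g. decode G.711 and feed the PCM player
    }
  });
}
```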
@@ -14,6 +14,7 @@ import 'package:star_lock/talk/starChart/constant/payload_type_constant.dart';

import 'package:star_lock/talk/starChart/constant/udp_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';

import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/handle/other/talke_data_over_time_timer_manager.dart';
@@ -52,6 +53,15 @@ class ScpMessageBaseHandle {

  final audioManager = AudioPlayerManager();

  // Callback that receives the data of each fully assembled H264 frame
  final H264FrameHandler frameHandler =
      H264FrameHandler(onCompleteFrame: (frameData) {
    // Forward the complete frame data
    TalkDataRepository.instance.addTalkData(
      TalkData(contentType: TalkData_ContentTypeE.H264, content: frameData),
    );
  });

  // Reply with a success message
  void replySuccessMessage(ScpMessage scpMessage) {
    startChartManage.sendGenericRespSuccessMessage(
@ -22,6 +22,7 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
|
||||
$core.int? frameSeq,
|
||||
TalkDataH264Frame_FrameTypeE? frameType,
|
||||
$core.List<$core.int>? frameData,
|
||||
$core.int? frameSeqI,
|
||||
}) {
|
||||
final $result = create();
|
||||
if (frameSeq != null) {
|
||||
@ -33,6 +34,9 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
|
||||
if (frameData != null) {
|
||||
$result.frameData = frameData;
|
||||
}
|
||||
if (frameSeqI != null) {
|
||||
$result.frameSeqI = frameSeqI;
|
||||
}
|
||||
return $result;
|
||||
}
|
||||
TalkDataH264Frame._() : super();
|
||||
@ -43,6 +47,7 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
|
||||
..a<$core.int>(1, _omitFieldNames ? '' : 'FrameSeq', $pb.PbFieldType.OU3, protoName: 'FrameSeq')
|
||||
..e<TalkDataH264Frame_FrameTypeE>(2, _omitFieldNames ? '' : 'FrameType', $pb.PbFieldType.OE, protoName: 'FrameType', defaultOrMaker: TalkDataH264Frame_FrameTypeE.NONE, valueOf: TalkDataH264Frame_FrameTypeE.valueOf, enumValues: TalkDataH264Frame_FrameTypeE.values)
|
||||
..a<$core.List<$core.int>>(3, _omitFieldNames ? '' : 'FrameData', $pb.PbFieldType.OY, protoName: 'FrameData')
|
||||
..a<$core.int>(4, _omitFieldNames ? '' : 'FrameSeqI', $pb.PbFieldType.OU3, protoName: 'FrameSeqI')
|
||||
..hasRequiredFields = false
|
||||
;
|
||||
|
||||
@ -95,6 +100,16 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
|
||||
$core.bool hasFrameData() => $_has(2);
|
||||
@$pb.TagNumber(3)
|
||||
void clearFrameData() => clearField(3);
|
||||
|
||||
/// Sequence number of the corresponding I frame
|
||||
@$pb.TagNumber(4)
|
||||
$core.int get frameSeqI => $_getIZ(3);
|
||||
@$pb.TagNumber(4)
|
||||
set frameSeqI($core.int v) { $_setUnsignedInt32(3, v); }
|
||||
@$pb.TagNumber(4)
|
||||
$core.bool hasFrameSeqI() => $_has(3);
|
||||
@$pb.TagNumber(4)
|
||||
void clearFrameSeqI() => clearField(4);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -20,6 +20,7 @@ const TalkDataH264Frame$json = {
|
||||
{'1': 'FrameSeq', '3': 1, '4': 1, '5': 13, '10': 'FrameSeq'},
|
||||
{'1': 'FrameType', '3': 2, '4': 1, '5': 14, '6': '.main.TalkDataH264Frame.FrameTypeE', '10': 'FrameType'},
|
||||
{'1': 'FrameData', '3': 3, '4': 1, '5': 12, '10': 'FrameData'},
|
||||
{'1': 'FrameSeqI', '3': 4, '4': 1, '5': 13, '10': 'FrameSeqI'},
|
||||
],
|
||||
'4': [TalkDataH264Frame_FrameTypeE$json],
|
||||
};
|
||||
@ -38,6 +39,6 @@ const TalkDataH264Frame_FrameTypeE$json = {
|
||||
final $typed_data.Uint8List talkDataH264FrameDescriptor = $convert.base64Decode(
|
||||
'ChFUYWxrRGF0YUgyNjRGcmFtZRIaCghGcmFtZVNlcRgBIAEoDVIIRnJhbWVTZXESQAoJRnJhbW'
|
||||
'VUeXBlGAIgASgOMiIubWFpbi5UYWxrRGF0YUgyNjRGcmFtZS5GcmFtZVR5cGVFUglGcmFtZVR5'
|
||||
'cGUSHAoJRnJhbWVEYXRhGAMgASgMUglGcmFtZURhdGEiJAoKRnJhbWVUeXBlRRIICgROT05FEA'
|
||||
'ASBQoBSRABEgUKAVAQAg==');
|
||||
'cGUSHAoJRnJhbWVEYXRhGAMgASgMUglGcmFtZURhdGESHAoJRnJhbWVTZXFJGAQgASgNUglGcm'
|
||||
'FtZVNlcUkiJAoKRnJhbWVUeXBlRRIICgROT05FEAASBQoBSRABEgUKAVAQAg==');
|
||||
|
||||
|
||||
@@ -15,4 +15,6 @@ message TalkDataH264Frame {
  FrameTypeE FrameType = 2;
  // Frame data
  bytes FrameData = 3;
  // Sequence number of the corresponding I frame
  uint32 FrameSeqI = 4;
}
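For reference, a P frame carrying the new FrameSeqI field can be built and serialized with the generated Dart bindings as sketched below; the helper name and values are illustrative.

```dart
// Illustrative only: build a P frame that references I frame 120 and serialize it.
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';

List<int> encodePFrame(List<int> nalBytes) {
  final TalkDataH264Frame frame = TalkDataH264Frame(
    frameSeq: 121,
    frameType: TalkDataH264Frame_FrameTypeE.P,
    frameData: nalBytes,
    frameSeqI: 120, // new field: sequence number of the reference I frame
  );
  return frame.writeToBuffer();
}
```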
@@ -112,7 +112,7 @@ class StartChartManage {

  // Default expected data formats for a call
  TalkExpectReq _defaultTalkExpect = TalkExpectReq(
    videoType: [VideoTypeE.IMAGE],
    videoType: [VideoTypeE.H264],
    audioType: [AudioTypeE.G711],
  );

@@ -419,9 +419,15 @@ class StartChartManage {
    if (talkStatus.status != TalkStatus.proactivelyCallWaitingAnswer) {
      // Stop playing the ringtone
      // AudioPlayerManager().playRingtone();
      Get.toNamed(
        Routers.starChartTalkView,
      );
      if (_defaultTalkExpect.videoType.contains(VideoTypeE.H264)) {
        Get.toNamed(
          Routers.h264WebView,
        );
      } else {
        Get.toNamed(
          Routers.starChartTalkView,
        );
      }
    }
    talkRequestTimer ??= Timer.periodic(
      Duration(
@@ -1113,15 +1119,19 @@ class StartChartManage {

  void reSetDefaultTalkExpect() {
    _defaultTalkExpect = TalkExpectReq(
      videoType: [VideoTypeE.IMAGE],
      videoType: [VideoTypeE.H264],
      audioType: [AudioTypeE.G711],
    );
  }

  TalkExpectReq getDefaultTalkExpect() {
    return _defaultTalkExpect;
  }

  /// Change the expected incoming data types
  void sendOnlyImageVideoTalkExpectData() {
    final talkExpectReq = TalkExpectReq(
      videoType: [VideoTypeE.IMAGE],
      videoType: [VideoTypeE.H264],
      audioType: [],
    );
    changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
@@ -1131,7 +1141,7 @@ class StartChartManage {

  /// Change the expected incoming data types
  void sendImageVideoAndG711AudioTalkExpectData() {
    final talkExpectReq = TalkExpectReq(
      videoType: [VideoTypeE.IMAGE],
      videoType: [VideoTypeE.H264],
      audioType: [AudioTypeE.G711],
    );
    changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
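The navigation change introduced in these hunks boils down to the following check; the wrapper function is an illustrative assumption, not part of the commit.

```dart
// Illustrative only: open the H264 WebView page when H264 is among the expected
// video types, otherwise fall back to the image-based talk page.
void openTalkPage(StartChartManage manage) {
  final bool expectsH264 =
      manage.getDefaultTalkExpect().videoType.contains(VideoTypeE.H264);
  Get.toNamed(expectsH264 ? Routers.h264WebView : Routers.starChartTalkView);
}
```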
434  lib/talk/starChart/webView/h264_web_logic.dart (new file)
@@ -0,0 +1,434 @@
|
||||
import 'dart:async';
|
||||
import 'dart:io';
|
||||
import 'dart:ui' as ui;
|
||||
import 'dart:math'; // Import the math package to use sqrt
|
||||
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:flutter/rendering.dart';
|
||||
import 'package:flutter/services.dart';
|
||||
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
|
||||
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
|
||||
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
|
||||
import 'package:gallery_saver/gallery_saver.dart';
|
||||
import 'package:get/get.dart';
|
||||
import 'package:image_gallery_saver/image_gallery_saver.dart';
|
||||
import 'package:path_provider/path_provider.dart';
|
||||
import 'package:permission_handler/permission_handler.dart';
|
||||
import 'package:star_lock/app_settings/app_settings.dart';
|
||||
import 'package:star_lock/login/login/entity/LoginEntity.dart';
|
||||
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
|
||||
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
|
||||
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
|
||||
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
|
||||
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
|
||||
import 'package:star_lock/network/api_repository.dart';
|
||||
import 'package:star_lock/talk/call/g711.dart';
|
||||
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
|
||||
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
|
||||
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
|
||||
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
|
||||
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
|
||||
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
|
||||
import 'package:star_lock/tools/bugly/bugly_tool.dart';
|
||||
import 'package:webview_flutter/webview_flutter.dart';
|
||||
|
||||
import '../../../../tools/baseGetXController.dart';
|
||||
|
||||
class H264WebViewLogic extends BaseGetXController {
|
||||
final H264WebViewState state = H264WebViewState();
|
||||
|
||||
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
|
||||
|
||||
@override
|
||||
void onInit() {
|
||||
super.onInit();
|
||||
// 初始化 WebView 控制器
|
||||
state.webViewController = WebViewController()
|
||||
..setJavaScriptMode(JavaScriptMode.unrestricted)
|
||||
..enableZoom(false)
|
||||
..addJavaScriptChannel(
|
||||
'Flutter',
|
||||
onMessageReceived: (message) {
|
||||
print("来自 HTML 的消息: ${message.message}");
|
||||
},
|
||||
);
|
||||
|
||||
state.isShowLoading.value = true;
|
||||
// 加载本地 HTML
|
||||
_loadLocalHtml();
|
||||
// 创建流数据监听
|
||||
_createFramesStreamListen();
|
||||
|
||||
_startListenTalkStatus();
|
||||
state.talkStatus.value = state.startChartTalkStatus.status;
|
||||
// 初始化音频播放器
|
||||
_initFlutterPcmSound();
|
||||
// 初始化录音控制器
|
||||
_initAudioRecorder();
|
||||
}
|
||||
|
||||
/// 初始化音频录制器
|
||||
void _initAudioRecorder() {
|
||||
state.voiceProcessor = VoiceProcessor.instance;
|
||||
}
|
||||
|
||||
/// 初始化音频播放器
|
||||
void _initFlutterPcmSound() {
|
||||
const int sampleRate = 8000;
|
||||
FlutterPcmSound.setLogLevel(LogLevel.none);
|
||||
FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
|
||||
// 设置 feed 阈值
|
||||
if (Platform.isAndroid) {
|
||||
FlutterPcmSound.setFeedThreshold(1024); // Android 平台的特殊处理
|
||||
} else {
|
||||
FlutterPcmSound.setFeedThreshold(2000); // 非 Android 平台的处理
|
||||
}
|
||||
}
|
||||
|
||||
void _createFramesStreamListen() async {
|
||||
state.talkDataRepository.talkDataStream.listen((TalkData event) async {
|
||||
// 发送数据给js处理
|
||||
_sendBufferedData(event.content);
|
||||
});
|
||||
}
|
||||
|
||||
/// 加载html文件
|
||||
Future<void> _loadLocalHtml() async {
|
||||
// 加载 HTML 文件内容
|
||||
final String fileHtmlContent =
|
||||
await rootBundle.loadString('assets/html/h264.html');
|
||||
|
||||
// 加载 JS 文件内容
|
||||
final String jsContent =
|
||||
await rootBundle.loadString('assets/html/jmuxer.min.js');
|
||||
|
||||
// 将 JS 文件内容嵌入到 HTML 中
|
||||
final String htmlWithJs = fileHtmlContent.replaceAll(
|
||||
'<script src="jmuxer.min.js"></script>', // 替换掉引用外部 JS 的标签
|
||||
'<script>$jsContent</script>' // 使用内联方式嵌入 JS 内容
|
||||
);
|
||||
|
||||
// 加载最终的 HTML 字符串到 WebView 中
|
||||
if (state.webViewController != null) {
|
||||
state.webViewController.loadHtmlString(htmlWithJs); // 设置 baseUrl 避免资源加载问题
|
||||
}
|
||||
}
|
||||
|
||||
  // Send the buffered bytes to the HTML/JS side
  _sendBufferedData(List<int> buffer) async {
    // Forward the data to the feedDataFromFlutter function defined in the HTML page
    String jsCode = "feedDataFromFlutter($buffer);";
    await state.webViewController.runJavaScript(jsCode);

    if (state.isShowLoading.isTrue) {
      await Future.delayed(Duration(seconds: 1));
      state.isShowLoading.value = false;
    }
  }
|
||||
|
||||
/// 监听对讲状态
|
||||
void _startListenTalkStatus() {
|
||||
state.startChartTalkStatus.statusStream.listen((talkStatus) {
|
||||
state.talkStatus.value = talkStatus;
|
||||
switch (talkStatus) {
|
||||
case TalkStatus.rejected:
|
||||
case TalkStatus.hangingUpDuring:
|
||||
case TalkStatus.notTalkData:
|
||||
case TalkStatus.notTalkPing:
|
||||
case TalkStatus.end:
|
||||
_handleInvalidTalkStatus();
|
||||
break;
|
||||
case TalkStatus.answeredSuccessfully:
|
||||
state.oneMinuteTimeTimer?.cancel(); // 取消旧定时器
|
||||
state.oneMinuteTimeTimer ??=
|
||||
Timer.periodic(const Duration(seconds: 1), (Timer t) {
|
||||
if (state.isShowLoading.isFalse) {
|
||||
state.oneMinuteTime.value++;
|
||||
if (state.oneMinuteTime.value >= 60) {
|
||||
t.cancel(); // 取消定时器
|
||||
state.oneMinuteTime.value = 0;
|
||||
}
|
||||
}
|
||||
});
|
||||
break;
|
||||
default:
|
||||
// 其他状态的处理
|
||||
break;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// 更新发送预期数据
|
||||
void updateTalkExpect() {
|
||||
TalkExpectReq talkExpectReq = TalkExpectReq();
|
||||
state.isOpenVoice.value = !state.isOpenVoice.value;
|
||||
if (!state.isOpenVoice.value) {
|
||||
talkExpectReq = TalkExpectReq(
|
||||
videoType: [VideoTypeE.IMAGE],
|
||||
audioType: [],
|
||||
);
|
||||
showToast('已静音'.tr);
|
||||
} else {
|
||||
talkExpectReq = TalkExpectReq(
|
||||
videoType: [VideoTypeE.IMAGE],
|
||||
audioType: [AudioTypeE.G711],
|
||||
);
|
||||
}
|
||||
|
||||
/// 修改发送预期数据
|
||||
StartChartManage().changeTalkExpectDataTypeAndReStartTalkExpectMessageTimer(
|
||||
talkExpect: talkExpectReq);
|
||||
}
|
||||
|
||||
/// 处理无效通话状态
|
||||
void _handleInvalidTalkStatus() {}
|
||||
|
||||
/// 截图并保存到相册
|
||||
Future<void> captureAndSavePng() async {
|
||||
try {
|
||||
if (state.globalKey.currentContext == null) {
|
||||
AppLog.log('截图失败: 未找到当前上下文');
|
||||
return;
|
||||
}
|
||||
final RenderRepaintBoundary boundary = state.globalKey.currentContext!
|
||||
.findRenderObject()! as RenderRepaintBoundary;
|
||||
final ui.Image image = await boundary.toImage();
|
||||
final ByteData? byteData =
|
||||
await image.toByteData(format: ui.ImageByteFormat.png);
|
||||
|
||||
if (byteData == null) {
|
||||
AppLog.log('截图失败: 图像数据为空');
|
||||
return;
|
||||
}
|
||||
final Uint8List pngBytes = byteData.buffer.asUint8List();
|
||||
|
||||
// 获取应用程序的文档目录
|
||||
final Directory directory = await getApplicationDocumentsDirectory();
|
||||
final String imagePath = '${directory.path}/screenshot.png';
|
||||
|
||||
// 将截图保存为文件
|
||||
final File imgFile = File(imagePath);
|
||||
await imgFile.writeAsBytes(pngBytes);
|
||||
|
||||
// 将截图保存到相册
|
||||
await ImageGallerySaver.saveFile(imagePath);
|
||||
|
||||
AppLog.log('截图保存路径: $imagePath');
|
||||
showToast('截图已保存到相册'.tr);
|
||||
} catch (e) {
|
||||
AppLog.log('截图失败: $e');
|
||||
}
|
||||
}
|
||||
|
||||
// 发起接听命令
|
||||
void initiateAnswerCommand() {
|
||||
StartChartManage().startTalkAcceptTimer();
|
||||
}
|
||||
|
||||
//开始录音
|
||||
Future<void> startProcessingAudio() async {
|
||||
// 增加录音帧监听器和错误监听器
|
||||
state.voiceProcessor?.addFrameListener(_onFrame);
|
||||
state.voiceProcessor?.addErrorListener(_onError);
|
||||
try {
|
||||
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
|
||||
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
|
||||
final bool? isRecording = await state.voiceProcessor?.isRecording();
|
||||
state.isRecordingAudio.value = isRecording!;
|
||||
state.startRecordingAudioTime.value = DateTime.now();
|
||||
} else {
|
||||
// state.errorMessage.value = 'Recording permission not granted';
|
||||
}
|
||||
} on PlatformException catch (ex) {
|
||||
// state.errorMessage.value = 'Failed to start recorder: $ex';
|
||||
}
|
||||
state.isOpenVoice.value = false;
|
||||
}
|
||||
|
||||
/// 停止录音
|
||||
Future<void> stopProcessingAudio() async {
|
||||
try {
|
||||
await state.voiceProcessor?.stop();
|
||||
state.voiceProcessor?.removeFrameListener(_onFrame);
|
||||
state.udpSendDataFrameNumber = 0;
|
||||
// 记录结束时间
|
||||
state.endRecordingAudioTime.value = DateTime.now();
|
||||
|
||||
// 计算录音的持续时间
|
||||
final duration = state.endRecordingAudioTime.value!
|
||||
.difference(state.startRecordingAudioTime.value!);
|
||||
|
||||
state.recordingAudioTime.value = duration.inSeconds;
|
||||
} on PlatformException catch (ex) {
|
||||
// state.errorMessage.value = 'Failed to stop recorder: $ex';
|
||||
} finally {
|
||||
final bool? isRecording = await state.voiceProcessor?.isRecording();
|
||||
state.isRecordingAudio.value = isRecording!;
|
||||
state.isOpenVoice.value = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 音频帧处理
|
||||
Future<void> _onFrame(List<int> frame) async {
|
||||
// 预处理和转码操作放到异步计算线程
|
||||
// final processedFrame = await compute(preprocessAudio, frame);
|
||||
// final list = listLinearToALaw(processedFrame);
|
||||
final List<int> processedFrame = preprocessAudio(frame);
|
||||
final List<int> list = listLinearToALaw(processedFrame);
|
||||
|
||||
final int ms = DateTime.now().millisecondsSinceEpoch -
|
||||
state.startRecordingAudioTime.value.millisecondsSinceEpoch;
|
||||
|
||||
// 发送音频数据到UDP
|
||||
await StartChartManage().sendTalkDataMessage(
|
||||
talkData: TalkData(
|
||||
content: list,
|
||||
contentType: TalkData_ContentTypeE.G711,
|
||||
durationMs: ms,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
/// 挂断
|
||||
void udpHangUpAction() async {
|
||||
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
|
||||
// 如果是通话中就挂断
|
||||
StartChartManage().startTalkHangupMessageTimer();
|
||||
} else {
|
||||
// 拒绝
|
||||
StartChartManage().startTalkRejectMessageTimer();
|
||||
}
|
||||
Get.back();
|
||||
}
|
||||
|
||||
// 远程开锁
|
||||
Future<void> remoteOpenLock() async {
|
||||
final lockPeerId = StartChartManage().lockPeerId;
|
||||
final lockListPeerId = StartChartManage().lockListPeerId;
|
||||
int lockId = lockDetailState.keyInfos.value.lockId ?? 0;
|
||||
|
||||
// 如果锁列表获取到peerId,代表有多个锁,使用锁列表的peerId
|
||||
// 从列表中遍历出对应的peerId
|
||||
lockListPeerId.forEach((element) {
|
||||
if (element.network?.peerId == lockPeerId) {
|
||||
lockId = element.lockId ?? 0;
|
||||
}
|
||||
});
|
||||
|
||||
final LockSetInfoEntity lockSetInfoEntity =
|
||||
await ApiRepository.to.getLockSettingInfoData(
|
||||
lockId: lockId.toString(),
|
||||
);
|
||||
if (lockSetInfoEntity.errorCode!.codeIsSuccessful) {
|
||||
if (lockSetInfoEntity.data?.lockFeature?.remoteUnlock == 1 &&
|
||||
lockSetInfoEntity.data?.lockSettingInfo?.remoteUnlock == 1) {
|
||||
final LoginEntity entity = await ApiRepository.to
|
||||
.remoteOpenLock(lockId: lockId.toString(), timeOut: 60);
|
||||
if (entity.errorCode!.codeIsSuccessful) {
|
||||
showToast('已开锁'.tr);
|
||||
StartChartManage().lockListPeerId = [];
|
||||
}
|
||||
} else {
|
||||
showToast('该锁的远程开锁功能未启用'.tr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List<int> preprocessAudio(List<int> pcmList) {
|
||||
// 简单的降噪处理
|
||||
final List<int> processedList = [];
|
||||
for (int pcmVal in pcmList) {
|
||||
// 简单的降噪示例:将小于阈值的信号置为0
|
||||
if (pcmVal.abs() < 200) {
|
||||
pcmVal = 0;
|
||||
}
|
||||
processedList.add(pcmVal);
|
||||
}
|
||||
return processedList;
|
||||
}
|
||||
|
||||
List<int> listLinearToALaw(List<int> pcmList) {
|
||||
final List<int> aLawList = [];
|
||||
for (int pcmVal in pcmList) {
|
||||
final int aLawVal = linearToALaw(pcmVal);
|
||||
aLawList.add(aLawVal);
|
||||
}
|
||||
return aLawList;
|
||||
}
|
||||
|
||||
int linearToALaw(int pcmVal) {
|
||||
const int ALAW_MAX = 0x7FFF; // 32767
|
||||
const int ALAW_BIAS = 0x84; // 132
|
||||
|
||||
int mask;
|
||||
int seg;
|
||||
int aLawVal;
|
||||
|
||||
// Handle sign
|
||||
if (pcmVal < 0) {
|
||||
pcmVal = -pcmVal;
|
||||
mask = 0x7F; // 127 (sign bit is 1)
|
||||
} else {
|
||||
mask = 0xFF; // 255 (sign bit is 0)
|
||||
}
|
||||
|
||||
// Add bias and clamp to ALAW_MAX
|
||||
pcmVal += ALAW_BIAS;
|
||||
if (pcmVal > ALAW_MAX) {
|
||||
pcmVal = ALAW_MAX;
|
||||
}
|
||||
|
||||
// Determine segment
|
||||
seg = search(pcmVal);
|
||||
|
||||
// Calculate A-law value
|
||||
if (seg >= 8) {
|
||||
aLawVal = 0x7F ^ mask; // Clamp to maximum value
|
||||
} else {
|
||||
int quantized = (pcmVal >> (seg + 3)) & 0xF;
|
||||
aLawVal = (seg << 4) | quantized;
|
||||
aLawVal ^= 0xD5; // XOR with 0xD5 to match standard A-law table
|
||||
}
|
||||
|
||||
return aLawVal;
|
||||
}
|
||||
|
||||
int search(int val) {
|
||||
final List<int> table = [
|
||||
0xFF, // Segment 0
|
||||
0x1FF, // Segment 1
|
||||
0x3FF, // Segment 2
|
||||
0x7FF, // Segment 3
|
||||
0xFFF, // Segment 4
|
||||
0x1FFF, // Segment 5
|
||||
0x3FFF, // Segment 6
|
||||
0x7FFF // Segment 7
|
||||
];
|
||||
const int size = 8;
|
||||
for (int i = 0; i < size; i++) {
|
||||
if (val <= table[i]) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return size;
|
||||
}
|
||||
|
||||
// 错误监听
|
||||
void _onError(VoiceProcessorException error) {
|
||||
AppLog.log(error.message!);
|
||||
}
|
||||
|
||||
@override
|
||||
void dispose() {
|
||||
// TODO: implement dispose
|
||||
super.dispose();
|
||||
StartChartManage().startTalkHangupMessageTimer();
|
||||
state.animationController.dispose();
|
||||
state.webViewController.clearCache();
|
||||
state.webViewController.reload();
|
||||
state.oneMinuteTimeTimer?.cancel();
|
||||
state.oneMinuteTimeTimer = null;
|
||||
stopProcessingAudio();
|
||||
StartChartManage().reSetDefaultTalkExpect();
|
||||
}
|
||||
}
|
||||
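A short usage sketch for the G.711 A-law helpers defined in this file; the wrapper function and frame source are illustrative assumptions.

```dart
// Illustrative only: turn one recorded 8 kHz PCM frame into A-law bytes
// before it is wrapped into a TalkData message and sent over UDP.
List<int> toALawFrame(H264WebViewLogic logic, List<int> pcm16Frame) {
  final List<int> denoised = logic.preprocessAudio(pcm16Frame); // zero out very quiet samples
  return logic.listLinearToALaw(denoised); // one A-law byte per 16-bit PCM sample
}
```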
@ -1,11 +1,18 @@
|
||||
import 'dart:async';
|
||||
import 'dart:convert';
|
||||
import 'dart:math';
|
||||
import 'package:flutter/material.dart';
|
||||
import 'package:flutter/services.dart' show ByteData, Uint8List, rootBundle;
|
||||
import 'package:flutter_screenutil/flutter_screenutil.dart';
|
||||
import 'package:get/get.dart';
|
||||
import 'package:star_lock/app_settings/app_colors.dart';
|
||||
import 'package:star_lock/app_settings/app_settings.dart';
|
||||
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
|
||||
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
|
||||
import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
|
||||
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
|
||||
import 'package:star_lock/talk/starChart/webView/h264_web_logic.dart';
|
||||
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
|
||||
import 'package:star_lock/tools/titleAppBar.dart';
|
||||
import 'package:webview_flutter/webview_flutter.dart';
|
||||
|
||||
@ -14,169 +21,400 @@ class H264WebView extends StatefulWidget {
|
||||
_H264WebViewState createState() => _H264WebViewState();
|
||||
}
|
||||
|
||||
class _H264WebViewState extends State<H264WebView> {
|
||||
late final WebViewController _controller;
|
||||
Timer? timer;
|
||||
Timer? _sendTimer;
|
||||
|
||||
// 私有缓冲区,外部无法直接访问
|
||||
final List<int> _buffer = [];
|
||||
|
||||
// 发送数据至html文件间隔时间
|
||||
final int sendDataToHtmlIntervalTime = 820;
|
||||
|
||||
// 通话数据流的单例流数据处理类
|
||||
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
|
||||
class _H264WebViewState extends State<H264WebView>
|
||||
with TickerProviderStateMixin {
|
||||
final H264WebViewLogic logic = Get.put(H264WebViewLogic());
|
||||
final H264WebViewState state = Get.find<H264WebViewLogic>().state;
|
||||
|
||||
@override
|
||||
void initState() {
|
||||
// TODO: implement initState
|
||||
super.initState();
|
||||
state.animationController = AnimationController(
|
||||
vsync: this, // 确保使用的TickerProvider是当前Widget
|
||||
duration: const Duration(seconds: 1),
|
||||
);
|
||||
|
||||
_controller = WebViewController()
|
||||
..setJavaScriptMode(JavaScriptMode.unrestricted)
|
||||
..enableZoom(false)
|
||||
..addJavaScriptChannel(
|
||||
'Flutter',
|
||||
onMessageReceived: (message) {
|
||||
print("来自 HTML 的消息: ${message.message}");
|
||||
},
|
||||
);
|
||||
|
||||
// 加载本地 HTML
|
||||
_loadLocalHtml();
|
||||
simulateStreamFromAsset();
|
||||
_sendFramesToHtml();
|
||||
}
|
||||
|
||||
  void simulateStreamFromAsset() async {
    // Read the sample H264 file from assets
    final ByteData data = await rootBundle.load('assets/talk.h264');
    final List<int> byteData = data.buffer.asUint8List();
    int current = 0;
    int start = 0;
    int end = 0;
    final List<int> chunks = extractChunks(byteData);
    // A periodic timer paces how fast the chunks are emitted
    timer ??= Timer.periodic(Duration(milliseconds: 10), (timer) {
      if (current >= chunks.length) {
        print('Sample data fully sent; resetting counters');
        start = 0;
        end = 0;
        current = 0;
        timer.cancel();
        return;
      }
      // Take the next NALU-chunk boundary
      end = chunks[current];
      current++;
      List<int> frameData = byteData.sublist(start, end);
      if (frameData.isEmpty) timer.cancel();

      talkDataRepository.addTalkData(TalkData(
          contentType: TalkData_ContentTypeE.H264, content: frameData));
      start = end;
    });
  }
|
||||
|
||||
void _sendFramesToHtml() async {
|
||||
// 接收到流数据,保存到缓冲区
|
||||
// talkDataRepository.talkDataStream.listen((TalkData event) async {
|
||||
// _buffer.addAll(event.content);
|
||||
// });
|
||||
// 缓冲800ms的数据,定时发送
|
||||
_sendTimer ??= Timer.periodic(
|
||||
Duration(milliseconds: sendDataToHtmlIntervalTime), (timer) async {
|
||||
// 发送累积的数据
|
||||
if (_buffer.isNotEmpty) {
|
||||
await _sendBufferedData(_buffer);
|
||||
_buffer.clear(); // 清空缓冲区
|
||||
state.animationController.repeat();
|
||||
//动画开始、结束、向前移动或向后移动时会调用StatusListener
|
||||
state.animationController.addStatusListener((AnimationStatus status) {
|
||||
if (status == AnimationStatus.completed) {
|
||||
state.animationController.reset();
|
||||
state.animationController.forward();
|
||||
} else if (status == AnimationStatus.dismissed) {
|
||||
state.animationController.reset();
|
||||
state.animationController.forward();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// 提取 NALU 边界并生成 chunks
|
||||
List<int> extractChunks(List<int> byteData) {
|
||||
int i = 0;
|
||||
int length = byteData.length;
|
||||
int naluCount = 0;
|
||||
int value;
|
||||
int state = 0;
|
||||
int lastIndex = 0;
|
||||
List<int> result = [];
|
||||
const minNaluPerChunk = 22; // 每个数据块包含的最小NALU数量
|
||||
|
||||
while (i < length) {
|
||||
value = byteData[i++];
|
||||
// finding 3 or 4-byte start codes (00 00 01 OR 00 00 00 01)
|
||||
switch (state) {
|
||||
case 0:
|
||||
if (value == 0) {
|
||||
state = 1;
|
||||
}
|
||||
break;
|
||||
case 1:
|
||||
if (value == 0) {
|
||||
state = 2;
|
||||
} else {
|
||||
state = 0;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
case 3:
|
||||
if (value == 0) {
|
||||
state = 3;
|
||||
} else if (value == 1 && i < length) {
|
||||
if (lastIndex > 0) {
|
||||
naluCount++;
|
||||
}
|
||||
if (naluCount >= minNaluPerChunk) {
|
||||
result.add(lastIndex - state - 1);
|
||||
naluCount = 0;
|
||||
}
|
||||
state = 0;
|
||||
lastIndex = i;
|
||||
} else {
|
||||
state = 0;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (naluCount > 0) {
|
||||
result.add(lastIndex);
|
||||
}
|
||||
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// 加载html文件
|
||||
Future<void> _loadLocalHtml() async {
|
||||
final String fileHtmlContent =
|
||||
await rootBundle.loadString('assets/html/h264.html');
|
||||
_controller.loadHtmlString(fileHtmlContent);
|
||||
}
|
||||
|
||||
// 发送数据给js处理
|
||||
_sendBufferedData(List<int> buffer) async {
|
||||
String jsCode = "feedDataFromFlutter(${buffer});";
|
||||
await _controller.runJavaScript(jsCode);
|
||||
}
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return WebViewWidget(controller: _controller);
|
||||
return WillPopScope(
|
||||
onWillPop: () async {
|
||||
// 返回 false 表示禁止退出
|
||||
return false;
|
||||
},
|
||||
child: SizedBox(
|
||||
width: 1.sw,
|
||||
height: 1.sh,
|
||||
child: Stack(
|
||||
alignment: Alignment.center,
|
||||
children: [
|
||||
Obx(() {
|
||||
final double screenWidth = MediaQuery.of(context).size.width;
|
||||
final double screenHeight = MediaQuery.of(context).size.height;
|
||||
return state.isShowLoading.value
|
||||
? Image.asset(
|
||||
'images/main/monitorBg.png',
|
||||
width: screenWidth,
|
||||
height: screenHeight,
|
||||
fit: BoxFit.cover,
|
||||
)
|
||||
: WebViewWidget(
|
||||
controller: state.webViewController,
|
||||
);
|
||||
}),
|
||||
Obx(
|
||||
() => state.isShowLoading.value
|
||||
? Positioned(
|
||||
bottom: 310.h,
|
||||
child: Text(
|
||||
'正在创建安全连接...'.tr,
|
||||
style: TextStyle(color: Colors.black, fontSize: 26.sp),
|
||||
),
|
||||
)
|
||||
: Container(),
|
||||
),
|
||||
Obx(
|
||||
() => state.isShowLoading.isFalse
|
||||
? Positioned(
|
||||
top: ScreenUtil().statusBarHeight + 75.h,
|
||||
width: 1.sw,
|
||||
child: Obx(
|
||||
() {
|
||||
final String sec = (state.oneMinuteTime.value % 60)
|
||||
.toString()
|
||||
.padLeft(2, '0');
|
||||
final String min = (state.oneMinuteTime.value ~/ 60)
|
||||
.toString()
|
||||
.padLeft(2, '0');
|
||||
return Row(
|
||||
mainAxisAlignment: MainAxisAlignment.center,
|
||||
children: <Widget>[
|
||||
Text(
|
||||
'$min:$sec',
|
||||
style: TextStyle(
|
||||
fontSize: 26.sp, color: Colors.white),
|
||||
),
|
||||
],
|
||||
);
|
||||
},
|
||||
),
|
||||
)
|
||||
: Container(),
|
||||
),
|
||||
Positioned(
|
||||
bottom: 10.w,
|
||||
child: Container(
|
||||
width: 1.sw - 30.w * 2,
|
||||
// height: 300.h,
|
||||
margin: EdgeInsets.all(30.w),
|
||||
decoration: BoxDecoration(
|
||||
color: Colors.black.withOpacity(0.2),
|
||||
borderRadius: BorderRadius.circular(20.h)),
|
||||
child: Column(
|
||||
children: <Widget>[
|
||||
SizedBox(height: 20.h),
|
||||
bottomTopBtnWidget(),
|
||||
SizedBox(height: 20.h),
|
||||
bottomBottomBtnWidget(),
|
||||
SizedBox(height: 20.h),
|
||||
],
|
||||
),
|
||||
),
|
||||
),
|
||||
Obx(() => state.isShowLoading.isTrue
|
||||
? buildRotationTransition()
|
||||
: Container()),
|
||||
Obx(() => state.isLongPressing.value
|
||||
? Positioned(
|
||||
top: 80.h,
|
||||
left: 0,
|
||||
right: 0,
|
||||
child: Center(
|
||||
child: Container(
|
||||
padding: EdgeInsets.all(10.w),
|
||||
decoration: BoxDecoration(
|
||||
color: Colors.black.withOpacity(0.7),
|
||||
borderRadius: BorderRadius.circular(10.w),
|
||||
),
|
||||
child: Row(
|
||||
mainAxisSize: MainAxisSize.min,
|
||||
children: <Widget>[
|
||||
Icon(Icons.mic, color: Colors.white, size: 24.w),
|
||||
SizedBox(width: 10.w),
|
||||
Text(
|
||||
'正在说话...'.tr,
|
||||
style: TextStyle(
|
||||
fontSize: 20.sp, color: Colors.white),
|
||||
),
|
||||
],
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
: Container()),
|
||||
],
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
Widget bottomTopBtnWidget() {
|
||||
return Row(mainAxisAlignment: MainAxisAlignment.center, children: <Widget>[
|
||||
// 打开关闭声音
|
||||
GestureDetector(
|
||||
onTap: () {
|
||||
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
|
||||
// 打开关闭声音
|
||||
logic.updateTalkExpect();
|
||||
}
|
||||
},
|
||||
child: Container(
|
||||
width: 50.w,
|
||||
height: 50.w,
|
||||
padding: EdgeInsets.all(5.w),
|
||||
child: Obx(() => Image(
|
||||
width: 40.w,
|
||||
height: 40.w,
|
||||
image: state.isOpenVoice.value
|
||||
? const AssetImage(
|
||||
'images/main/icon_lockDetail_monitoringOpenVoice.png')
|
||||
: const AssetImage(
|
||||
'images/main/icon_lockDetail_monitoringCloseVoice.png'))),
|
||||
),
|
||||
),
|
||||
SizedBox(width: 50.w),
|
||||
// 截图
|
||||
GestureDetector(
|
||||
onTap: () async {
|
||||
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
|
||||
await logic.captureAndSavePng();
|
||||
}
|
||||
},
|
||||
child: Container(
|
||||
width: 50.w,
|
||||
height: 50.w,
|
||||
padding: EdgeInsets.all(5.w),
|
||||
child: Image(
|
||||
width: 40.w,
|
||||
height: 40.w,
|
||||
image: const AssetImage(
|
||||
'images/main/icon_lockDetail_monitoringScreenshot.png')),
|
||||
),
|
||||
),
|
||||
SizedBox(width: 50.w),
|
||||
// 录制
|
||||
GestureDetector(
|
||||
onTap: () async {
|
||||
logic.showToast('功能暂未开放'.tr);
|
||||
// if (
|
||||
// state.talkStatus.value == TalkStatus.answeredSuccessfully) {
|
||||
// if (state.isRecordingScreen.value) {
|
||||
// await logic.stopRecording();
|
||||
// } else {
|
||||
// await logic.startRecording();
|
||||
// }
|
||||
// }
|
||||
},
|
||||
child: Container(
|
||||
width: 50.w,
|
||||
height: 50.w,
|
||||
padding: EdgeInsets.all(5.w),
|
||||
child: Image(
|
||||
width: 40.w,
|
||||
height: 40.w,
|
||||
fit: BoxFit.fill,
|
||||
image: const AssetImage(
|
||||
'images/main/icon_lockDetail_monitoringScreenRecording.png'),
|
||||
),
|
||||
),
|
||||
),
|
||||
SizedBox(width: 50.w),
|
||||
GestureDetector(
|
||||
onTap: () {
|
||||
logic.showToast('功能暂未开放'.tr);
|
||||
},
|
||||
child: Image(
|
||||
width: 28.w,
|
||||
height: 28.w,
|
||||
fit: BoxFit.fill,
|
||||
image: const AssetImage('images/main/icon_lockDetail_rectangle.png'),
|
||||
),
|
||||
),
|
||||
]);
|
||||
}
|
||||
|
||||
Widget bottomBottomBtnWidget() {
|
||||
return Row(
|
||||
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
|
||||
children: <Widget>[
|
||||
// 接听
|
||||
Obx(
|
||||
() => bottomBtnItemWidget(
|
||||
getAnswerBtnImg(),
|
||||
getAnswerBtnName(),
|
||||
Colors.white,
|
||||
longPress: () async {
|
||||
if (state.talkStatus.value == TalkStatus.answeredSuccessfully) {
|
||||
// 启动录音
|
||||
logic.startProcessingAudio();
|
||||
state.isLongPressing.value = true;
|
||||
}
|
||||
},
|
||||
longPressUp: () async {
|
||||
// 停止录音
|
||||
logic.stopProcessingAudio();
|
||||
state.isLongPressing.value = false;
|
||||
},
|
||||
onClick: () async {
|
||||
if (state.talkStatus.value ==
|
||||
TalkStatus.passiveCallWaitingAnswer) {
|
||||
// 接听
|
||||
logic.initiateAnswerCommand();
|
||||
}
|
||||
},
|
||||
),
|
||||
),
|
||||
bottomBtnItemWidget(
|
||||
'images/main/icon_lockDetail_hangUp.png', '挂断'.tr, Colors.red,
|
||||
onClick: () {
|
||||
// 挂断
|
||||
logic.udpHangUpAction();
|
||||
}),
|
||||
bottomBtnItemWidget(
|
||||
'images/main/icon_lockDetail_monitoringUnlock.png',
|
||||
'开锁'.tr,
|
||||
AppColors.mainColor,
|
||||
onClick: () {
|
||||
// if (state.talkStatus.value == TalkStatus.answeredSuccessfully &&
|
||||
// state.listData.value.length > 0) {
|
||||
// logic.udpOpenDoorAction();
|
||||
logic.remoteOpenLock();
|
||||
// }
|
||||
// if (UDPManage().remoteUnlock == 1) {
|
||||
// logic.udpOpenDoorAction();
|
||||
// showDeletPasswordAlertDialog(context);
|
||||
// } else {
|
||||
// logic.showToast('请在锁设置中开启远程开锁'.tr);
|
||||
// }
|
||||
},
|
||||
)
|
||||
]);
|
||||
}
|
||||
|
||||
String getAnswerBtnImg() {
|
||||
switch (state.talkStatus.value) {
|
||||
case TalkStatus.passiveCallWaitingAnswer:
|
||||
return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
|
||||
case TalkStatus.answeredSuccessfully:
|
||||
case TalkStatus.proactivelyCallWaitingAnswer:
|
||||
return 'images/main/icon_lockDetail_monitoringUnTalkback.png';
|
||||
default:
|
||||
return 'images/main/icon_lockDetail_monitoringAnswerCalls.png';
|
||||
}
|
||||
}
|
||||
|
||||
String getAnswerBtnName() {
|
||||
switch (state.talkStatus.value) {
|
||||
case TalkStatus.passiveCallWaitingAnswer:
|
||||
return '接听'.tr;
|
||||
case TalkStatus.proactivelyCallWaitingAnswer:
|
||||
case TalkStatus.answeredSuccessfully:
|
||||
return '长按说话'.tr;
|
||||
default:
|
||||
return '接听'.tr;
|
||||
}
|
||||
}
|
||||
|
||||
Widget bottomBtnItemWidget(
|
||||
String iconUrl,
|
||||
String name,
|
||||
Color backgroundColor, {
|
||||
required Function() onClick,
|
||||
Function()? longPress,
|
||||
Function()? longPressUp,
|
||||
}) {
|
||||
double wh = 80.w;
|
||||
return GestureDetector(
|
||||
onTap: onClick,
|
||||
onLongPress: longPress,
|
||||
onLongPressUp: longPressUp,
|
||||
child: SizedBox(
|
||||
height: 160.w,
|
||||
width: 140.w,
|
||||
child: Column(
|
||||
crossAxisAlignment: CrossAxisAlignment.center,
|
||||
children: <Widget>[
|
||||
Container(
|
||||
width: wh,
|
||||
height: wh,
|
||||
constraints: BoxConstraints(
|
||||
minWidth: wh,
|
||||
),
|
||||
decoration: BoxDecoration(
|
||||
color: backgroundColor,
|
||||
borderRadius: BorderRadius.circular((wh + 10.w * 2) / 2),
|
||||
),
|
||||
padding: EdgeInsets.all(20.w),
|
||||
child: Image.asset(iconUrl, fit: BoxFit.fitWidth),
|
||||
),
|
||||
SizedBox(height: 20.w),
|
||||
Text(
|
||||
name,
|
||||
style: TextStyle(fontSize: 20.sp, color: Colors.white),
|
||||
            textAlign: TextAlign.center,
            maxLines: 2, // when the text exceeds the line limit it is shown with an ellipsis
|
||||
)
|
||||
],
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
//旋转动画
|
||||
Widget buildRotationTransition() {
|
||||
return Positioned(
|
||||
left: ScreenUtil().screenWidth / 2 - 220.w / 2,
|
||||
top: ScreenUtil().screenHeight / 2 - 220.w / 2 - 150.h,
|
||||
child: GestureDetector(
|
||||
child: RotationTransition(
|
||||
//设置动画的旋转中心
|
||||
alignment: Alignment.center,
|
||||
//动画控制器
|
||||
turns: state.animationController,
|
||||
//将要执行动画的子view
|
||||
child: AnimatedOpacity(
|
||||
opacity: 0.5,
|
||||
duration: const Duration(seconds: 2),
|
||||
child: Image.asset(
|
||||
'images/main/realTime_connecting.png',
|
||||
width: 220.w,
|
||||
height: 220.w,
|
||||
),
|
||||
),
|
||||
),
|
||||
onTap: () {
|
||||
state.animationController.forward();
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
@override
|
||||
void dispose() {
|
||||
    timer?.cancel();
    timer = null;
    _sendTimer?.cancel();
    _sendTimer = null;
    // talkDataRepository.dispose();
    state.animationController.dispose(); // make sure the controller is released
    super.dispose();
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
52  lib/talk/starChart/webView/h264_web_view_state.dart (new file)
@@ -0,0 +1,52 @@
import 'dart:async';

import 'package:flutter/cupertino.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:get/get.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/status/star_chart_talk_status.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:webview_flutter/webview_flutter.dart';

class H264WebViewState {
  GlobalKey globalKey = GlobalKey();
  int udpSendDataFrameNumber = 0; // frame sequence number
  late AnimationController animationController;

  // WebView controller
  late final WebViewController webViewController;

  // The singleton instance of StartChartTalkStatus
  final StartChartTalkStatus startChartTalkStatus =
      StartChartTalkStatus.instance;
  Rx<TalkStatus> talkStatus = TalkStatus.none.obs; // Star Chart talk status

  RxBool isShowLoading = true.obs;

  Timer? oneMinuteTimeTimer; // timer used to close the current page after 60 seconds
  RxInt oneMinuteTime = 0.obs; // elapsed seconds of the timer

  RxBool isLongPressing = false.obs; // whether the user is long-pressing to talk
  final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
  RxInt lastFrameTimestamp = 0.obs; // timestamp of the previous frame, used to judge network conditions
  Rx<NetworkStatus> networkStatus =
      NetworkStatus.normal.obs; // network status: 0 normal, 1 stutter, 2 latency, 3 packet loss
  RxInt alertCount = 0.obs; // counter of network-status alerts shown
  RxInt maxAlertNumber = 3.obs; // maximum number of network-status alerts to show
  RxBool isOpenVoice = true.obs; // whether sound is on
  RxBool isRecordingScreen = false.obs; // whether the screen is being recorded
  RxBool isRecordingAudio = false.obs; // whether audio is being recorded
  Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // time recording started
  Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; // time recording ended
  RxInt recordingAudioTime = 0.obs; // recording duration in seconds
  RxDouble fps = 0.0.obs; // FPS counter
  late VoiceProcessor? voiceProcessor; // audio processor / recorder
  final int frameLength = 320; // audio frame length in samples
  final int sampleRate = 8000; // audio sample rate of 8000 Hz
  List<int> recordingAudioAllFrames = <int>[]; // all frames of recorded audio
  List<int> lockRecordingAudioAllFrames = <int>[]; // all frames of recorded audio
  RxInt rotateAngle = 0.obs; // rotation angle (in radians)
  RxBool hasAudioData = false.obs; // whether audio data has been received
  RxInt lastAudioTimestamp = 0.obs; // timestamp of the most recently received audio data
}
@@ -992,8 +992,8 @@ packages:
    dependency: "direct main"
    description:
      path: "."
      ref: main
      resolved-ref: aa93729f48762421658675800be68aee27b6d8fb
      ref: "807ddb8e396c2dce16919df84efe795072404dde"
      resolved-ref: "807ddb8e396c2dce16919df84efe795072404dde"
      url: "git@code-internal.star-lock.cn:StarlockTeam/jpush_flutter.git"
    source: git
    version: "2.5.8"
@@ -214,7 +214,7 @@ dependencies:
  jpush_flutter:
    git:
      url: git@code-internal.star-lock.cn:StarlockTeam/jpush_flutter.git
      ref: main
      ref: 807ddb8e396c2dce16919df84efe795072404dde

  # video player
  video_player: ^2.9.2
@@ -316,6 +316,7 @@ flutter:
    - images/lockType/
    - assets/
    - assets/html/h264.html
    - assets/html/jmuxer.min.js
    - lan/
  # An image asset can refer to one or more resolution-specific "variants", see
  # https://flutter.dev/assets-and-images/#resolution-aware