fix:增加h264调试页面

This commit is contained in:
liyi 2025-02-21 15:55:35 +08:00
parent 34ca2a3f75
commit 5e1979d8b3
17 changed files with 316 additions and 2826 deletions

File diff suppressed because it is too large Load Diff

1
assets/html/jmuxer.min.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -61,6 +61,7 @@ import 'package:star_lock/mine/valueAddedServices/advancedFeaturesWeb/advancedFe
import 'package:star_lock/mine/valueAddedServices/advancedFunctionRecord/advancedFunctionRecord_page.dart'; import 'package:star_lock/mine/valueAddedServices/advancedFunctionRecord/advancedFunctionRecord_page.dart';
import 'package:star_lock/mine/valueAddedServices/valueAddedServicesRecord/value_added_services_record_page.dart'; import 'package:star_lock/mine/valueAddedServices/valueAddedServicesRecord/value_added_services_record_page.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_page.dart'; import 'package:star_lock/talk/starChart/views/talkView/talk_view_page.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view.dart';
import 'common/safetyVerification/safetyVerification_page.dart'; import 'common/safetyVerification/safetyVerification_page.dart';
import 'login/forgetPassword/starLock_forgetPassword_page.dart'; import 'login/forgetPassword/starLock_forgetPassword_page.dart';
@ -515,6 +516,7 @@ abstract class Routers {
static const String doubleLockLinkPage = '/doubleLockLinkPage'; // static const String doubleLockLinkPage = '/doubleLockLinkPage'; //
static const String starChartPage = '/starChartPage'; // static const String starChartPage = '/starChartPage'; //
static const String starChartTalkView = '/starChartTalkView'; // static const String starChartTalkView = '/starChartTalkView'; //
static const String h264WebView = '/h264WebView'; //
} }
abstract class AppRouters { abstract class AppRouters {
@ -1195,5 +1197,6 @@ abstract class AppRouters {
page: () => const DoubleLockLinkPage()), page: () => const DoubleLockLinkPage()),
GetPage<dynamic>( GetPage<dynamic>(
name: Routers.starChartTalkView, page: () => const TalkViewPage()), name: Routers.starChartTalkView, page: () => const TalkViewPage()),
GetPage<dynamic>(name: Routers.h264WebView, page: () => H264WebView()),
]; ];
} }

View File

@ -2,6 +2,7 @@ import 'dart:typed_data';
import 'package:star_lock/app_settings/app_settings.dart'; import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/constant/message_type_constant.dart'; import 'package:star_lock/talk/starChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart'; import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart'; import 'package:star_lock/talk/starChart/handle/scp_message_base_handle.dart';
import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart'; import 'package:star_lock/talk/starChart/handle/scp_message_handle.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart'; import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
@ -158,16 +159,13 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
void _handleVideoH264(TalkData talkData) { void _handleVideoH264(TalkData talkData) {
final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame(); final TalkDataH264Frame talkDataH264Frame = TalkDataH264Frame();
talkDataH264Frame.mergeFromBuffer(talkData.content); talkDataH264Frame.mergeFromBuffer(talkData.content);
// AppLog.log('H264 TalkData :$talkDataH264Frame'); frameHandler.handleFrame(talkDataH264Frame);
// talkDataRepository.addTalkData(talkData);
} }
/// ///
void _handleVideoImage(TalkData talkData) async { void _handleVideoImage(TalkData talkData) async {
final List<Uint8List> processCompletePayload = final List<Uint8List> processCompletePayload =
await _processCompletePayload(Uint8List.fromList(talkData.content)); await _processCompletePayload(Uint8List.fromList(talkData.content));
// AppLog.log('得到完整的帧:${processCompletePayload.length}'); //
processCompletePayload.forEach((element) { processCompletePayload.forEach((element) {
talkData.content = element; talkData.content = element;
talkDataRepository.addTalkData(talkData); talkDataRepository.addTalkData(talkData);
@ -181,7 +179,7 @@ class UdpTalkDataHandler extends ScpMessageBaseHandle
// // pcm数据 // // pcm数据
// List<int> pcmBytes = G711().convertList(g711Data); // List<int> pcmBytes = G711().convertList(g711Data);
// talkData.content = pcmBytes; // talkData.content = pcmBytes;
talkDataRepository.addTalkData(talkData); // talkDataRepository.addTalkData(talkData);
} catch (e) { } catch (e) {
print('Error decoding G.711 to PCM: $e'); print('Error decoding G.711 to PCM: $e');
} }

View File

@ -73,9 +73,19 @@ class UdpTalkRequestHandler extends ScpMessageBaseHandle
// //
talkeRequestOverTimeTimerManager.start(); talkeRequestOverTimeTimerManager.start();
// //
Get.toNamed( if (startChartManage
Routers.starChartTalkView, .getDefaultTalkExpect()
); .videoType
.indexOf(VideoTypeE.H264) ==
-1) {
Get.toNamed(
Routers.starChartTalkView,
);
} else {
Get.toNamed(
Routers.h264WebView,
);
}
} }
// //

View File

@ -0,0 +1,22 @@
import 'dart:typed_data';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
/// Accumulates H.264 frames and serializes them into a single
/// Annex-B byte stream (each frame payload prefixed with a start code).
class H264FrameBuffer {
  /// Frames queued for serialization, in insertion order.
  List<TalkDataH264Frame> frames = [];

  /// Appends [frame] to the buffer.
  void addFrame(TalkDataH264Frame frame) {
    frames.add(frame);
  }

  /// Returns all buffered frames as one contiguous Annex-B stream.
  ///
  /// Each frame's payload is prefixed with the 3-byte start code
  /// 0x00 0x00 0x01 so a downstream decoder can locate NALU boundaries.
  /// The buffer itself is left untouched; call [clear] to release frames.
  Uint8List getCompleteStream() {
    final List<int> completeStream = [];
    for (final frame in frames) {
      // Annex-B start code: 0x00 0x00 0x01
      completeStream.addAll([0x00, 0x00, 0x01]);
      completeStream.addAll(frame.frameData);
    }
    return Uint8List.fromList(completeStream);
  }

  /// Releases all buffered frames.
  ///
  /// Without this the list grows without bound and [getCompleteStream]
  /// keeps re-emitting every frame ever received.
  void clear() {
    frames.clear();
  }
}

View File

@ -0,0 +1,84 @@
import 'package:star_lock/app_settings/app_settings.dart';
import '../../proto/talk_data_h264_frame.pb.dart';
/// Reassembles possibly out-of-order H.264 frames into decodable runs.
///
/// Incoming frames are buffered by sequence number. When a run of
/// consecutive frames starting at a usable anchor (an I-frame, or a
/// P-frame whose referenced I-frame is still buffered) exists, their
/// payloads are concatenated and handed to [onCompleteFrame].
class H264FrameHandler {
  // Frames received so far, keyed by frame sequence number.
  final Map<int, TalkDataH264Frame> _frameBuffer = {};

  // Invoked with the concatenated payload of each assembled run.
  final void Function(List<int> frameData) onCompleteFrame;

  // NOTE(review): never read or updated after initialization — appears
  // to be dead state; confirm before relying on it.
  int _lastProcessedSeq = -1;

  H264FrameHandler({required this.onCompleteFrame});

  /// Buffers [frame] and attempts to assemble a decodable run
  /// within the current GOP (Group of Pictures).
  void handleFrame(TalkDataH264Frame frame) {
    // Index by sequence number; a duplicate seq overwrites the old frame.
    _frameBuffer[frame.frameSeq] = frame;
    _tryAssembleFrames(frame.frameSeq);
  }

  // Scans the buffer for the newest usable anchor frame, then emits the
  // longest gap-free run of sequence numbers starting at that anchor.
  void _tryAssembleFrames(int currentSeq) {
    // Work on the buffered sequence numbers in ascending order.
    final List<int> sortedSeqs = _frameBuffer.keys.toList()..sort();
    final List<int> framesToProcess = [];

    // Walk newest-to-oldest looking for an anchor: an I-frame, or a
    // P-frame whose reference I-frame is available.
    int? startFrameSeq;
    for (var seq in sortedSeqs.reversed) {
      final frame = _frameBuffer[seq];
      if (frame?.frameType == TalkDataH264Frame_FrameTypeE.I) {
        startFrameSeq = seq;
        break;
      } else if (frame?.frameType == TalkDataH264Frame_FrameTypeE.P) {
        // A P-frame is only decodable if the I-frame it references
        // (frameSeqI) is still buffered.
        if (_frameBuffer.containsKey(frame?.frameSeqI)) {
          startFrameSeq = seq;
          break;
        } else {
          // Orphan P-frame: its I-frame is gone, so drop it.
          _frameBuffer.remove(seq);
        }
      }
    }

    if (startFrameSeq != null) {
      // Collect the run of strictly consecutive seqs from the anchor;
      // stop at the first gap so we never emit frames out of order.
      int expectedSeq = startFrameSeq;
      for (var seq in sortedSeqs.where((s) => s >= startFrameSeq!)) {
        if (seq != expectedSeq) break;
        framesToProcess.add(seq);
        expectedSeq++;
      }
      if (framesToProcess.isNotEmpty) {
        _processFrames(framesToProcess);
      }
    } else {
      // No anchor found: evict stale frames so the buffer stays bounded.
      _clearOldFrames(currentSeq);
    }
  }

  // Evicts frames whose seq lags the current one by more than 200.
  void _clearOldFrames(int currentSeq) {
    // 200 is the staleness window — TODO confirm against frame rate.
    _frameBuffer.removeWhere((seq, frame) => seq < currentSeq - 200); // evict stale entries
  }

  // Concatenates the payloads of [frameSeqs] (already validated as
  // consecutive), removes them from the buffer, and emits the result.
  void _processFrames(List<int> frameSeqs) {
    final List<int> assembledData = [];
    for (var seq in frameSeqs) {
      final frame = _frameBuffer[seq]!;
      assembledData.addAll(frame.frameData);
      // Consumed — drop from the buffer so it is not re-emitted.
      _frameBuffer.remove(seq);
    }
    // Hand the assembled run to the consumer (e.g. the WebView player).
    onCompleteFrame(assembledData);
  }

  /// Discards all buffered frames.
  void clear() {
    _frameBuffer.clear();
  }
}

View File

@ -1,5 +1,4 @@
import 'dart:async'; import 'dart:async';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart'; import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
class TalkDataRepository { class TalkDataRepository {
@ -27,6 +26,9 @@ class TalkDataRepository {
bool _isListening = false; bool _isListening = false;
//
final List<TalkData> _buffer = [];
// Stream // Stream
Stream<TalkData> get talkDataStream => Stream<TalkData> get talkDataStream =>
_talkDataStreamController.stream.transform( _talkDataStreamController.stream.transform(
@ -41,14 +43,11 @@ class TalkDataRepository {
}, },
), ),
); );
final List<TalkData> _buffer = []; //
// TalkData Stream // TalkData Stream
void addTalkData(TalkData talkData) async { void addTalkData(TalkData talkData) {
if (_isListening) { if (_isListening) {
Future.microtask(() { _talkDataStreamController.add(talkData);
_talkDataStreamController.add(talkData);
});
} }
} }

View File

@ -14,6 +14,7 @@ import 'package:star_lock/talk/starChart/constant/payload_type_constant.dart';
import 'package:star_lock/talk/starChart/constant/udp_constant.dart'; import 'package:star_lock/talk/starChart/constant/udp_constant.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart'; import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/h264_frame_handler.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart'; import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/handle/other/talke_data_over_time_timer_manager.dart'; import 'package:star_lock/talk/starChart/handle/other/talke_data_over_time_timer_manager.dart';
@ -52,6 +53,15 @@ class ScpMessageBaseHandle {
final audioManager = AudioPlayerManager(); final audioManager = AudioPlayerManager();
//
final H264FrameHandler frameHandler =
H264FrameHandler(onCompleteFrame: (frameData) {
//
TalkDataRepository.instance.addTalkData(
TalkData(contentType: TalkData_ContentTypeE.H264, content: frameData),
);
});
// //
void replySuccessMessage(ScpMessage scpMessage) { void replySuccessMessage(ScpMessage scpMessage) {
startChartManage.sendGenericRespSuccessMessage( startChartManage.sendGenericRespSuccessMessage(

View File

@ -22,6 +22,7 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
$core.int? frameSeq, $core.int? frameSeq,
TalkDataH264Frame_FrameTypeE? frameType, TalkDataH264Frame_FrameTypeE? frameType,
$core.List<$core.int>? frameData, $core.List<$core.int>? frameData,
$core.int? frameSeqI,
}) { }) {
final $result = create(); final $result = create();
if (frameSeq != null) { if (frameSeq != null) {
@ -33,6 +34,9 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
if (frameData != null) { if (frameData != null) {
$result.frameData = frameData; $result.frameData = frameData;
} }
if (frameSeqI != null) {
$result.frameSeqI = frameSeqI;
}
return $result; return $result;
} }
TalkDataH264Frame._() : super(); TalkDataH264Frame._() : super();
@ -43,6 +47,7 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
..a<$core.int>(1, _omitFieldNames ? '' : 'FrameSeq', $pb.PbFieldType.OU3, protoName: 'FrameSeq') ..a<$core.int>(1, _omitFieldNames ? '' : 'FrameSeq', $pb.PbFieldType.OU3, protoName: 'FrameSeq')
..e<TalkDataH264Frame_FrameTypeE>(2, _omitFieldNames ? '' : 'FrameType', $pb.PbFieldType.OE, protoName: 'FrameType', defaultOrMaker: TalkDataH264Frame_FrameTypeE.NONE, valueOf: TalkDataH264Frame_FrameTypeE.valueOf, enumValues: TalkDataH264Frame_FrameTypeE.values) ..e<TalkDataH264Frame_FrameTypeE>(2, _omitFieldNames ? '' : 'FrameType', $pb.PbFieldType.OE, protoName: 'FrameType', defaultOrMaker: TalkDataH264Frame_FrameTypeE.NONE, valueOf: TalkDataH264Frame_FrameTypeE.valueOf, enumValues: TalkDataH264Frame_FrameTypeE.values)
..a<$core.List<$core.int>>(3, _omitFieldNames ? '' : 'FrameData', $pb.PbFieldType.OY, protoName: 'FrameData') ..a<$core.List<$core.int>>(3, _omitFieldNames ? '' : 'FrameData', $pb.PbFieldType.OY, protoName: 'FrameData')
..a<$core.int>(4, _omitFieldNames ? '' : 'FrameSeqI', $pb.PbFieldType.OU3, protoName: 'FrameSeqI')
..hasRequiredFields = false ..hasRequiredFields = false
; ;
@ -95,6 +100,16 @@ class TalkDataH264Frame extends $pb.GeneratedMessage {
$core.bool hasFrameData() => $_has(2); $core.bool hasFrameData() => $_has(2);
@$pb.TagNumber(3) @$pb.TagNumber(3)
void clearFrameData() => clearField(3); void clearFrameData() => clearField(3);
/// I
@$pb.TagNumber(4)
$core.int get frameSeqI => $_getIZ(3);
@$pb.TagNumber(4)
set frameSeqI($core.int v) { $_setUnsignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasFrameSeqI() => $_has(3);
@$pb.TagNumber(4)
void clearFrameSeqI() => clearField(4);
} }

View File

@ -20,6 +20,7 @@ const TalkDataH264Frame$json = {
{'1': 'FrameSeq', '3': 1, '4': 1, '5': 13, '10': 'FrameSeq'}, {'1': 'FrameSeq', '3': 1, '4': 1, '5': 13, '10': 'FrameSeq'},
{'1': 'FrameType', '3': 2, '4': 1, '5': 14, '6': '.main.TalkDataH264Frame.FrameTypeE', '10': 'FrameType'}, {'1': 'FrameType', '3': 2, '4': 1, '5': 14, '6': '.main.TalkDataH264Frame.FrameTypeE', '10': 'FrameType'},
{'1': 'FrameData', '3': 3, '4': 1, '5': 12, '10': 'FrameData'}, {'1': 'FrameData', '3': 3, '4': 1, '5': 12, '10': 'FrameData'},
{'1': 'FrameSeqI', '3': 4, '4': 1, '5': 13, '10': 'FrameSeqI'},
], ],
'4': [TalkDataH264Frame_FrameTypeE$json], '4': [TalkDataH264Frame_FrameTypeE$json],
}; };
@ -38,6 +39,6 @@ const TalkDataH264Frame_FrameTypeE$json = {
final $typed_data.Uint8List talkDataH264FrameDescriptor = $convert.base64Decode( final $typed_data.Uint8List talkDataH264FrameDescriptor = $convert.base64Decode(
'ChFUYWxrRGF0YUgyNjRGcmFtZRIaCghGcmFtZVNlcRgBIAEoDVIIRnJhbWVTZXESQAoJRnJhbW' 'ChFUYWxrRGF0YUgyNjRGcmFtZRIaCghGcmFtZVNlcRgBIAEoDVIIRnJhbWVTZXESQAoJRnJhbW'
'VUeXBlGAIgASgOMiIubWFpbi5UYWxrRGF0YUgyNjRGcmFtZS5GcmFtZVR5cGVFUglGcmFtZVR5' 'VUeXBlGAIgASgOMiIubWFpbi5UYWxrRGF0YUgyNjRGcmFtZS5GcmFtZVR5cGVFUglGcmFtZVR5'
'cGUSHAoJRnJhbWVEYXRhGAMgASgMUglGcmFtZURhdGEiJAoKRnJhbWVUeXBlRRIICgROT05FEA' 'cGUSHAoJRnJhbWVEYXRhGAMgASgMUglGcmFtZURhdGESHAoJRnJhbWVTZXFJGAQgASgNUglGcm'
'ASBQoBSRABEgUKAVAQAg=='); 'FtZVNlcUkiJAoKRnJhbWVUeXBlRRIICgROT05FEAASBQoBSRABEgUKAVAQAg==');

View File

@ -15,4 +15,6 @@ message TalkDataH264Frame {
FrameTypeE FrameType = 2; FrameTypeE FrameType = 2;
// //
bytes FrameData = 3; bytes FrameData = 3;
// I
uint32 FrameSeqI = 4;
} }

View File

@ -419,9 +419,15 @@ class StartChartManage {
if (talkStatus.status != TalkStatus.proactivelyCallWaitingAnswer) { if (talkStatus.status != TalkStatus.proactivelyCallWaitingAnswer) {
// //
// AudioPlayerManager().playRingtone(); // AudioPlayerManager().playRingtone();
Get.toNamed( if (_defaultTalkExpect.videoType.contains(VideoTypeE.H264)) {
Routers.starChartTalkView, Get.toNamed(
); Routers.h264WebView,
);
} else {
Get.toNamed(
Routers.starChartTalkView,
);
}
} }
talkRequestTimer ??= Timer.periodic( talkRequestTimer ??= Timer.periodic(
Duration( Duration(
@ -1118,6 +1124,10 @@ class StartChartManage {
); );
} }
TalkExpectReq getDefaultTalkExpect() {
return _defaultTalkExpect;
}
/// ///
void sendOnlyImageVideoTalkExpectData() { void sendOnlyImageVideoTalkExpectData() {
final talkExpectReq = TalkExpectReq( final talkExpectReq = TalkExpectReq(

View File

@ -0,0 +1,75 @@
import 'dart:math';
import 'package:flutter/services.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
import 'package:star_lock/tools/baseGetXController.dart';
import 'package:webview_flutter/webview_flutter.dart';
/// Controller for the H.264 debug WebView page.
///
/// Configures the embedded WebView, loads the local player page with
/// jmuxer inlined, and streams incoming H.264 payloads into the page's
/// `feedDataFromFlutter` JS entry point.
class H264WebViewLogic extends BaseGetXController {
  final H264WebViewState state = H264WebViewState();

  // Subscription to the talk-data stream; canceled in [onClose] so frames
  // stop flowing into a disposed WebView (previously leaked).
  StreamSubscription<TalkData>? _talkDataSubscription;

  @override
  void onInit() {
    super.onInit();
    // Configure the WebView: enable JS, disable zoom, and register a
    // channel so the HTML page can post messages back to Flutter.
    state.webViewController = WebViewController()
      ..setJavaScriptMode(JavaScriptMode.unrestricted)
      ..enableZoom(false)
      ..addJavaScriptChannel(
        'Flutter',
        onMessageReceived: (message) {
          print("来自 HTML 的消息: ${message.message}");
        },
      );
    // Load the local player page.
    _loadLocalHtml();
    // Start forwarding frames to the page.
    _createFramesStreamListen();
  }

  /// Forwards every incoming H.264 payload to the embedded page.
  void _createFramesStreamListen() {
    _talkDataSubscription =
        state.talkDataRepository.talkDataStream.listen((TalkData event) {
      // Hand the payload to the page's JS decoder.
      _sendBufferedData(event.content);
    });
  }

  /// Loads the local HTML player, inlining jmuxer.min.js.
  ///
  /// The script is inlined because a `loadHtmlString` page has no base URL,
  /// so a relative `<script src>` to a bundled asset cannot resolve.
  Future<void> _loadLocalHtml() async {
    final String fileHtmlContent =
        await rootBundle.loadString('assets/html/h264.html');
    final String jsContent =
        await rootBundle.loadString('assets/html/jmuxer.min.js');
    final String htmlWithJs = fileHtmlContent.replaceAll(
      '<script src="jmuxer.min.js"></script>',
      '<script>$jsContent</script>',
    );
    // webViewController is `late final` and assigned in onInit before this
    // runs, so it is always initialized here — the old `!= null` check on a
    // non-nullable field was dead code.
    await state.webViewController.loadHtmlString(htmlWithJs);
  }

  /// Pushes [buffer] into the page's `feedDataFromFlutter` function.
  ///
  /// A Dart List's toString (e.g. "[1, 2, 3]") is a valid JS array
  /// literal, so interpolation produces executable JS.
  Future<void> _sendBufferedData(List<int> buffer) async {
    String jsCode = "feedDataFromFlutter($buffer);";
    await state.webViewController.runJavaScript(jsCode);
  }

  @override
  void onClose() {
    super.onClose();
    // Stop streaming into the disposed WebView.
    _talkDataSubscription?.cancel();
    _talkDataSubscription = null;
    // Notify the peer that the call has ended.
    StartChartManage().startTalkHangupMessageTimer();
  }
}

View File

@ -3,9 +3,14 @@ import 'dart:convert';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:flutter/services.dart' show ByteData, Uint8List, rootBundle; import 'package:flutter/services.dart' show ByteData, Uint8List, rootBundle;
import 'package:flutter_screenutil/flutter_screenutil.dart'; import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:star_lock/app_settings/app_colors.dart'; import 'package:star_lock/app_settings/app_colors.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart'; import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart'; import 'package:star_lock/talk/starChart/proto/talk_data.pbserver.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_logic.dart';
import 'package:star_lock/talk/starChart/webView/h264_web_view_state.dart';
import 'package:star_lock/tools/titleAppBar.dart'; import 'package:star_lock/tools/titleAppBar.dart';
import 'package:webview_flutter/webview_flutter.dart'; import 'package:webview_flutter/webview_flutter.dart';
@ -15,168 +20,15 @@ class H264WebView extends StatefulWidget {
} }
class _H264WebViewState extends State<H264WebView> { class _H264WebViewState extends State<H264WebView> {
late final WebViewController _controller; final H264WebViewLogic logic = Get.put(H264WebViewLogic());
Timer? timer; final H264WebViewState state = Get.find<H264WebViewLogic>().state;
Timer? _sendTimer;
// 访
final List<int> _buffer = [];
// html文件间隔时间
final int sendDataToHtmlIntervalTime = 820;
//
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
@override
void initState() {
super.initState();
_controller = WebViewController()
..setJavaScriptMode(JavaScriptMode.unrestricted)
..enableZoom(false)
..addJavaScriptChannel(
'Flutter',
onMessageReceived: (message) {
print("来自 HTML 的消息: ${message.message}");
},
);
// HTML
_loadLocalHtml();
simulateStreamFromAsset();
_sendFramesToHtml();
}
void simulateStreamFromAsset() async {
// assets
final ByteData data = await rootBundle.load('assets/talk.h264');
final List<int> byteData = data.buffer.asUint8List();
int current = 0;
int start = 0;
int end = 0;
final List<int> chunks = extractChunks(byteData);
//
timer ??= Timer.periodic(Duration(milliseconds: 10), (timer) {
if (current >= chunks.length) {
print('数据已经发完,重新进行发送');
start = 0;
end = 0;
current = 0;
timer.cancel();
return;
}
// NALU chunks
end = chunks[current];
current++;
List<int> frameData = byteData.sublist(start, end);
if (frameData.length == 0) timer.cancel();
talkDataRepository.addTalkData(TalkData(contentType: TalkData_ContentTypeE.H264,content: frameData));
start = end;
});
}
void _sendFramesToHtml() async {
//
// talkDataRepository.talkDataStream.listen((TalkData event) async {
// _buffer.addAll(event.content);
// });
// 800ms的数据
_sendTimer ??= Timer.periodic(
Duration(milliseconds: sendDataToHtmlIntervalTime), (timer) async {
//
if (_buffer.isNotEmpty) {
await _sendBufferedData(_buffer);
_buffer.clear(); //
}
});
}
// NALU chunks
List<int> extractChunks(List<int> byteData) {
int i = 0;
int length = byteData.length;
int naluCount = 0;
int value;
int state = 0;
int lastIndex = 0;
List<int> result = [];
const minNaluPerChunk = 22; // NALU数量
while (i < length) {
value = byteData[i++];
// finding 3 or 4-byte start codes (00 00 01 OR 00 00 00 01)
switch (state) {
case 0:
if (value == 0) {
state = 1;
}
break;
case 1:
if (value == 0) {
state = 2;
} else {
state = 0;
}
break;
case 2:
case 3:
if (value == 0) {
state = 3;
} else if (value == 1 && i < length) {
if (lastIndex > 0) {
naluCount++;
}
if (naluCount >= minNaluPerChunk) {
result.add(lastIndex - state - 1);
naluCount = 0;
}
state = 0;
lastIndex = i;
} else {
state = 0;
}
break;
default:
break;
}
}
if (naluCount > 0) {
result.add(lastIndex);
}
return result;
}
/// html文件
Future<void> _loadLocalHtml() async {
final String fileHtmlContent =
await rootBundle.loadString('assets/html/h264.html');
_controller.loadHtmlString(fileHtmlContent);
}
// js处理
_sendBufferedData(List<int> buffer) async {
String jsCode = "feedDataFromFlutter(${buffer});";
await _controller.runJavaScript(jsCode);
}
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
return WebViewWidget(controller: _controller); return Stack(
} children: [
WebViewWidget(controller: state.webViewController),
@override ],
void dispose() { );
timer?.cancel();
timer = null;
_sendTimer?.cancel();
timer = null;
// talkDataRepository.dispose();
super.dispose();
} }
} }

View File

@ -0,0 +1,12 @@
import 'package:star_lock/talk/starChart/handle/other/talk_data_repository.dart';
import 'package:webview_flutter/webview_flutter.dart';
/// State holder for the H.264 WebView page (GetX state object).
class H264WebViewState {
  // Controller for the embedded WebView; assigned exactly once by the
  // logic class during onInit — reading it earlier throws (late final).
  late final WebViewController webViewController;

  // Shared singleton repository delivering talk frames to the page.
  final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
}

View File

@ -313,6 +313,7 @@ flutter:
- images/lockType/ - images/lockType/
- assets/ - assets/
- assets/html/h264.html - assets/html/h264.html
- assets/html/jmuxer.min.js
- lan/ - lan/
# An image asset can refer to one or more resolution-specific "variants", see # An image asset can refer to one or more resolution-specific "variants", see
# https://flutter.dev/assets-and-images/#resolution-aware # https://flutter.dev/assets-and-images/#resolution-aware