fix: add voice recording send feature

liyi 2024-12-30 11:53:42 +08:00
parent c865db7a9f
commit e850179c92
8 changed files with 270 additions and 75 deletions

View File

@@ -270,10 +270,12 @@ class MessageCommand {
   static List<int> talkDataMessage({
     required String FromPeerId,
     required String ToPeerId,
-    required TalkData talkData,
     int? MessageId,
+    List<int>? payload,
+    int? SpTotal,
+    int? SpIndex,
   }) {
-    final payload = talkData.writeToBuffer();
+    // final payload = talkData.writeToBuffer();
     ScpMessage message = ScpMessage(
       ProtocolFlag: ProtocolFlagConstant.scp01,
       MessageType: MessageTypeConstant.RealTimeData,
@@ -283,7 +285,7 @@ class MessageCommand {
       FromPeerId: FromPeerId,
       ToPeerId: ToPeerId,
       Payload: payload,
-      PayloadCRC: calculationCrc(payload),
+      PayloadCRC: calculationCrc(Uint8List.fromList(payload!)),
       PayloadLength: payload.length,
       PayloadType: PayloadTypeConstant.talkData,
     );

View File

@@ -25,7 +25,7 @@ class UdpGoOnlineHandler extends ScpMessageBaseHandle
       startChartManage.isOnlineStartChartServer = true;
       //
       startChartManage.stopReStartOnlineStartChartServer();
-      log(text: '星图登录成功');
+      log(text: '星图登录成功,PeerID:${scpMessage.ToPeerId}');
     } else {
       //
       startChartManage.reStartOnlineStartChartServer();
@@ -42,7 +42,7 @@ class UdpGoOnlineHandler extends ScpMessageBaseHandle
   deserializePayload(
       {required int payloadType,
       required int messageType,
       required List<int> byte,
       int? offset,
       int? PayloadLength,
       int? spTotal,

View File

@@ -14,6 +14,7 @@ class UdpTalkPingHandler extends ScpMessageBaseHandle
   @override
   void handleReq(ScpMessage scpMessage) {
     // TODO:
+    replySuccessMessage(scpMessage);
   }

   @override

View File

@@ -38,17 +38,17 @@ class ScpMessageBaseHandle {
   //
   final talkeRequestOverTimeTimerManager = OverTimeTimerManager(
-    timeoutInSeconds: 30,
+    timeoutInSeconds: 8,
   );
   //
   final talkePingOverTimeTimerManager = OverTimeTimerManager(
-    timeoutInSeconds: 260,
+    timeoutInSeconds: 5,
   );
   //
   final talkDataOverTimeTimerManager = OverTimeTimerManager(
-    timeoutInSeconds: 260,
+    timeoutInSeconds: 30,
   );
   //
@@ -64,8 +64,9 @@ class ScpMessageBaseHandle {
     if (genericResp == null) return false;
     final code = genericResp.code;
     final message = genericResp.message;
-    return code == UdpConstant.genericRespSuccessCode &&
-        message == UdpConstant.genericRespSuccessMsg;
+    return (code == UdpConstant.genericRespSuccessCode || code == null) &&
+        message.toLowerCase() ==
+            UdpConstant.genericRespSuccessMsg.toLowerCase();
   }

   void log({required String text}) {
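
The reply check is now more lenient: a null code is accepted and the message comparison ignores case. A tiny standalone sketch of that predicate, with placeholder constant values (the real ones live in UdpConstant and are not part of this diff):

    // Standalone sketch of the relaxed success check; both constants are
    // placeholders, the real values come from UdpConstant.
    const int genericRespSuccessCode = 0;
    const String genericRespSuccessMsg = 'success';

    bool isSuccessResp(int? code, String message) {
      return (code == genericRespSuccessCode || code == null) &&
          message.toLowerCase() == genericRespSuccessMsg.toLowerCase();
    }

    void main() {
      print(isSuccessResp(0, 'SUCCESS')); // true: case-insensitive match
      print(isSuccessResp(null, 'success')); // true: missing code is tolerated
      print(isSuccessResp(1, 'success')); // false: wrong code
    }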

View File

@@ -92,7 +92,7 @@ class StartChartManage {
   );
   //
-  TalkData defaultTalkData = TalkData();
+  TalkData _defaultTalkData = TalkData();

   String relayPeerId = ''; // peerId
@@ -252,14 +252,44 @@ class StartChartManage {
   //
   Future<void> sendTalkDataMessage({required TalkData talkData}) async {
-    //
-    final message = MessageCommand.talkDataMessage(
-      FromPeerId: FromPeerId,
-      ToPeerId: ToPeerId,
-      talkData: talkData,
-      MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
-    );
-    await _sendMessage(message: message);
+    final List<int> payload = talkData.content;
+    //
+    final int totalPackets = (payload.length / _maxPayloadSize).ceil();
+    //
+    for (int i = 0; i < totalPackets; i++) {
+      int start = i * _maxPayloadSize;
+      int end = (i + 1) * _maxPayloadSize;
+      if (end > payload.length) {
+        end = payload.length;
+      }
+      //
+      List<int> packet = payload.sublist(start, end);
+      // messageID
+      final messageId =
+          MessageCommand.getNextMessageId(ToPeerId, increment: false);
+      //
+      final message = MessageCommand.talkDataMessage(
+        ToPeerId: ToPeerId,
+        FromPeerId: FromPeerId,
+        payload: packet,
+        SpTotal: totalPackets,
+        SpIndex: i + 1,
+        MessageId: messageId,
+      );
+      //
+      await _sendMessage(message: message);
+    }
+    // id
+    MessageCommand.getNextMessageId(ToPeerId);
+    //
+    // final message = MessageCommand.talkDataMessage(
+    //   FromPeerId: FromPeerId,
+    //   ToPeerId: ToPeerId,
+    //   talkData: talkData,
+    //   MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
+    // );
+    // await _sendMessage(message: message);
   }

   //
@@ -350,7 +380,7 @@ class StartChartManage {
     await _sendMessage(message: message);
     //
     talkStatus.setWaitingAnswer();
-    _log(text: '发送同意接听消息');
+    // _log(text: '发送同意接听消息');
   }

   //
@@ -381,6 +411,7 @@ class StartChartManage {
       MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
     );
     await _sendMessage(message: message);
+    // _log(text: '发送预期数据:${talkExpect}');
   }

   //
@@ -420,6 +451,7 @@ class StartChartManage {
       MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
     );
     await _sendMessage(message: message);
+    _log(text: '发送通话保持');
   }

   //
@@ -892,13 +924,13 @@ class StartChartManage {
         current++;
         List<int> frameData = byteData.sublist(start, end);
         if (frameData.length == 0) timer.cancel();
-        defaultTalkData = TalkData(
+        _defaultTalkData = TalkData(
           content: frameData,
           contentType: TalkData_ContentTypeE.H264,
         );
         start = end;
         //
-        sendTalkDataMessage(talkData: defaultTalkData);
+        sendTalkDataMessage(talkData: _defaultTalkData);
       },
     );
   }
@@ -991,7 +1023,15 @@ class StartChartManage {
     stopHeartbeat();
     stopReStartOnlineStartChartServer();
     stopTalkDataTimer();
+    _resetData();
     // await Storage.removerRelayInfo();
     // await Storage.removerStarChartRegisterNodeInfo();
   }
+
+  void _resetData() {
+    _defaultTalkExpect = TalkExpectReq(
+      videoType: [VideoTypeE.IMAGE],
+      audioType: [AudioTypeE.G711],
+    );
+  }
 }
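
To see what the new split send does with a concrete payload, here is a small standalone sketch of the same chunking arithmetic, assuming a hypothetical maxPayloadSize of 1024 bytes (the real value is the private _maxPayloadSize constant, which is not part of this diff):

    // Sketch of the SpTotal/SpIndex chunking used by sendTalkDataMessage.
    const int maxPayloadSize = 1024; // assumption; the real value is _maxPayloadSize

    void main() {
      final List<int> payload = List<int>.filled(3000, 0); // stand-in for talkData.content
      final int spTotal = (payload.length / maxPayloadSize).ceil(); // 3 packets

      for (int i = 0; i < spTotal; i++) {
        final int start = i * maxPayloadSize;
        int end = (i + 1) * maxPayloadSize;
        if (end > payload.length) end = payload.length;
        final List<int> packet = payload.sublist(start, end);
        // Prints: 1/3 -> 1024 bytes, 2/3 -> 1024 bytes, 3/3 -> 952 bytes
        print('SpIndex ${i + 1}/$spTotal -> ${packet.length} bytes');
      }
    }

Note that inside the loop the commit calls getNextMessageId with increment: false and only advances the id once after the loop, which suggests all fragments of one frame are meant to share a single MessageId.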

View File

@@ -7,6 +7,8 @@ import 'package:flutter/services.dart';
 import 'package:flutter_easyloading/flutter_easyloading.dart';
 import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
 import 'package:flutter_screen_recording/flutter_screen_recording.dart';
+import 'package:flutter_voice_processor/flutter_voice_processor.dart';
 import 'package:gallery_saver/gallery_saver.dart';
 import 'package:get/get.dart';
@@ -31,14 +33,12 @@ class TalkViewLogic extends BaseGetXController {
   Timer? _syncTimer;
   int _startTime = 0;
   final int bufferSize = 20; //
-  final List<int> frameTimestamps = [];
+  final List<int> frameTimestamps = []; // FPS
   int frameIntervalMs = 45; // 45ms ≈ 22 FPS
   int minFrameIntervalMs = 30; // 30ms ≈ 33 FPS
   int maxFrameIntervalMs = 100; // 100ms = 10 FPS
-  /// Talk发送的状态
-  StreamSubscription? _getTalkStatusRefreshUIEvent;

   void _initFlutterPcmSound() {
     const int sampleRate = 44100;
     FlutterPcmSound.setLogLevel(LogLevel.verbose);
@@ -129,6 +129,7 @@ class TalkViewLogic extends BaseGetXController {
     });
   }

+  ///
   void _playAudioData(TalkData talkData) {
     // PCM PcmArrayInt16
     final PcmArrayInt16 fromList = PcmArrayInt16.fromList(talkData.content);
@@ -139,6 +140,7 @@ class TalkViewLogic extends BaseGetXController {
     }
   }

+  ///
   void _playVideoData(TalkData talkData) {
     state.listData.value = Uint8List.fromList(talkData.content);
   }
@@ -263,7 +265,9 @@ class TalkViewLogic extends BaseGetXController {
   ///
   void showNetworkStatus(String message) {
-    if (state.alertCount.value < 3 && !EasyLoading.isShow) {
+    // EasyLoading
+    if (state.alertCount.value < state.maxAlertNumber.value &&
+        !EasyLoading.isShow) {
       showToast(message);
       state.alertCount++;
     }
@@ -279,6 +283,7 @@ class TalkViewLogic extends BaseGetXController {
   ///
   udpOpenDoorAction(List<int> list) async {}

+  ///
   Future<bool> getPermissionStatus() async {
     final Permission permission = Permission.microphone;
     //granted denied permanentlyDenied
@@ -312,17 +317,22 @@ class TalkViewLogic extends BaseGetXController {
   void onInit() {
     super.onInit();
     //
     _startListenTalkData();
     //
     _startListenTalkStatus();
     //
     // *** ***
     state.talkStatus.value = state.startChartTalkStatus.status;
+    //
     _initFlutterPcmSound();
+    //
     _startPlayback();
+    //
+    _initAudioRecorder();
   }

   @override
@@ -404,7 +414,7 @@ class TalkViewLogic extends BaseGetXController {
     bool started =
         await FlutterScreenRecording.startRecordScreenAndAudio("Recording");
     if (started) {
-      state.isRecording.value = true;
+      state.isRecordingScreen.value = true;
     }
   }
@@ -412,13 +422,136 @@ class TalkViewLogic extends BaseGetXController {
   Future<void> stopRecording() async {
     String path = await FlutterScreenRecording.stopRecordScreen;
     if (path != null) {
-      state.isRecording.value = false;
+      state.isRecordingScreen.value = false;
       //
-      await GallerySaver.saveVideo(path).then((bool? success) {});
+      // await GallerySaver.saveVideo(path).then((bool? success) {});
+      //
+      await ImageGallerySaver.saveFile(path);
       showToast('录屏已保存到相册'.tr);
     } else {
-      state.isRecording.value = false;
+      state.isRecordingScreen.value = false;
       print("Recording failed");
     }
   }
+
+  ///
+  void _initAudioRecorder() {
+    state.voiceProcessor = VoiceProcessor.instance;
+  }
+
+  //
+  Future<void> startProcessingAudio() async {
+    //
+    state.voiceProcessor?.addFrameListener(_onFrame);
+    state.voiceProcessor?.addErrorListener(_onError);
+    try {
+      if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
+        await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
+        final bool? isRecording = await state.voiceProcessor?.isRecording();
+        state.isRecordingAudio.value = isRecording!;
+        state.startRecordingAudioTime.value = DateTime.now();
+      } else {
+        // state.errorMessage.value = 'Recording permission not granted';
+      }
+    } on PlatformException catch (ex) {
+      // state.errorMessage.value = 'Failed to start recorder: $ex';
+    }
+  }
+
+  ///
+  Future<void> stopProcessingAudio() async {
+    try {
+      await state.voiceProcessor?.stop();
+      state.voiceProcessor?.removeFrameListener(_onFrame);
+      state.udpSendDataFrameNumber = 0;
+      //
+      state.endRecordingAudioTime.value = DateTime.now();
+      //
+      final duration = state.endRecordingAudioTime.value!
+          .difference(state.startRecordingAudioTime.value!);
+      state.recordingAudioTime.value = duration.inSeconds;
+    } on PlatformException catch (ex) {
+      // state.errorMessage.value = 'Failed to stop recorder: $ex';
+    } finally {
+      final bool? isRecording = await state.voiceProcessor?.isRecording();
+      state.isRecordingAudio.value = isRecording!;
+    }
+  }
+
+  Future<void> _onFrame(List<int> frame) async {
+    state.recordingAudioAllFrames.add(frame); //
+    // final List<int> concatenatedFrames =
+    //     _concatenateFrames(state.recordingAudioAllFrames); //
+    final List<int> pcmBytes = _listLinearToULaw(frame);
+    //
+    StartChartManage().sendTalkDataMessage(
+      talkData: TalkData(
+        content: pcmBytes,
+        contentType: TalkData_ContentTypeE.G711,
+        durationMs: DateTime.now().millisecondsSinceEpoch -
+            state.startRecordingAudioTime.value.millisecondsSinceEpoch,
+      ),
+    );
+  }
+
+  void _onError(VoiceProcessorException error) {
+    // state.errorMessage.value = error.message!;
+    AppLog.log(error.message!);
+  }
+
+  // pcm
+  List<int> _listLinearToULaw(List<int> pcmList) {
+    final List<int> uLawList = [];
+    for (int pcmVal in pcmList) {
+      final int uLawVal = _linearToULaw(pcmVal);
+      uLawList.add(uLawVal);
+    }
+    return uLawList;
+  }
+
+  // pcm
+  int _linearToULaw(int pcmVal) {
+    int mask;
+    int seg;
+    int uval;
+    if (pcmVal < 0) {
+      pcmVal = 0x84 - pcmVal;
+      mask = 0x7F;
+    } else {
+      pcmVal += 0x84;
+      mask = 0xFF;
+    }
+    seg = search(pcmVal);
+    if (seg >= 8) {
+      return 0x7F ^ mask;
+    } else {
+      uval = seg << 4;
+      uval |= (pcmVal >> (seg + 3)) & 0xF;
+      return uval ^ mask;
+    }
+  }
+
+  int search(int val) {
+    final List<int> table = [
+      0xFF,
+      0x1FF,
+      0x3FF,
+      0x7FF,
+      0xFFF,
+      0x1FFF,
+      0x3FFF,
+      0x7FFF
+    ];
+    const int size = 8;
+    for (int i = 0; i < size; i++) {
+      if (val <= table[i]) {
+        return i;
+      }
+    }
+    return size;
+  }
 }
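
The recorder delivers 320-sample frames at 8000 Hz, i.e. 40 ms of audio per frame, and each 16-bit PCM sample is squeezed into one G.711 µ-law byte before being sent as TalkData. A minimal sketch of that encode step outside the controller, reusing the same CCITT linear-to-µ-law routine as the commit (the 440 Hz sine frame is only illustrative input):

    import 'dart:math' as math;

    // Frame parameters from the commit: 320 samples at 8000 Hz = 40 ms per frame.
    const int frameLength = 320;
    const int sampleRate = 8000;

    const List<int> segTable = [0xFF, 0x1FF, 0x3FF, 0x7FF, 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF];

    int segment(int val) {
      for (int i = 0; i < segTable.length; i++) {
        if (val <= segTable[i]) return i;
      }
      return segTable.length;
    }

    // Same CCITT G.711 linear-to-µ-law conversion as _linearToULaw above.
    int linearToULaw(int pcmVal) {
      int mask;
      if (pcmVal < 0) {
        pcmVal = 0x84 - pcmVal;
        mask = 0x7F;
      } else {
        pcmVal += 0x84;
        mask = 0xFF;
      }
      final int seg = segment(pcmVal);
      if (seg >= 8) return 0x7F ^ mask; // clip out-of-range samples
      return ((seg << 4) | ((pcmVal >> (seg + 3)) & 0xF)) ^ mask;
    }

    void main() {
      // One fake frame of 16-bit PCM: a 440 Hz tone at the recorder's sample rate.
      final List<int> pcmFrame = List<int>.generate(
        frameLength,
        (i) => (math.sin(2 * math.pi * 440 * i / sampleRate) * 32000).round(),
      );
      final List<int> uLawFrame = pcmFrame.map(linearToULaw).toList();
      final double frameMs = frameLength * 1000 / sampleRate; // 40.0
      print('${uLawFrame.length} u-law bytes cover $frameMs ms of audio');
    }

Each frame therefore becomes roughly 320 bytes of content, which is likely below the _maxPayloadSize split threshold, so a single audio frame will typically leave sendTalkDataMessage as one packet.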

View File

@@ -5,17 +5,14 @@ import 'dart:io';
 import 'package:flutter/material.dart';
 import 'package:flutter/rendering.dart';
 import 'package:flutter/services.dart';
-import 'package:flutter_screen_recording/flutter_screen_recording.dart';
 import 'package:flutter_screenutil/flutter_screenutil.dart';
-import 'package:gallery_saver/gallery_saver.dart';
 import 'package:get/get.dart';
-import 'package:image_gallery_saver/image_gallery_saver.dart';
-import 'package:path_provider/path_provider.dart';
-import 'package:star_lock/app_settings/app_settings.dart';
-import 'package:star_lock/main/lockDetail/realTimePicture/realTimePicture_state.dart';
 import 'package:star_lock/talk/call/callTalk.dart';
 import 'package:star_lock/talk/startChart/constant/talk_status.dart';
-import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
 import 'package:star_lock/talk/startChart/views/talkView/talk_view_logic.dart';
 import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
@@ -75,24 +72,30 @@ class _TalkViewPageState extends State<TalkViewPage>
                 height: ScreenUtil().screenHeight,
                 fit: BoxFit.cover,
               )
-            : PopScope(
-                canPop: false,
-                child: RepaintBoundary(
-                  key: state.globalKey,
-                  child: Image.memory(
-                    state.listData.value,
-                    gaplessPlayback: true,
-                    width: 1.sw,
-                    height: 1.sh,
-                    fit: BoxFit.cover,
-                    filterQuality: FilterQuality.high,
-                    errorBuilder: (
-                      BuildContext context,
-                      Object error,
-                      StackTrace? stackTrace,
-                    ) {
-                      return Container(color: Colors.transparent);
-                    },
+            : Container(
+                decoration: state.isRecordingScreen.value
+                    ? BoxDecoration(
+                        border: Border.all(color: Colors.red, width: 1))
+                    : BoxDecoration(),
+                child: PopScope(
+                  canPop: false,
+                  child: RepaintBoundary(
+                    key: state.globalKey,
+                    child: Image.memory(
+                      state.listData.value,
+                      gaplessPlayback: true,
+                      width: 1.sw,
+                      height: 1.sh,
+                      fit: BoxFit.cover,
+                      filterQuality: FilterQuality.high,
+                      errorBuilder: (
+                        BuildContext context,
+                        Object error,
+                        StackTrace? stackTrace,
+                      ) {
+                        return Container(color: Colors.transparent);
+                      },
+                    ),
                   ),
                 ),
               ),
@@ -151,7 +154,10 @@
               //
               GestureDetector(
                 onTap: () {
-                  logic.updateTalkExpect();
+                  if (state.talkStatus.value == TalkStatus.duringCall) {
+                    //
+                    logic.updateTalkExpect();
+                  }
                 },
                 child: Container(
                   width: 50.w,
@@ -171,7 +177,9 @@
               //
               GestureDetector(
                 onTap: () async {
-                  await logic.captureAndSavePng();
+                  if (state.talkStatus.value == TalkStatus.duringCall) {
+                    await logic.captureAndSavePng();
+                  }
                 },
                 child: Container(
                   width: 50.w,
@@ -188,10 +196,12 @@
               //
               GestureDetector(
                 onTap: () async {
-                  if (state.isRecording.value) {
-                    await logic.stopRecording();
-                  } else {
-                    await logic.startRecording();
+                  if (state.talkStatus.value == TalkStatus.duringCall) {
+                    if (state.isRecordingScreen.value) {
+                      await logic.stopRecording();
+                    } else {
+                      await logic.startRecording();
+                    }
                   }
                 },
                 child: Container(
@@ -234,9 +244,15 @@
                     Colors.white,
                     longPress: () async {
                       if (state.talkStatus.value == TalkStatus.answeredSuccessfully ||
-                          state.talkStatus.value == TalkStatus.duringCall) {}
+                          state.talkStatus.value == TalkStatus.duringCall) {
+                        print('开始录音');
+                        logic.startProcessingAudio();
+                      }
+                    },
+                    longPressUp: () async {
+                      print('停止录音');
+                      logic.stopProcessingAudio();
                     },
-                    longPressUp: () async {},
                     onClick: () async {
                       if (state.talkStatus.value == TalkStatus.waitingAnswer) {
                         //

View File

@@ -21,7 +21,6 @@ enum NetworkStatus {
 }

 class TalkViewState {
   int udpSendDataFrameNumber = 0; //
   // var isSenderAudioData = false.obs;//
@@ -35,7 +34,6 @@ class TalkViewState {
   Rx<Uint8List> listData = Uint8List(0).obs; //
   RxList<int> listAudioData = <int>[].obs; //
   GlobalKey globalKey = GlobalKey();
-  late final VoiceProcessor? voiceProcessor;
   late Timer oneMinuteTimeTimer =
       Timer(const Duration(seconds: 1), () {}); // 60
@@ -47,16 +45,12 @@ class TalkViewState {
   late Timer openDoorTimer;
   late AnimationController animationController;
   late Timer autoBackTimer =
       Timer(const Duration(seconds: 1), () {}); // 30
   late Timer realTimePicTimer =
       Timer(const Duration(seconds: 1), () {}); //
   RxInt elapsedSeconds = 0.obs;
   //
   List<TalkData> audioBuffer = <TalkData>[].obs;
   List<TalkData> videoBuffer = <TalkData>[].obs;
@@ -65,6 +59,7 @@ class TalkViewState {
   // startChartTalkStatus
   final StartChartTalkStatus startChartTalkStatus =
       StartChartTalkStatus.instance;
   //
   final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
   RxInt lastFrameTimestamp = 0.obs; // ,
@@ -73,7 +68,14 @@ class TalkViewState {
   RxInt alertCount = 0.obs; //
   RxInt maxAlertNumber = 3.obs; //
   RxBool isOpenVoice = true.obs; //
-  RxBool isRecording = true.obs; //
+  RxBool isRecordingScreen = false.obs; //
+  RxBool isRecordingAudio = false.obs; //
+  Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; //
+  Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; //
+  RxInt recordingAudioTime = 0.obs; //
   RxDouble fps = 0.0.obs; // FPS
+  late VoiceProcessor? voiceProcessor; //
+  final int frameLength = 320; //
+  final int sampleRate = 8000; //
+  List<List<int>> recordingAudioAllFrames = <List<int>>[]; //
 }