feat:增加录音发送功能

This commit is contained in:
liyi 2024-12-30 11:53:42 +08:00
parent c865db7a9f
commit e850179c92
8 changed files with 270 additions and 75 deletions

View File

@ -270,10 +270,12 @@ class MessageCommand {
static List<int> talkDataMessage({
required String FromPeerId,
required String ToPeerId,
required TalkData talkData,
int? MessageId,
List<int>? payload,
int? SpTotal,
int? SpIndex,
}) {
final payload = talkData.writeToBuffer();
// final payload = talkData.writeToBuffer();
ScpMessage message = ScpMessage(
ProtocolFlag: ProtocolFlagConstant.scp01,
MessageType: MessageTypeConstant.RealTimeData,
@ -283,7 +285,7 @@ class MessageCommand {
FromPeerId: FromPeerId,
ToPeerId: ToPeerId,
Payload: payload,
PayloadCRC: calculationCrc(payload),
PayloadCRC: calculationCrc(Uint8List.fromList(payload!)),
PayloadLength: payload.length,
PayloadType: PayloadTypeConstant.talkData,
);

View File

@ -25,7 +25,7 @@ class UdpGoOnlineHandler extends ScpMessageBaseHandle
startChartManage.isOnlineStartChartServer = true;
// 线
startChartManage.stopReStartOnlineStartChartServer();
log(text: '星图登录成功');
log(text: '星图登录成功,PeerID:${scpMessage.ToPeerId}');
} else {
// 线
startChartManage.reStartOnlineStartChartServer();
@ -42,7 +42,7 @@ class UdpGoOnlineHandler extends ScpMessageBaseHandle
deserializePayload(
{required int payloadType,
required int messageType,
required List<int> byte,
required List<int> byte,
int? offset,
int? PayloadLength,
int? spTotal,

View File

@ -14,6 +14,7 @@ class UdpTalkPingHandler extends ScpMessageBaseHandle
@override
void handleReq(ScpMessage scpMessage) {
  // TODO: real talk-ping handling; for now the request is simply
  // acknowledged with a generic success reply.
  replySuccessMessage(scpMessage);
}
@override

View File

@ -38,17 +38,17 @@ class ScpMessageBaseHandle {
//
final talkeRequestOverTimeTimerManager = OverTimeTimerManager(
timeoutInSeconds: 30,
timeoutInSeconds: 8,
);
//
final talkePingOverTimeTimerManager = OverTimeTimerManager(
timeoutInSeconds: 260,
timeoutInSeconds: 5,
);
//
final talkDataOverTimeTimerManager = OverTimeTimerManager(
timeoutInSeconds: 260,
timeoutInSeconds: 30,
);
//
@ -64,8 +64,9 @@ class ScpMessageBaseHandle {
if (genericResp == null) return false;
final code = genericResp.code;
final message = genericResp.message;
return code == UdpConstant.genericRespSuccessCode &&
message == UdpConstant.genericRespSuccessMsg;
return (code == UdpConstant.genericRespSuccessCode || code == null) &&
message.toLowerCase() ==
UdpConstant.genericRespSuccessMsg.toLowerCase();
}
void log({required String text}) {

View File

@ -92,7 +92,7 @@ class StartChartManage {
);
//
TalkData defaultTalkData = TalkData();
TalkData _defaultTalkData = TalkData();
String relayPeerId = ''; // peerId
@ -252,14 +252,44 @@ class StartChartManage {
//
Future<void> sendTalkDataMessage({required TalkData talkData}) async {
// 线
final message = MessageCommand.talkDataMessage(
FromPeerId: FromPeerId,
ToPeerId: ToPeerId,
talkData: talkData,
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
final List<int> payload = talkData.content;
//
final int totalPackets = (payload.length / _maxPayloadSize).ceil();
//
for (int i = 0; i < totalPackets; i++) {
int start = i * _maxPayloadSize;
int end = (i + 1) * _maxPayloadSize;
if (end > payload.length) {
end = payload.length;
}
//
List<int> packet = payload.sublist(start, end);
// messageID
final messageId =
MessageCommand.getNextMessageId(ToPeerId, increment: false);
//
final message = MessageCommand.talkDataMessage(
ToPeerId: ToPeerId,
FromPeerId: FromPeerId,
payload: packet,
SpTotal: totalPackets,
SpIndex: i + 1,
MessageId: messageId,
);
//
await _sendMessage(message: message);
}
// id
MessageCommand.getNextMessageId(ToPeerId);
//
// final message = MessageCommand.talkDataMessage(
// FromPeerId: FromPeerId,
// ToPeerId: ToPeerId,
// talkData: talkData,
// MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
// );
// await _sendMessage(message: message);
}
//
@ -350,7 +380,7 @@ class StartChartManage {
await _sendMessage(message: message);
//
talkStatus.setWaitingAnswer();
_log(text: '发送同意接听消息');
// _log(text: '发送同意接听消息');
}
//
@ -381,6 +411,7 @@ class StartChartManage {
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
// _log(text: '发送预期数据:${talkExpect}');
}
//
@ -420,6 +451,7 @@ class StartChartManage {
MessageId: MessageCommand.getNextMessageId(ToPeerId, increment: true),
);
await _sendMessage(message: message);
_log(text: '发送通话保持');
}
//
@ -892,13 +924,13 @@ class StartChartManage {
current++;
List<int> frameData = byteData.sublist(start, end);
if (frameData.length == 0) timer.cancel();
defaultTalkData = TalkData(
_defaultTalkData = TalkData(
content: frameData,
contentType: TalkData_ContentTypeE.H264,
);
start = end;
//
sendTalkDataMessage(talkData: defaultTalkData);
//
sendTalkDataMessage(talkData: _defaultTalkData);
},
);
}
@ -991,7 +1023,15 @@ class StartChartManage {
stopHeartbeat();
stopReStartOnlineStartChartServer();
stopTalkDataTimer();
_resetData();
// await Storage.removerRelayInfo();
// await Storage.removerStarChartRegisterNodeInfo();
}
// Restores the default talk-expect configuration after a session is torn
// down (called from the stop/cleanup path above).
// NOTE(review): `_defaultTalkExpect` and `TalkExpectReq` are declared
// outside this view — confirm IMAGE video + G711 audio are the intended
// post-call defaults.
void _resetData() {
  _defaultTalkExpect = TalkExpectReq(
    videoType: [VideoTypeE.IMAGE],
    audioType: [AudioTypeE.G711],
  );
}
}

View File

@ -7,6 +7,8 @@ import 'package:flutter/services.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
@ -31,14 +33,12 @@ class TalkViewLogic extends BaseGetXController {
Timer? _syncTimer;
int _startTime = 0;
final int bufferSize = 20; //
final List<int> frameTimestamps = [];
final List<int> frameTimestamps = []; // FPS
int frameIntervalMs = 45; // 4522FPS
int minFrameIntervalMs = 30; // 33 FPS
int maxFrameIntervalMs = 100; // 10 FPS
/// Talk发送的状态
StreamSubscription? _getTalkStatusRefreshUIEvent;
///
void _initFlutterPcmSound() {
const int sampleRate = 44100;
FlutterPcmSound.setLogLevel(LogLevel.verbose);
@ -129,6 +129,7 @@ class TalkViewLogic extends BaseGetXController {
});
}
///
void _playAudioData(TalkData talkData) {
// PCM PcmArrayInt16
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(talkData.content);
@ -139,6 +140,7 @@ class TalkViewLogic extends BaseGetXController {
}
}
/// Renders one incoming video frame by replacing the reactive frame
/// buffer (`state.listData`) with the payload of [talkData].
void _playVideoData(TalkData talkData) {
  final frameBytes = Uint8List.fromList(talkData.content);
  state.listData.value = frameBytes;
}
@ -263,7 +265,9 @@ class TalkViewLogic extends BaseGetXController {
///
void showNetworkStatus(String message) {
if (state.alertCount.value < 3 && !EasyLoading.isShow) {
// EasyLoading
if (state.alertCount.value < state.maxAlertNumber.value &&
!EasyLoading.isShow) {
showToast(message);
state.alertCount++;
}
@ -279,6 +283,7 @@ class TalkViewLogic extends BaseGetXController {
///
udpOpenDoorAction(List<int> list) async {}
///
Future<bool> getPermissionStatus() async {
final Permission permission = Permission.microphone;
//granted denied permanentlyDenied
@ -312,17 +317,22 @@ class TalkViewLogic extends BaseGetXController {
void onInit() {
super.onInit();
//
//
_startListenTalkData();
//
//
_startListenTalkStatus();
//
// *** ***
state.talkStatus.value = state.startChartTalkStatus.status;
//
_initFlutterPcmSound();
//
_startPlayback();
//
_initAudioRecorder();
}
@override
@ -404,7 +414,7 @@ class TalkViewLogic extends BaseGetXController {
bool started =
await FlutterScreenRecording.startRecordScreenAndAudio("Recording");
if (started) {
state.isRecording.value = true;
state.isRecordingScreen.value = true;
}
}
@ -412,13 +422,136 @@ class TalkViewLogic extends BaseGetXController {
Future<void> stopRecording() async {
String path = await FlutterScreenRecording.stopRecordScreen;
if (path != null) {
state.isRecording.value = false;
state.isRecordingScreen.value = false;
//
await GallerySaver.saveVideo(path).then((bool? success) {});
// await GallerySaver.saveVideo(path).then((bool? success) {});
//
await ImageGallerySaver.saveFile(path);
showToast('录屏已保存到相册'.tr);
} else {
state.isRecording.value = false;
state.isRecordingScreen.value = false;
print("Recording failed");
}
}
/// Grabs the shared [VoiceProcessor] singleton used for microphone
/// capture; actual recording starts in [startProcessingAudio].
void _initAudioRecorder() {
  state.voiceProcessor = VoiceProcessor.instance;
}
/// Starts capturing microphone audio via [VoiceProcessor].
///
/// FIX: listeners are now attached only after the record-audio permission
/// check succeeds. Previously they were registered unconditionally, so a
/// denied request left stale listeners behind and every retry stacked
/// duplicate frame/error callbacks on the shared singleton.
Future<void> startProcessingAudio() async {
  try {
    if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
      // Register callbacks only when recording will actually start.
      state.voiceProcessor?.addFrameListener(_onFrame);
      state.voiceProcessor?.addErrorListener(_onError);
      await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
      final bool? isRecording = await state.voiceProcessor?.isRecording();
      // Avoid a null assertion: treat an unknown state as "not recording".
      state.isRecordingAudio.value = isRecording ?? false;
      state.startRecordingAudioTime.value = DateTime.now();
    } else {
      // Permission denied — nothing was started, nothing to undo.
      // state.errorMessage.value = 'Recording permission not granted';
    }
  } on PlatformException catch (ex) {
    // NOTE(review): swallowed in the original too; surface via log/UI?
    // state.errorMessage.value = 'Failed to start recorder: $ex';
  }
}
/// Stops the microphone capture and finalises recording bookkeeping
/// (end timestamp and total session length in seconds).
///
/// FIX: removes BOTH listeners registered by [startProcessingAudio]; the
/// original only removed the frame listener, leaking a duplicate error
/// listener on every start/stop cycle. Redundant `!` assertions on the
/// non-nullable `Rx<DateTime>` values were dropped as well.
Future<void> stopProcessingAudio() async {
  try {
    await state.voiceProcessor?.stop();
    state.voiceProcessor?.removeFrameListener(_onFrame);
    state.voiceProcessor?.removeErrorListener(_onError);
    state.udpSendDataFrameNumber = 0;
    // Record when capture ended and derive the recorded duration.
    state.endRecordingAudioTime.value = DateTime.now();
    final duration = state.endRecordingAudioTime.value
        .difference(state.startRecordingAudioTime.value);
    state.recordingAudioTime.value = duration.inSeconds;
  } on PlatformException catch (ex) {
    // NOTE(review): swallowed in the original too; surface via log/UI?
    // state.errorMessage.value = 'Failed to stop recorder: $ex';
  } finally {
    final bool? isRecording = await state.voiceProcessor?.isRecording();
    // Avoid a null assertion: treat an unknown state as "not recording".
    state.isRecordingAudio.value = isRecording ?? false;
  }
}
/// Per-frame microphone callback: G.711 mu-law encodes the PCM frame and
/// streams it out as a talk-data message.
///
/// FIX: the send future was previously dropped (fire-and-forget), so any
/// transmission error vanished silently; it is now awaited.
Future<void> _onFrame(List<int> frame) async {
  // Keep every raw frame for post-recording bookkeeping.
  // NOTE(review): grows unbounded during long recordings — confirm the
  // list is cleared when a session ends.
  state.recordingAudioAllFrames.add(frame);
  // Mu-law encode: one output byte per 16-bit linear PCM sample.
  final List<int> pcmBytes = _listLinearToULaw(frame);
  // Elapsed time since recording started, in milliseconds.
  final int elapsedMs = DateTime.now().millisecondsSinceEpoch -
      state.startRecordingAudioTime.value.millisecondsSinceEpoch;
  await StartChartManage().sendTalkDataMessage(
    talkData: TalkData(
      content: pcmBytes,
      contentType: TalkData_ContentTypeE.G711,
      durationMs: elapsedMs,
    ),
  );
}
/// Error callback for the [VoiceProcessor] stream.
///
/// FIX: `error.message` is nullable; the previous `error.message!` could
/// itself throw while trying to report an error.
void _onError(VoiceProcessorException error) {
  // state.errorMessage.value = error.message;
  AppLog.log(error.message ?? 'VoiceProcessor error (no message)');
}
// Converts a buffer of 16-bit linear PCM samples to G.711 mu-law bytes,
// one output byte per input sample.
List<int> _listLinearToULaw(List<int> pcmList) {
  return pcmList.map(_linearToULaw).toList();
}
// Encodes one linear PCM sample as a G.711 mu-law byte using the classic
// Sun g711.c algorithm: add the 0x84 bias, fold negatives to a positive
// magnitude, locate the logarithmic segment, then pack the segment and
// quantization bits and invert per the mu-law wire format.
// NOTE(review): no explicit clipping as in some reference code — extreme
// samples saturate through the `seg >= 8` branch instead.
int _linearToULaw(int pcmVal) {
  final bool negative = pcmVal < 0;
  // 0x7F marks negative samples; mu-law transmits complemented bits.
  final int mask = negative ? 0x7F : 0xFF;
  final int biased = negative ? 0x84 - pcmVal : pcmVal + 0x84;
  final int seg = search(biased);
  if (seg >= 8) {
    // Out of range: maximum magnitude for this sign.
    return 0x7F ^ mask;
  }
  final int quantized = (seg << 4) | ((biased >> (seg + 3)) & 0xF);
  return quantized ^ mask;
}
// Returns the index of the first mu-law segment whose upper bound is
// >= [val], or 8 when the value exceeds every segment (overflow case).
int search(int val) {
  const List<int> segmentEnds = [
    0xFF,
    0x1FF,
    0x3FF,
    0x7FF,
    0xFFF,
    0x1FFF,
    0x3FFF,
    0x7FFF,
  ];
  final int idx = segmentEnds.indexWhere((end) => val <= end);
  return idx < 0 ? segmentEnds.length : idx;
}
}

View File

@ -5,17 +5,14 @@ import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_screen_recording/flutter_screen_recording.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/main/lockDetail/realTimePicture/realTimePicture_state.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/start_chart_talk_status.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_logic.dart';
import 'package:star_lock/talk/startChart/views/talkView/talk_view_state.dart';
@ -75,24 +72,30 @@ class _TalkViewPageState extends State<TalkViewPage>
height: ScreenUtil().screenHeight,
fit: BoxFit.cover,
)
: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: Image.memory(
state.listData.value,
gaplessPlayback: true,
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
errorBuilder: (
BuildContext context,
Object error,
StackTrace? stackTrace,
) {
return Container(color: Colors.transparent);
},
: Container(
decoration: state.isRecordingScreen.value
? BoxDecoration(
border: Border.all(color: Colors.red, width: 1))
: BoxDecoration(),
child: PopScope(
canPop: false,
child: RepaintBoundary(
key: state.globalKey,
child: Image.memory(
state.listData.value,
gaplessPlayback: true,
width: 1.sw,
height: 1.sh,
fit: BoxFit.cover,
filterQuality: FilterQuality.high,
errorBuilder: (
BuildContext context,
Object error,
StackTrace? stackTrace,
) {
return Container(color: Colors.transparent);
},
),
),
),
),
@ -151,7 +154,10 @@ class _TalkViewPageState extends State<TalkViewPage>
//
GestureDetector(
onTap: () {
logic.updateTalkExpect();
if (state.talkStatus.value == TalkStatus.duringCall) {
//
logic.updateTalkExpect();
}
},
child: Container(
width: 50.w,
@ -171,7 +177,9 @@ class _TalkViewPageState extends State<TalkViewPage>
//
GestureDetector(
onTap: () async {
await logic.captureAndSavePng();
if (state.talkStatus.value == TalkStatus.duringCall) {
await logic.captureAndSavePng();
}
},
child: Container(
width: 50.w,
@ -188,10 +196,12 @@ class _TalkViewPageState extends State<TalkViewPage>
//
GestureDetector(
onTap: () async {
if (state.isRecording.value) {
await logic.stopRecording();
} else {
await logic.startRecording();
if (state.talkStatus.value == TalkStatus.duringCall) {
if (state.isRecordingScreen.value) {
await logic.stopRecording();
} else {
await logic.startRecording();
}
}
},
child: Container(
@ -234,9 +244,15 @@ class _TalkViewPageState extends State<TalkViewPage>
Colors.white,
longPress: () async {
if (state.talkStatus.value == TalkStatus.answeredSuccessfully ||
state.talkStatus.value == TalkStatus.duringCall) {}
state.talkStatus.value == TalkStatus.duringCall) {
print('开始录音');
logic.startProcessingAudio();
}
},
longPressUp: () async {
print('停止录音');
logic.stopProcessingAudio();
},
longPressUp: () async {},
onClick: () async {
if (state.talkStatus.value == TalkStatus.waitingAnswer) {
//

View File

@ -21,7 +21,6 @@ enum NetworkStatus {
}
class TalkViewState {
int udpSendDataFrameNumber = 0; //
// var isSenderAudioData = false.obs;//
@ -35,7 +34,6 @@ class TalkViewState {
Rx<Uint8List> listData = Uint8List(0).obs; //
RxList<int> listAudioData = <int>[].obs; //
GlobalKey globalKey = GlobalKey();
late final VoiceProcessor? voiceProcessor;
late Timer oneMinuteTimeTimer =
Timer(const Duration(seconds: 1), () {}); // 60
@ -47,16 +45,12 @@ class TalkViewState {
late Timer openDoorTimer;
late AnimationController animationController;
late Timer autoBackTimer =
Timer(const Duration(seconds: 1), () {}); //30
late Timer realTimePicTimer =
Timer(const Duration(seconds: 1), () {}); //
RxInt elapsedSeconds = 0.obs;
//
List<TalkData> audioBuffer = <TalkData>[].obs;
List<TalkData> videoBuffer = <TalkData>[].obs;
@ -65,6 +59,7 @@ class TalkViewState {
// startChartTalkStatus
final StartChartTalkStatus startChartTalkStatus =
StartChartTalkStatus.instance;
//
final TalkDataRepository talkDataRepository = TalkDataRepository.instance;
RxInt lastFrameTimestamp = 0.obs; // ,
@ -73,7 +68,14 @@ class TalkViewState {
RxInt alertCount = 0.obs; //
RxInt maxAlertNumber = 3.obs; //
RxBool isOpenVoice = true.obs; //
RxBool isRecording = true.obs; //
RxBool isRecordingScreen = false.obs; //
RxBool isRecordingAudio = false.obs; //
Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; //
Rx<DateTime> endRecordingAudioTime= DateTime.now().obs; //
RxInt recordingAudioTime= 0.obs; //
RxDouble fps = 0.0.obs; // FPS
late VoiceProcessor? voiceProcessor; //
final int frameLength = 320; //320
final int sampleRate = 8000; //8000
List<List<int>> recordingAudioAllFrames = <List<int>>[]; //
}