fix: adjust sending of g711 audio data, add echo cancellation, increase the buffer size, and adjust A-law decoding

liyi 2025-01-16 15:55:14 +08:00
parent b876f608e4
commit 911396e1f3
3 changed files with 65 additions and 41 deletions

View File

@@ -24,16 +24,28 @@ class G711 {
int decodeG711(int encodedValue, bool isALaw) {
if (isALaw) {
// A-law decoding
encodedValue = ~encodedValue;
int t = ((encodedValue & 0x0F) << 3) + 0x84;
t <<= (encodedValue & 0x70) >> 4;
return (encodedValue & 0x80) != 0 ? 0x84 - t : t - 0x84;
encodedValue ^= 0x55; // A-law: toggle the even bits
int sign = encodedValue & 0x80; // sign bit
int exponent = (encodedValue & 0x70) >> 4; // segment (exponent)
int mantissa = encodedValue & 0x0F; // mantissa
int pcmSample = (mantissa << 4) + 0x84; // rebuild the magnitude
pcmSample <<= exponent; // scale by the segment
pcmSample = (sign == 0) ? pcmSample : -pcmSample; // apply the sign
return pcmSample;
} else {
// μ-law decoding
encodedValue = ~encodedValue;
int t = ((encodedValue & 0x0F) << 3) + 0x84;
t <<= (encodedValue & 0x70) >> 4;
return (encodedValue & 0x80) != 0 ? 0x84 - t : t - 0x84;
encodedValue ^= 0xFF; // μ-law: invert all bits
int sign = encodedValue & 0x80; // sign bit
int exponent = (encodedValue & 0x70) >> 4; // segment (exponent)
int mantissa = encodedValue & 0x0F; // mantissa
int pcmSample = (mantissa << 3) + 0x84; // mantissa plus μ-law bias
pcmSample <<= exponent + 1; // μ-law shifts one extra bit here
pcmSample = (sign == 0) ? pcmSample : -pcmSample; // apply the sign
return pcmSample;
}
}
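For comparison: the removed branches above are the classic Sun g711.c ulaw2linear() formula applied to both laws, while the new code expands sign, exponent, and mantissa directly. Below is a reference-style sketch of both decoders in Dart (illustration only, not part of this commit) that can be used to spot-check the simplified decoder's output:

// Reference-style ITU-T G.711 decoders, ported from the widely used
// Sun g711.c code (sketch for comparison; not part of this change set).
int alaw2linear(int aVal) {
  aVal = (aVal ^ 0x55) & 0xFF;        // toggle the even bits
  int t = (aVal & 0x0F) << 4;         // mantissa
  final int seg = (aVal & 0x70) >> 4; // segment (exponent)
  t = (seg == 0) ? t + 8 : (t + 0x108) << (seg - 1);
  return (aVal & 0x80) != 0 ? t : -t; // sign bit set means positive
}

int ulaw2linear(int uVal) {
  uVal = (~uVal) & 0xFF;               // invert all bits
  int t = ((uVal & 0x0F) << 3) + 0x84; // mantissa plus bias
  t <<= (uVal & 0x70) >> 4;            // scale by segment
  return (uVal & 0x80) != 0 ? (0x84 - t) : (t - 0x84); // remove bias, apply sign
}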

View File

@@ -1,6 +1,7 @@
import 'dart:convert';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:get/get.dart';
import 'package:star_lock/talk/startChart/constant/message_type_constant.dart';
import 'package:star_lock/talk/startChart/constant/talk_status.dart';
import 'package:star_lock/talk/startChart/entity/scp_message.dart';
@@ -34,6 +35,7 @@ class UdpTalkHangUpHandler extends ScpMessageBaseHandle
talkDataOverTimeTimerManager.cancel();
EasyLoading.showToast('已挂断');
Get.back();
}
@override

View File

@@ -41,11 +41,12 @@ class TalkViewLogic extends BaseGetXController {
final TalkViewState state = TalkViewState();
final LockDetailState lockDetailState = Get.put(LockDetailLogic()).state;
Timer? _syncTimer; // video frame sync timer
Timer? _audioTimer; // audio playback timer
int _startTime = 0; // playback start time in ms since epoch
int bufferSize = 20; // frame buffer size
int audioBufferSize = 640; // audio buffer size
final List<int> frameTimestamps = []; // frame timestamps used to estimate FPS
int frameIntervalMs = 35; // video frame interval in ms
int bufferSize = 50; // frame buffer size
int frameIntervalMs = 45; // video frame interval, 45 ms ≈ 22 FPS
int audioFrameIntervalMs = 20; // audio playback interval in ms
int minFrameIntervalMs = 30; // minimum interval, 30 ms ≈ 33 FPS
int maxFrameIntervalMs = 100; // maximum interval, 100 ms ≈ 10 FPS
// int maxFrameIntervalMs = 100; // 10 FPS
@@ -53,11 +54,11 @@ class TalkViewLogic extends BaseGetXController {
/// Initialize the PCM sound player.
void _initFlutterPcmSound() {
const int sampleRate = 8000;
FlutterPcmSound.setLogLevel(LogLevel.verbose);
FlutterPcmSound.setLogLevel(LogLevel.none);
FlutterPcmSound.setup(sampleRate: sampleRate, channelCount: 1);
// set the threshold at which the plugin asks for more PCM data to feed
if (Platform.isAndroid) {
FlutterPcmSound.setFeedThreshold(sampleRate ~/ 2); // Android
FlutterPcmSound.setFeedThreshold(1024); // Android
} else {
FlutterPcmSound.setFeedThreshold(sampleRate ~/ 32); // non-Android (iOS)
}
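A note on the new feed threshold (illustration only; it assumes flutter_pcm_sound counts the threshold in sample frames): 1024 frames at 8 kHz is roughly 128 ms of queued audio before the feed callback fires, versus 500 ms with the previous sampleRate ~/ 2, so refills now happen much earlier.

// Rough refill latency implied by a feed threshold (sketch, not in this commit).
double thresholdMs(int thresholdFrames, int sampleRate) =>
    thresholdFrames * 1000 / sampleRate;

void main() {
  print(thresholdMs(1024, 8000));       // 128.0 ms, new Android threshold
  print(thresholdMs(8000 ~/ 2, 8000));  // 500.0 ms, old Android threshold
  print(thresholdMs(8000 ~/ 32, 8000)); // 31.25 ms, non-Android threshold
}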
@@ -88,7 +89,7 @@ class TalkViewLogic extends BaseGetXController {
// handle the payload according to its content type
switch (contentType) {
case TalkData_ContentTypeE.G711:
if (state.audioBuffer.length >= audioBufferSize) {
if (state.audioBuffer.length >= bufferSize) {
state.audioBuffer.removeAt(0); // buffer is full: drop the oldest audio frame
// readAudioBufferSize.removeAt(0); //
}
@@ -130,14 +131,17 @@ class TalkViewLogic extends BaseGetXController {
/// Play one buffered frame of audio data.
void _playAudioData(TalkData talkData) async {
// final list = G711().convertList(talkData.content);
final list = G711().decodeAndDenoise(talkData.content, true, 8000, 300, 50);
// wrap the decoded PCM samples in a PcmArrayInt16
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
FlutterPcmSound.feed(fromList);
if (!state.isPlaying.value) {
FlutterPcmSound.play();
state.isPlaying.value = true;
if (state.isOpenVoice.value) {
// final list = G711().convertList(talkData.content);
final list = G711().convertList(talkData.content);
// final list = G711().decodeAndDenoise(talkData.content, true,8000, 300, 50);
// wrap the decoded PCM samples in a PcmArrayInt16
final PcmArrayInt16 fromList = PcmArrayInt16.fromList(list);
FlutterPcmSound.feed(fromList);
if (!state.isPlaying.value) {
FlutterPcmSound.play();
state.isPlaying.value = true;
}
}
}
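On the buffer change above: with the audio queue now capped by bufferSize (50 entries) and one entry consumed per audioFrameIntervalMs tick, the queue holds about a second of audio before the oldest frame is dropped. A quick check (sketch only; it assumes one buffered TalkData entry is played per tick):

// Approximate audio covered by the queue before old frames are dropped.
int bufferedAudioMs(int queueCapacity, int tickIntervalMs) =>
    queueCapacity * tickIntervalMs;

void main() {
  print(bufferedAudioMs(50, 20)); // 1000 ms with bufferSize = 50 and a 20 ms tick
}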
@@ -154,8 +158,6 @@ class TalkViewLogic extends BaseGetXController {
Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
// dynamically adjust the frame interval
_adjustFrameInterval();
_playFrames();
});
});
}
@@ -184,29 +186,37 @@ class TalkViewLogic extends BaseGetXController {
_syncTimer?.cancel();
_syncTimer =
Timer.periodic(Duration(milliseconds: frameIntervalMs), (timer) {
_playFrames();
// play any video frames that are due
_playVideoFrames();
});
_audioTimer?.cancel();
_audioTimer =
Timer.periodic(Duration(milliseconds: audioFrameIntervalMs), (timer) {
final currentTime = DateTime.now().millisecondsSinceEpoch;
final elapsedTime = currentTime - _startTime;
// play buffered audio once its timestamp is due
if (state.audioBuffer.isNotEmpty &&
state.audioBuffer.first.durationMs <= elapsedTime) {
// only play when the speaker is enabled
if (state.isOpenVoice.value) {
_playAudioData(state.audioBuffer.removeAt(0));
} else {
// speaker is off: discard the frame so stale audio does not pile up
state.audioBuffer.removeAt(0);
}
}
});
}
}
void _playFrames() {
void _playVideoFrames() {
final currentTime = DateTime.now().millisecondsSinceEpoch;
final elapsedTime = currentTime - _startTime;
// play buffered audio once its timestamp is due
if (state.audioBuffer.isNotEmpty &&
state.audioBuffer.first.durationMs <= elapsedTime) {
// only play when the speaker is enabled
if (state.isOpenVoice.value) {
_playAudioData(state.audioBuffer.removeAt(0));
} else {
// speaker is off: discard the frame so stale audio does not pile up
state.audioBuffer.removeAt(0);
}
}
// cap how many video frames are processed per tick so the loop cannot stall the UI
int maxFramesToProcess = 5; // process at most 5 frames per pass