fix: adjust the iOS audio recording send logic

liyi 2025-08-15 13:55:39 +08:00
parent 47ddb9b72a
commit fc3f27e951
3 changed files with 78 additions and 119 deletions

View File

@@ -561,8 +561,11 @@ class ImageTransmissionLogic extends BaseGetXController {
state.voiceProcessor = VoiceProcessor.instance;
}
Timer? _startProcessingAudioTimer;
// Start processing audio.
Future<void> startProcessingAudio() async {
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
@@ -580,7 +583,6 @@ class ImageTransmissionLogic extends BaseGetXController {
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to start recorder: $ex';
}
state.isOpenVoice.value = false;
}
/// Stop processing audio.
@@ -602,48 +604,53 @@ class ImageTransmissionLogic extends BaseGetXController {
} finally {
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.isOpenVoice.value = true;
}
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
}
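For context, the start/stop paths above follow the flutter_voice_processor capture pattern: check the record permission, start the processor with a frame length and sample rate, and receive PCM frames through a listener. A minimal standalone sketch of that wiring, with the listener-registration details treated as assumptions since the diff only shows start and stop:

import 'package:flutter_voice_processor/flutter_voice_processor.dart';

/// Minimal capture wiring mirroring the start/stop logic in the diff.
class AudioCapture {
  final VoiceProcessor _vp = VoiceProcessor.instance;

  Future<bool> start(
    int frameLength,
    int sampleRate,
    void Function(List<int> frame) onFrame,
  ) async {
    // Starting without the record-audio permission cannot succeed.
    final granted = await _vp.hasRecordAudioPermission();
    if (granted != true) return false;
    _vp.addFrameListener(onFrame);
    _vp.addErrorListener((e) => print('voice processor error: ${e.message}'));
    await _vp.start(frameLength, sampleRate);
    return true;
  }

  Future<void> stop() async {
    await _vp.stop();
    // The diff additionally cancels _startProcessingAudioTimer and
    // clears _bufferedAudioFrames at this point.
  }
}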
static const int chunkSize = 320; // 320 bytes = 4 × 10 ms of 8 kHz G.711 (80 bytes per 10 ms)
static const int intervalMs = 40; // send one chunk every 40 ms
void _sendAudioChunk(Timer timer) async {
if (_bufferedAudioFrames.length < chunkSize) {
// Not enough data buffered yet; skip this tick.
return;
}
// Take the first chunkSize bytes.
final chunk = _bufferedAudioFrames.sublist(0, chunkSize);
// Drop the consumed bytes from the front of the buffer.
_bufferedAudioFrames.removeRange(0, chunkSize);
// Millisecond timestamp, wrapped to keep the value small.
final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
print('Send chunk ${timer.tick}: ${chunk.take(10).toList()}...');
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: chunk,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
}
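The two constants are consistent with the G.711 byte rate: 8,000 samples/s at one byte per sample is 8,000 bytes/s, and 320 bytes every 40 ms is likewise 8,000 bytes/s, so the timer drains the buffer at the same average rate the recorder fills it. A self-contained sketch of this drain pattern, with the network send stubbed out:

import 'dart:async';

const int chunkSize = 320; // 40 ms of 8 kHz A-law audio
const int intervalMs = 40;

final List<int> buffered = <int>[];
Timer? sender;

void startSender(void Function(List<int> chunk) send) {
  // Idempotent: only one periodic sender runs at a time.
  sender ??= Timer.periodic(const Duration(milliseconds: intervalMs), (_) {
    if (buffered.length < chunkSize) return; // underrun; wait for more data
    final chunk = buffered.sublist(0, chunkSize);
    buffered.removeRange(0, chunkSize); // consume from the front
    send(chunk);
  });
}

void stopSender() {
  sender?.cancel();
  sender = null;
  buffered.clear();
}

Packing four 10 ms frames into one send trades up to 40 ms of extra latency for a quarter of the per-packet overhead.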
// Frame callback from the voice processor.
Future<void> _onFrame(List<int> frame) async {
// Guard: if sending falls behind, drop the backlog instead of growing it.
if (_bufferedAudioFrames.length > state.frameLength * 3) {
_bufferedAudioFrames.clear(); // drop stale audio
return;
}
final applyGain = _applyGain(frame, 1.6);
// Amplify the frame.
List<int> amplifiedFrame = _applyGain(frame, 1.6);
// Encode the PCM to G.711 data.
List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0 = A-law
List<int> encodedData = G711Tool.encode(applyGain, 0); // 0 = A-law
_bufferedAudioFrames.addAll(encodedData);
// Millisecond timestamp.
final int ms = DateTime.now().millisecondsSinceEpoch % 1000000; // wrapped to keep it small
int getFrameLength = state.frameLength;
if (Platform.isIOS) {
getFrameLength = state.frameLength * 2;
}
// Send once a full frame's worth of data is buffered.
if (_bufferedAudioFrames.length >= state.frameLength) {
try {
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: _bufferedAudioFrames,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
} finally {
_bufferedAudioFrames.clear(); // always clear, even if the send failed
}
} else {
_bufferedAudioFrames.addAll(encodedData);
// Start the periodic sender once more than one chunk has accumulated.
if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
}
}
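_applyGain is referenced here but defined outside the diff; a plausible implementation (the clamping behavior is an assumption) scales each 16-bit sample and saturates rather than letting loud input wrap around:

/// Scales 16-bit PCM samples by [gain], clamping to the signed
/// 16-bit range so overdriven samples saturate instead of wrapping.
List<int> _applyGain(List<int> frame, double gain) => [
      for (final s in frame) (s * gain).round().clamp(-32768, 32767).toInt()
    ];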
// Error callback from the voice processor.
void _onError(VoiceProcessorException error) {
AppLog.log(error.message!);
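G711Tool.encode is likewise external to this diff. For reference, textbook G.711 A-law compression of a single 16-bit sample looks like the sketch below (ported from the classic public-domain g711.c; not necessarily what G711Tool does internally):

// Segment boundaries for the 13-bit A-law magnitude.
const List<int> _segEnd = [0x1F, 0x3F, 0x7F, 0xFF, 0x1FF, 0x3FF, 0x7FF, 0xFFF];

/// Encodes one signed 16-bit PCM sample as an 8-bit A-law byte.
int linearToAlaw(int pcm) {
  int mask;
  pcm = pcm >> 3; // A-law quantizes a 13-bit magnitude
  if (pcm >= 0) {
    mask = 0xD5; // sign bit set, even bits toggled per the standard
  } else {
    mask = 0x55;
    pcm = -pcm - 1;
  }
  // Find the logarithmic segment the magnitude falls into.
  int seg = 0;
  while (seg < 8 && pcm > _segEnd[seg]) {
    seg++;
  }
  if (seg >= 8) return 0x7F ^ mask; // clip to the largest codeword
  int aval = seg << 4;
  aval |= (seg < 2) ? (pcm >> 1) & 0x0F : (pcm >> seg) & 0x0F;
  return aval ^ mask;
}

The 0 passed as the second argument of G711Tool.encode selects A-law per the inline comment; μ-law would be the other G.711 variant.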

View File

@@ -8,10 +8,7 @@ import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/services.dart';
import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:flutter_sound/public/flutter_sound_recorder.dart';
import 'package:flutter_voice_processor/flutter_voice_processor.dart';
import 'package:gallery_saver/gallery_saver.dart';
import 'package:get/get.dart';
import 'package:image_gallery_saver/image_gallery_saver.dart';
import 'package:path_provider/path_provider.dart';
@@ -20,28 +17,20 @@ import 'package:star_lock/app_settings/app_settings.dart';
import 'package:star_lock/login/login/entity/LoginEntity.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart';
import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart';
import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart';
import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart';
import 'package:star_lock/network/api_repository.dart';
import 'package:star_lock/talk/call/callTalk.dart';
import 'package:star_lock/talk/call/g711.dart';
import 'package:star_lock/talk/starChart/constant/talk_status.dart';
import 'package:star_lock/talk/starChart/entity/scp_message.dart';
import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart';
import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart';
import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart';
import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart';
import 'package:star_lock/talk/starChart/star_chart_manage.dart';
import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart';
import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart';
import 'package:star_lock/tools/G711Tool.dart';
import 'package:star_lock/tools/bugly/bugly_tool.dart';
import 'package:star_lock/tools/callkit_handler.dart';
import 'package:star_lock/tools/commonDataManage.dart';
import 'package:star_lock/tools/storage.dart';
import 'package:video_decode_plugin/nalu_utils.dart';
import 'package:video_decode_plugin/video_decode_plugin.dart';
import '../../../../tools/baseGetXController.dart';
@@ -861,51 +850,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController {
if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
}
// if (_startProcessingAudioTimer == null &&
// _bufferedAudioFrames.length > 320) {
// // 10 ms frames; 320-byte chunks
// const int intervalMs = 40;
// const int chunkSize = 320;
// _startProcessingAudioTimer =
// Timer.periodic(Duration(milliseconds: intervalMs), (timer) async {
// // read the tick-th 320-byte window out of _bufferedAudioFrames
// int startIndex = (timer.tick - 1) * chunkSize; // tick starts at 1
// int endIndex = startIndex + chunkSize;
// // millisecond timestamp
// final int ms =
// DateTime.now().millisecondsSinceEpoch % 1000000;
//
// // wrap around and reuse buffered data when the window runs past the end
// List<int> chunk;
// if (endIndex <= _bufferedAudioFrames.length) {
// chunk = _bufferedAudioFrames.sublist(startIndex, endIndex);
// } else {
// // refill from the start of the buffer until the chunk is full
// chunk = <int>[];
// while (chunk.length < chunkSize) {
// int remaining = chunkSize - chunk.length;
// int take = endIndex > _bufferedAudioFrames.length
// ? _bufferedAudioFrames.length -
// (startIndex % _bufferedAudioFrames.length)
// : remaining;
// take = take.clamp(0, remaining);
// int start = startIndex % _bufferedAudioFrames.length;
// chunk.addAll(_bufferedAudioFrames.sublist(start,
// (start + take).clamp(start, _bufferedAudioFrames.length)));
// startIndex += take;
// }
// }
// // log the first 10 bytes
// print('Send chunk ${timer.tick}: ${chunk.take(10).toList()}...');
// await StartChartManage().sendTalkDataMessage(
// talkData: TalkData(
// content: chunk,
// contentType: TalkData_ContentTypeE.G711,
// durationMs: ms,
// ),
// );
// });
// }
}
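The deleted block above derived its read position from timer.tick, which drifts as soon as frames arrive slower or faster than the timer fires, and papered over that with wrap-around reads of stale data. The replacement _sendAudioChunk instead consumes destructively from the front of the buffer, so the read position can never outrun what was actually produced. The difference in a nutshell (illustrative only):

// Deleted approach: position is a function of elapsed ticks, not of
// how much audio has actually been buffered.
int startIndexFor(int tick, int chunkSize) => (tick - 1) * chunkSize;

// New approach: always take from the front and discard what was sent.
List<int> drainChunk(List<int> buffer, int chunkSize) {
  final chunk = buffer.sublist(0, chunkSize);
  buffer.removeRange(0, chunkSize);
  return chunk;
}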
//

View File

@@ -558,8 +558,11 @@ class TalkViewLogic extends BaseGetXController {
state.voiceProcessor = VoiceProcessor.instance;
}
Timer? _startProcessingAudioTimer;
// Start processing audio.
Future<void> startProcessingAudio() async {
try {
if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) {
await state.voiceProcessor?.start(state.frameLength, state.sampleRate);
@@ -577,7 +580,6 @@ class TalkViewLogic extends BaseGetXController {
} on PlatformException catch (ex) {
// state.errorMessage.value = 'Failed to start recorder: $ex';
}
state.isOpenVoice.value = false;
}
/// Stop processing audio.
@@ -599,45 +601,51 @@ class TalkViewLogic extends BaseGetXController {
} finally {
final bool? isRecording = await state.voiceProcessor?.isRecording();
state.isRecordingAudio.value = isRecording!;
state.isOpenVoice.value = true;
}
_startProcessingAudioTimer?.cancel();
_startProcessingAudioTimer = null;
_bufferedAudioFrames.clear();
}
static const int chunkSize = 320; // 320 bytes = 4 × 10 ms of 8 kHz G.711 (80 bytes per 10 ms)
static const int intervalMs = 40; // send one chunk every 40 ms
void _sendAudioChunk(Timer timer) async {
if (_bufferedAudioFrames.length < chunkSize) {
// Not enough data buffered yet; skip this tick.
return;
}
// Take the first chunkSize bytes.
final chunk = _bufferedAudioFrames.sublist(0, chunkSize);
// Drop the consumed bytes from the front of the buffer.
_bufferedAudioFrames.removeRange(0, chunkSize);
// Millisecond timestamp, wrapped to keep the value small.
final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
print('Send chunk ${timer.tick}: ${chunk.take(10).toList()}...');
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: chunk,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
}
// Frame callback from the voice processor.
Future<void> _onFrame(List<int> frame) async {
// Guard: if sending falls behind, drop the backlog instead of growing it.
if (_bufferedAudioFrames.length > state.frameLength * 3) {
_bufferedAudioFrames.clear(); // drop stale audio
return;
}
final applyGain = _applyGain(frame, 1.6);
// Amplify the frame.
List<int> amplifiedFrame = _applyGain(frame, 1.6);
// Encode the PCM to G.711 data.
List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0 = A-law
List<int> encodedData = G711Tool.encode(applyGain, 0); // 0 = A-law
_bufferedAudioFrames.addAll(encodedData);
// Millisecond timestamp.
final int ms = DateTime.now().millisecondsSinceEpoch % 1000000; // wrapped to keep it small
int getFrameLength = state.frameLength;
if (Platform.isIOS) {
getFrameLength = state.frameLength * 2;
}
// Send once a full frame's worth of data is buffered.
if (_bufferedAudioFrames.length >= state.frameLength) {
try {
await StartChartManage().sendTalkDataMessage(
talkData: TalkData(
content: _bufferedAudioFrames,
contentType: TalkData_ContentTypeE.G711,
durationMs: ms,
),
);
} finally {
_bufferedAudioFrames.clear(); // always clear, even if the send failed
}
} else {
_bufferedAudioFrames.addAll(encodedData);
// Start the periodic sender once more than one chunk has accumulated.
if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) {
_startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk);
}
}
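Putting the pieces together, a small offline harness can sanity-check the capture path without any audio hardware, reusing the linearToAlaw and gain sketches above (hypothetical helpers, not part of the app):

import 'dart:math';

const int chunkSize = 320;

void main() {
  final buffered = <int>[];
  final rng = Random(42);
  // Simulate 20 frames of 512 signed 16-bit samples each.
  for (var i = 0; i < 20; i++) {
    final frame = List<int>.generate(512, (_) => rng.nextInt(65536) - 32768);
    final amplified = [
      for (final s in frame) (s * 1.6).round().clamp(-32768, 32767).toInt()
    ];
    buffered.addAll(amplified.map(linearToAlaw));
  }
  // Drain in fixed-size chunks exactly as _sendAudioChunk does.
  var sent = 0;
  while (buffered.length >= chunkSize) {
    buffered.removeRange(0, chunkSize);
    sent++;
  }
  // 20 × 512 = 10240 bytes → 32 chunks of 320 bytes, none left over.
  print('sent $sent chunks, ${buffered.length} bytes remaining');
}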