diff --git a/lib/talk/starChart/views/imageTransmission/image_transmission_logic.dart b/lib/talk/starChart/views/imageTransmission/image_transmission_logic.dart index c9c42baf..96d03afe 100644 --- a/lib/talk/starChart/views/imageTransmission/image_transmission_logic.dart +++ b/lib/talk/starChart/views/imageTransmission/image_transmission_logic.dart @@ -561,8 +561,11 @@ class ImageTransmissionLogic extends BaseGetXController { state.voiceProcessor = VoiceProcessor.instance; } + Timer? _startProcessingAudioTimer; + //开始录音 Future startProcessingAudio() async { + try { if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) { await state.voiceProcessor?.start(state.frameLength, state.sampleRate); @@ -580,7 +583,6 @@ class ImageTransmissionLogic extends BaseGetXController { } on PlatformException catch (ex) { // state.errorMessage.value = 'Failed to start recorder: $ex'; } - state.isOpenVoice.value = false; } /// 停止录音 @@ -602,48 +604,53 @@ class ImageTransmissionLogic extends BaseGetXController { } finally { final bool? 
isRecording = await state.voiceProcessor?.isRecording(); state.isRecordingAudio.value = isRecording!; - state.isOpenVoice.value = true; } + _startProcessingAudioTimer?.cancel(); + _startProcessingAudioTimer = null; + _bufferedAudioFrames.clear(); + } + + static const int chunkSize = 320; // 每次发送320字节(G.711) + static const int intervalMs = 40; // 每40ms发送一次(每次1个chunk) + void _sendAudioChunk(Timer timer) async { + if (_bufferedAudioFrames.length < chunkSize) { + // 数据不足,等待下一周期 + return; + } + + // 截取前 chunkSize 个字节 + final chunk = _bufferedAudioFrames.sublist(0, chunkSize); + // 更新缓冲区:移除已发送部分 + _bufferedAudioFrames.removeRange(0, chunkSize); + + // 获取时间戳(相对时间) + final int ms = DateTime.now().millisecondsSinceEpoch % 1000000; + + print('Send chunk ${timer.tick}: ${chunk.take(10).toList()}...'); + + await StartChartManage().sendTalkDataMessage( + talkData: TalkData( + content: chunk, + contentType: TalkData_ContentTypeE.G711, + durationMs: ms, + ), + ); + } // 音频帧处理 Future _onFrame(List frame) async { - // 添加最大缓冲限制 - if (_bufferedAudioFrames.length > state.frameLength * 3) { - _bufferedAudioFrames.clear(); // 清空过多积累的数据 - return; - } + final applyGain = _applyGain(frame, 1.6); - // 首先应用固定增益提升基础音量 - List amplifiedFrame = _applyGain(frame, 1.6); // 编码为G711数据 - List encodedData = G711Tool.encode(amplifiedFrame, 0); // 0表示A-law + List encodedData = G711Tool.encode(applyGain, 0); // 0表示A-law _bufferedAudioFrames.addAll(encodedData); - // 使用相对时间戳 - final int ms = DateTime.now().millisecondsSinceEpoch % 1000000; // 使用循环时间戳 - int getFrameLength = state.frameLength; - if (Platform.isIOS) { - getFrameLength = state.frameLength * 2; - } - // 添加发送间隔控制 - if (_bufferedAudioFrames.length >= state.frameLength) { - try { - await StartChartManage().sendTalkDataMessage( - talkData: TalkData( - content: _bufferedAudioFrames, - contentType: TalkData_ContentTypeE.G711, - durationMs: ms, - ), - ); - } finally { - _bufferedAudioFrames.clear(); // 确保清理缓冲区 - } - } else { - 
_bufferedAudioFrames.addAll(encodedData); + + // 启动定时发送器(仅启动一次) + if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) { + _startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk); } } - // 错误监听 void _onError(VoiceProcessorException error) { AppLog.log(error.message!); diff --git a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart index 5ec8ae91..c6cb292e 100644 --- a/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart +++ b/lib/talk/starChart/views/native/talk_view_native_decode_logic.dart @@ -8,10 +8,7 @@ import 'package:flutter/foundation.dart'; import 'package:flutter/rendering.dart'; import 'package:flutter/services.dart'; import 'package:flutter_pcm_sound/flutter_pcm_sound.dart'; -import 'package:flutter_sound/flutter_sound.dart'; -import 'package:flutter_sound/public/flutter_sound_recorder.dart'; import 'package:flutter_voice_processor/flutter_voice_processor.dart'; -import 'package:gallery_saver/gallery_saver.dart'; import 'package:get/get.dart'; import 'package:image_gallery_saver/image_gallery_saver.dart'; import 'package:path_provider/path_provider.dart'; @@ -20,28 +17,20 @@ import 'package:star_lock/app_settings/app_settings.dart'; import 'package:star_lock/login/login/entity/LoginEntity.dart'; import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_logic.dart'; import 'package:star_lock/main/lockDetail/lockDetail/lockDetail_state.dart'; -import 'package:star_lock/main/lockDetail/lockDetail/lockNetToken_entity.dart'; -import 'package:star_lock/main/lockDetail/lockSet/lockSet/lockSetInfo_entity.dart'; import 'package:star_lock/main/lockMian/entity/lockListInfo_entity.dart'; import 'package:star_lock/network/api_repository.dart'; -import 'package:star_lock/talk/call/callTalk.dart'; -import 'package:star_lock/talk/call/g711.dart'; import 
'package:star_lock/talk/starChart/constant/talk_status.dart'; import 'package:star_lock/talk/starChart/entity/scp_message.dart'; -import 'package:star_lock/talk/starChart/handle/other/packet_loss_statistics.dart'; import 'package:star_lock/talk/starChart/handle/other/talk_data_model.dart'; import 'package:star_lock/talk/starChart/proto/talk_data.pb.dart'; import 'package:star_lock/talk/starChart/proto/talk_data_h264_frame.pb.dart'; import 'package:star_lock/talk/starChart/proto/talk_expect.pb.dart'; import 'package:star_lock/talk/starChart/star_chart_manage.dart'; import 'package:star_lock/talk/starChart/views/native/talk_view_native_decode_state.dart'; -import 'package:star_lock/talk/starChart/views/talkView/talk_view_state.dart'; import 'package:star_lock/tools/G711Tool.dart'; -import 'package:star_lock/tools/bugly/bugly_tool.dart'; import 'package:star_lock/tools/callkit_handler.dart'; import 'package:star_lock/tools/commonDataManage.dart'; import 'package:star_lock/tools/storage.dart'; -import 'package:video_decode_plugin/nalu_utils.dart'; import 'package:video_decode_plugin/video_decode_plugin.dart'; import '../../../../tools/baseGetXController.dart'; @@ -861,51 +850,6 @@ class TalkViewNativeDecodeLogic extends BaseGetXController { if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) { _startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk); } - // if (_startProcessingAudioTimer == null && - // _bufferedAudioFrames.length > 320) { - // // 每 10ms 发送一次 320 长度的数据 - // const int intervalMs = 40; - // const int chunkSize = 320; - // _startProcessingAudioTimer = - // Timer.periodic(Duration(milliseconds: intervalMs), (timer) async { - // // 从 _bufferedAudioFrames 中截取 320 个数据(循环发送) - // int startIndex = (timer.tick - 1) * chunkSize; // tick 从 1 开始 - // int endIndex = startIndex + chunkSize; - // // 使用相对时间戳 - // final int ms = - // DateTime.now().millisecondsSinceEpoch % 1000000; // 使用循环时间戳 - // - 
// // 循环使用数据(防止越界) - // List chunk; - // if (endIndex <= _bufferedAudioFrames.length) { - // chunk = _bufferedAudioFrames.sublist(startIndex, endIndex); - // } else { - // // 超出范围时循环 - // chunk = []; - // while (chunk.length < chunkSize) { - // int remaining = chunkSize - chunk.length; - // int take = endIndex > _bufferedAudioFrames.length - // ? _bufferedAudioFrames.length - - // (startIndex % _bufferedAudioFrames.length) - // : remaining; - // take = take.clamp(0, remaining); - // int start = startIndex % _bufferedAudioFrames.length; - // chunk.addAll(_bufferedAudioFrames.sublist(start, - // (start + take).clamp(start, _bufferedAudioFrames.length))); - // startIndex += take; - // } - // } - // // 示例:打印前10个数据 - // print('Send chunk ${timer.tick}: ${chunk.take(10).toList()}...'); - // await StartChartManage().sendTalkDataMessage( - // talkData: TalkData( - // content: chunk, - // contentType: TalkData_ContentTypeE.G711, - // durationMs: ms, - // ), - // ); - // }); - // } } // 错误监听 diff --git a/lib/talk/starChart/views/talkView/talk_view_logic.dart b/lib/talk/starChart/views/talkView/talk_view_logic.dart index cfe0d6f2..d58e5093 100644 --- a/lib/talk/starChart/views/talkView/talk_view_logic.dart +++ b/lib/talk/starChart/views/talkView/talk_view_logic.dart @@ -558,8 +558,11 @@ class TalkViewLogic extends BaseGetXController { state.voiceProcessor = VoiceProcessor.instance; } + Timer? _startProcessingAudioTimer; + //开始录音 Future startProcessingAudio() async { + try { if (await state.voiceProcessor?.hasRecordAudioPermission() ?? false) { await state.voiceProcessor?.start(state.frameLength, state.sampleRate); @@ -577,7 +580,6 @@ class TalkViewLogic extends BaseGetXController { } on PlatformException catch (ex) { // state.errorMessage.value = 'Failed to start recorder: $ex'; } - state.isOpenVoice.value = false; } /// 停止录音 @@ -599,45 +601,51 @@ class TalkViewLogic extends BaseGetXController { } finally { final bool? 
isRecording = await state.voiceProcessor?.isRecording(); state.isRecordingAudio.value = isRecording!; - state.isOpenVoice.value = true; } + _startProcessingAudioTimer?.cancel(); + _startProcessingAudioTimer = null; + _bufferedAudioFrames.clear(); + } + + static const int chunkSize = 320; // 每次发送320字节(G.711) + static const int intervalMs = 40; // 每40ms发送一次(每次1个chunk) + void _sendAudioChunk(Timer timer) async { + if (_bufferedAudioFrames.length < chunkSize) { + // 数据不足,等待下一周期 + return; + } + + // 截取前 chunkSize 个字节 + final chunk = _bufferedAudioFrames.sublist(0, chunkSize); + // 更新缓冲区:移除已发送部分 + _bufferedAudioFrames.removeRange(0, chunkSize); + + // 获取时间戳(相对时间) + final int ms = DateTime.now().millisecondsSinceEpoch % 1000000; + + print('Send chunk ${timer.tick}: ${chunk.take(10).toList()}...'); + + await StartChartManage().sendTalkDataMessage( + talkData: TalkData( + content: chunk, + contentType: TalkData_ContentTypeE.G711, + durationMs: ms, + ), + ); + } // 音频帧处理 Future _onFrame(List frame) async { - // 添加最大缓冲限制 - if (_bufferedAudioFrames.length > state.frameLength * 3) { - _bufferedAudioFrames.clear(); // 清空过多积累的数据 - return; - } + final applyGain = _applyGain(frame, 1.6); - // 首先应用固定增益提升基础音量 - List amplifiedFrame = _applyGain(frame, 1.6); // 编码为G711数据 - List encodedData = G711Tool.encode(amplifiedFrame, 0); // 0表示A-law + List encodedData = G711Tool.encode(applyGain, 0); // 0表示A-law _bufferedAudioFrames.addAll(encodedData); - // 使用相对时间戳 - final int ms = DateTime.now().millisecondsSinceEpoch % 1000000; // 使用循环时间戳 - int getFrameLength = state.frameLength; - if (Platform.isIOS) { - getFrameLength = state.frameLength * 2; - } - // 添加发送间隔控制 - if (_bufferedAudioFrames.length >= state.frameLength) { - try { - await StartChartManage().sendTalkDataMessage( - talkData: TalkData( - content: _bufferedAudioFrames, - contentType: TalkData_ContentTypeE.G711, - durationMs: ms, - ), - ); - } finally { - _bufferedAudioFrames.clear(); // 确保清理缓冲区 - } - } else { - 
_bufferedAudioFrames.addAll(encodedData); + + // 启动定时发送器(仅启动一次) + if (_startProcessingAudioTimer == null && _bufferedAudioFrames.length > chunkSize) { + _startProcessingAudioTimer = Timer.periodic(Duration(milliseconds: intervalMs), _sendAudioChunk); } }