Merge branch 'develop_liyi' into 'canary_release'

Develop liyi

See merge request StarlockTeam/app-starlock!17
Commit 306b366f73 by 李仪, 2025-04-07 10:33:28 +00:00
4 changed files with 69 additions and 46 deletions

View File

@@ -7,7 +7,6 @@ import 'package:flutter/foundation.dart';
 import 'package:flutter/rendering.dart';
 import 'package:flutter/services.dart';
 import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
-import 'package:flutter_screen_recording/flutter_screen_recording.dart';
 import 'package:flutter_voice_processor/flutter_voice_processor.dart';
 import 'package:gallery_saver/gallery_saver.dart';
 import 'package:get/get.dart';
@@ -57,6 +56,11 @@ class TalkViewLogic extends BaseGetXController {
   final Map<String, ui.Image> _imageCache = {};
+  // FPS statistics
+  int _frameCount = 0;
+  int _lastFpsUpdateTime = 0;
+  Timer? _fpsTimer;
   ///
   void _initFlutterPcmSound() {
     const int sampleRate = 8000;
@@ -176,6 +180,17 @@ class TalkViewLogic extends BaseGetXController {
     state.listData.value = Uint8List.fromList(oldestFrame.content);
     state.videoBuffer.removeAt(oldestIndex); //
+    // FPS statistics
+    _frameCount++;
+    final currentTime = DateTime.now().millisecondsSinceEpoch;
+    final elapsed = currentTime - _lastFpsUpdateTime;
+    if (elapsed >= 1000) {
+      // refresh the FPS reading about once per second
+      state.fps.value = (_frameCount * 1000 / elapsed).round();
+      _frameCount = 0;
+      _lastFpsUpdateTime = currentTime;
+    }
     // AppLog.log('🎬 播放帧 - 缓冲区剩余: ${state.videoBuffer.length}/${bufferSize}, '
     //     '播放延迟: ${currentTime - oldestFrame.durationMs}ms, '
     //     '帧时间戳: ${oldestFrame.durationMs}');
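
For reference, the block added above is a rolling frame-rate counter: each played video frame bumps a counter, and once at least a second has elapsed the counter is converted to frames per second with (_frameCount * 1000 / elapsed).round() and the window resets. A minimal standalone sketch of the same technique follows; the FpsMeter class and the main() driver are illustrative only and are not part of this commit.

class FpsMeter {
  int _frameCount = 0;
  int _lastUpdateMs = DateTime.now().millisecondsSinceEpoch;
  int fps = 0; // last computed reading

  /// Call once per rendered frame; refreshes [fps] about once per second.
  void onFrame() {
    _frameCount++;
    final now = DateTime.now().millisecondsSinceEpoch;
    final elapsed = now - _lastUpdateMs;
    if (elapsed >= 1000) {
      fps = (_frameCount * 1000 / elapsed).round();
      _frameCount = 0;
      _lastUpdateMs = now;
    }
  }
}

Future<void> main() async {
  final meter = FpsMeter();
  // Simulate roughly 25 frames per second for two seconds.
  for (var i = 0; i < 50; i++) {
    await Future.delayed(const Duration(milliseconds: 40));
    meter.onFrame();
  }
  print('measured fps: ${meter.fps}'); // prints a value close to 25
}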
@@ -420,35 +435,35 @@ class TalkViewLogic extends BaseGetXController {
   }

   Future<void> startRecording() async {
-    requestPermissions();
-    if (state.isRecordingScreen.value) {
-      showToast('录屏已开始,请勿重复点击');
-    }
-    bool start = await FlutterScreenRecording.startRecordScreen(
-      "Screen Recording", //
-      titleNotification: "Recording in progress", //
-      messageNotification: "Tap to stop recording", //
-    );
-    if (start) {
-      state.isRecordingScreen.value = true;
-    }
+    // requestPermissions();
+    // if (state.isRecordingScreen.value) {
+    //   showToast('录屏已开始,请勿重复点击');
+    // }
+    // bool start = await FlutterScreenRecording.startRecordScreen(
+    //   "Screen Recording", //
+    //   titleNotification: "Recording in progress", //
+    //   messageNotification: "Tap to stop recording", //
+    // );
+    //
+    // if (start) {
+    //   state.isRecordingScreen.value = true;
+    // }
   }

   Future<void> stopRecording() async {
-    String path = await FlutterScreenRecording.stopRecordScreen;
-    print("Recording saved to: $path");
-    //
-    bool? success = await GallerySaver.saveVideo(path);
-    if (success == true) {
-      print("Video saved to gallery");
-    } else {
-      print("Failed to save video to gallery");
-    }
-    showToast('录屏结束,已保存到系统相册');
-    state.isRecordingScreen.value = false;
+    // String path = await FlutterScreenRecording.stopRecordScreen;
+    // print("Recording saved to: $path");
+    //
+    // //
+    // bool? success = await GallerySaver.saveVideo(path);
+    // if (success == true) {
+    //   print("Video saved to gallery");
+    // } else {
+    //   print("Failed to save video to gallery");
+    // }
+    //
+    // showToast('录屏结束,已保存到系统相册');
+    // state.isRecordingScreen.value = false;
   }

   @override
@@ -496,6 +511,7 @@ class TalkViewLogic extends BaseGetXController {
     state.oneMinuteTimeTimer?.cancel(); //
     state.oneMinuteTimeTimer = null; //
     state.oneMinuteTime.value = 0;
     super.onClose();
   }
@@ -659,32 +675,39 @@ class TalkViewLogic extends BaseGetXController {
   //
   Future<void> _onFrame(List<int> frame) async {
+    // guard against unbounded buffer growth
+    if (_bufferedAudioFrames.length > state.frameLength * 3) {
+      _bufferedAudioFrames.clear(); //
+      return;
+    }
     //
     List<int> amplifiedFrame = _applyGain(frame, 1.6);
     // G711 data
     List<int> encodedData = G711Tool.encode(amplifiedFrame, 0); // 0 = A-law
     _bufferedAudioFrames.addAll(encodedData);
-    final int ms = DateTime.now().millisecondsSinceEpoch -
-        state.startRecordingAudioTime.value.millisecondsSinceEpoch;
+    // use epoch ms modulo 1e6 as a compact frame timestamp
+    final int ms = DateTime.now().millisecondsSinceEpoch % 1000000;
     int getFrameLength = state.frameLength;
     if (Platform.isIOS) {
       getFrameLength = state.frameLength * 2;
     }
-    if (_bufferedAudioFrames.length >= getFrameLength) {
-      // send over UDP
-      await StartChartManage()
-          .sendTalkDataMessage(
+    //
+    if (_bufferedAudioFrames.length >= state.frameLength) {
+      try {
+        await StartChartManage().sendTalkDataMessage(
           talkData: TalkData(
             content: _bufferedAudioFrames,
             contentType: TalkData_ContentTypeE.G711,
             durationMs: ms,
           ),
-          )
-          .then((value) {
-        _bufferedAudioFrames.clear();
-      });
+        );
+      } finally {
+        _bufferedAudioFrames.clear(); //
+      }
+    } else {
+      _bufferedAudioFrames.addAll(encodedData);
     }
   }
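
The rewritten send path above also switches from a .then() chain to try/finally, so the outgoing G711 buffer is cleared even when the send throws, and the frame timestamp becomes DateTime.now().millisecondsSinceEpoch % 1000000 instead of an offset from startRecordingAudioTime. Below is a small self-contained sketch of the clear-in-finally pattern, assuming a hypothetical sendFrame function in place of StartChartManage().sendTalkDataMessage.

// Sketch only: buffer encoded audio, send once a full frame is queued,
// and always clear the buffer, whether the send succeeds or throws.
final List<int> _buffer = [];
const int frameLength = 320; // bytes per outgoing frame, as in TalkViewState

// Hypothetical stand-in for the project's UDP send call.
Future<void> sendFrame(List<int> data) async {
  // ... transmit data ...
}

Future<void> onEncodedAudio(List<int> encoded) async {
  _buffer.addAll(encoded);
  if (_buffer.length >= frameLength) {
    try {
      await sendFrame(List<int>.from(_buffer));
    } finally {
      _buffer.clear(); // runs on success and on error alike
    }
  }
}

Future<void> main() async {
  await onEncodedAudio(List<int>.filled(160, 0)); // half a frame: just buffered
  await onEncodedAudio(List<int>.filled(160, 0)); // full frame: sent, then cleared
  print('buffer after send: ${_buffer.length}'); // 0
}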

View File

@@ -45,6 +45,7 @@ class TalkViewState {
   late Timer answerTimer;
   late Timer hangUpTimer;
   late Timer openDoorTimer;
+  Timer? fpsTimer;
   late AnimationController animationController;
   late Timer autoBackTimer =
@@ -79,7 +80,7 @@ class TalkViewState {
   Rx<DateTime> startRecordingAudioTime = DateTime.now().obs; // recording start time
   Rx<DateTime> endRecordingAudioTime = DateTime.now().obs; // recording end time
   RxInt recordingAudioTime = 0.obs; // recording duration
-  RxDouble fps = 0.0.obs; // FPS
+  RxInt fps = 0.obs; // FPS
   late VoiceProcessor? voiceProcessor; //
   final int frameLength = 320; //640
   final int sampleRate = 8000; //8000
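
For reference, fps changes from RxDouble to RxInt here, matching the .round() call added in the playback logic above; with the values shown, one buffered audio frame of frameLength = 320 samples at sampleRate = 8000 Hz corresponds to 320 / 8000 s = 40 ms of audio.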

View File

@@ -7,7 +7,6 @@ import 'package:flutter/foundation.dart';
 import 'package:flutter/rendering.dart';
 import 'package:flutter/services.dart';
 import 'package:flutter_pcm_sound/flutter_pcm_sound.dart';
-import 'package:flutter_screen_recording/flutter_screen_recording.dart';
 import 'package:flutter_voice_processor/flutter_voice_processor.dart';
 import 'package:gallery_saver/gallery_saver.dart';
 import 'package:get/get.dart';

View File

@@ -266,7 +266,7 @@ dependencies:
   # fast_rsa: ^3.6.6
   protobuf: ^3.1.0
   # screen recording
-  flutter_screen_recording: 2.0.16
+  #flutter_screen_recording: 2.0.16
   # gallery saving
   gallery_saver: ^2.3.2
   fixnum: ^1.1.1