diff --git a/App.vue b/App.vue index 412bcca..ab386a2 100644 --- a/App.vue +++ b/App.vue @@ -31,7 +31,7 @@ return 'XHJ' } // #endif - return 'XHJ' + return 'PRE' } }, computed: { diff --git a/manifest.json b/manifest.json index 9390eab..9e7e280 100644 --- a/manifest.json +++ b/manifest.json @@ -1,114 +1,114 @@ { - "name": "星星锁Lite", - "appid": "__UNI__933D519", - "description": "", - "versionName": "1.3.1", - "versionCode": "39", - "mp-weixin": { - "appid": "wx9829a39e65550757", - "setting": { - "urlCheck": true, - "minified": true - }, - "permission": { - "scope.bluetooth": { - "desc": "蓝牙将用于控制和管理您的智能门锁" - } - }, - "usingComponents": true, - "lazyCodeLoading": "requiredComponents", - "optimization": { - "subPackages": true - }, - "plugins": { - "wmpf-voip": { - "version": "latest", - "provider": "wxf830863afde621eb", - "genericsImplementation": { - "call-page-plugin": { - "custombox": "pages/main/customBox" - } - } - } - } - }, - "vueVersion": "3", - "app-plus": { - "distribute": { - "icons": { - "android": { - "hdpi": "unpackage/res/icons/72x72.png", - "xhdpi": "unpackage/res/icons/96x96.png", - "xxhdpi": "unpackage/res/icons/144x144.png", - "xxxhdpi": "unpackage/res/icons/192x192.png" + "name" : "星星锁Lite", + "appid" : "__UNI__933D519", + "description" : "", + "versionName" : "1.3.1", + "versionCode" : "39", + "mp-weixin" : { + "appid" : "wx9829a39e65550757", + "setting" : { + "urlCheck" : true, + "minified" : true }, - "ios": { - "appstore": "unpackage/res/icons/1024x1024.png", - "ipad": { - "app": "unpackage/res/icons/76x76.png", - "app@2x": "unpackage/res/icons/152x152.png", - "notification": "unpackage/res/icons/20x20.png", - "notification@2x": "unpackage/res/icons/40x40.png", - "proapp@2x": "unpackage/res/icons/167x167.png", - "settings": "unpackage/res/icons/29x29.png", - "settings@2x": "unpackage/res/icons/58x58.png", - "spotlight": "unpackage/res/icons/40x40.png", - "spotlight@2x": "unpackage/res/icons/80x80.png" - }, - "iphone": { - "app@2x": "unpackage/res/icons/120x120.png", - "app@3x": "unpackage/res/icons/180x180.png", - "notification@2x": "unpackage/res/icons/40x40.png", - "notification@3x": "unpackage/res/icons/60x60.png", - "settings@2x": "unpackage/res/icons/58x58.png", - "settings@3x": "unpackage/res/icons/87x87.png", - "spotlight@2x": "unpackage/res/icons/80x80.png", - "spotlight@3x": "unpackage/res/icons/120x120.png" - } + "permission" : { + "scope.bluetooth" : { + "desc" : "蓝牙将用于控制和管理您的智能门锁" + } + }, + "usingComponents" : true, + "lazyCodeLoading" : "requiredComponents", + "optimization" : { + "subPackages" : true + }, + "plugins" : { + "wmpf-voip" : { + "version" : "latest", + "provider" : "wxf830863afde621eb", + "genericsImplementation" : { + "call-page-plugin" : { + "custombox" : "pages/main/customBox" + } + } + } } - }, - "android": { - "permissions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ], - "targetSdkVersion": 34, - "abiFilters": ["armeabi-v7a", "arm64-v8a"] - }, - "ios": { - "dSYMs": false - } }, - "modules": { - "Bluetooth": {}, - "VideoPlayer": {}, - "Camera": {}, - "Record": {} - }, - "splashscreen": { - "waiting": false + "vueVersion" : "3", + "app-plus" : { + "distribute" : { + "icons" : { + "android" : { + "hdpi" : "unpackage/res/icons/72x72.png", + "xhdpi" : "unpackage/res/icons/96x96.png", + "xxhdpi" : "unpackage/res/icons/144x144.png", + "xxxhdpi" : "unpackage/res/icons/192x192.png" + }, + "ios" : { + "appstore" : 
"unpackage/res/icons/1024x1024.png", + "ipad" : { + "app" : "unpackage/res/icons/76x76.png", + "app@2x" : "unpackage/res/icons/152x152.png", + "notification" : "unpackage/res/icons/20x20.png", + "notification@2x" : "unpackage/res/icons/40x40.png", + "proapp@2x" : "unpackage/res/icons/167x167.png", + "settings" : "unpackage/res/icons/29x29.png", + "settings@2x" : "unpackage/res/icons/58x58.png", + "spotlight" : "unpackage/res/icons/40x40.png", + "spotlight@2x" : "unpackage/res/icons/80x80.png" + }, + "iphone" : { + "app@2x" : "unpackage/res/icons/120x120.png", + "app@3x" : "unpackage/res/icons/180x180.png", + "notification@2x" : "unpackage/res/icons/40x40.png", + "notification@3x" : "unpackage/res/icons/60x60.png", + "settings@2x" : "unpackage/res/icons/58x58.png", + "settings@3x" : "unpackage/res/icons/87x87.png", + "spotlight@2x" : "unpackage/res/icons/80x80.png", + "spotlight@3x" : "unpackage/res/icons/120x120.png" + } + } + }, + "android" : { + "permissions" : [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "targetSdkVersion" : 34, + "abiFilters" : [ "armeabi-v7a", "arm64-v8a" ] + }, + "ios" : { + "dSYMs" : false + } + }, + "modules" : { + "Bluetooth" : {}, + "VideoPlayer" : {}, + "Camera" : {}, + "Record" : {} + }, + "splashscreen" : { + "waiting" : false + } } - } } diff --git a/uni_modules/xhj-record/utssdk/app-ios/hybrid.swift b/uni_modules/xhj-record/utssdk/app-ios/hybrid.swift index 1783300..ce8640c 100644 --- a/uni_modules/xhj-record/utssdk/app-ios/hybrid.swift +++ b/uni_modules/xhj-record/utssdk/app-ios/hybrid.swift @@ -1,231 +1,398 @@ -import Foundation import AVFoundation +import CoreMedia +import Foundation -@objc +@objc(RecordPermission) public class RecordPermission: NSObject { @objc - public static func requestRecordPermission(_ completion: @escaping (Bool) -> Void) { + public static func requestRecordPermission(_ callback: @escaping (Bool) -> Void) { AVAudioSession.sharedInstance().requestRecordPermission { granted in - completion(granted) + callback(granted) } } } -@objc +@objc(AudioRecorderManager) public class AudioRecorderManager: NSObject { + @objc public static let shared = AudioRecorderManager() + + private enum State { + case idle, recording, stopped + } + + private var state: State = .idle + private let processingQueue = DispatchQueue(label: "com.xhjcn.lock.lite.audio.processing.queue") + + private var recordingCallback: ((Data?, Bool, String) -> Void)? + private var audioEngine: AVAudioEngine? private var audioConverter: AVAudioConverter? - private var aacBuffer: AVAudioCompressedBuffer? + private var outputBuffer: AVAudioCompressedBuffer? + private var flvMuxer: FLVMuxer? 
- private let lock = NSLock() - private var _isRecording = false - var isRecording: Bool { - get { - lock.lock() - defer { lock.unlock() } - return _isRecording - } - set { - lock.lock() - _isRecording = newValue - lock.unlock() - } + @objc + public func initAudio(_ callback: @escaping (Bool, String) -> Void) { + callback(true, "init success") } @objc - public static let shared = AudioRecorderManager() + public func startRecord(_ callback: @escaping (Data?, Bool, String) -> Void) { + processingQueue.async { [weak self] in + guard let self = self else { return } - private override init() {} + guard self.state == .idle || self.state == .stopped else { + self.dispatchError(callback: callback, message: "Recording is already in progress.") + return + } - @objc - public func initAudio(_ completion: @escaping (Bool, String) -> Void) { - completion(true, "Module initialized") - } + self.recordingCallback = callback - @objc - public func startRecord(_ completion: @escaping (Data?, Bool, String) -> Void) { - if self.isRecording { - completion(nil, false, "Recording is already in progress.") - return - } + if AVAudioSession.sharedInstance().recordPermission != .granted { + self.dispatchError(callback: callback, message: "Microphone permission not granted.") + return + } - if audioEngine == nil { - let session = AVAudioSession.sharedInstance() do { - try session.setCategory(.playAndRecord, mode: .default, options: .defaultToSpeaker) - try session.setPreferredSampleRate(16000.0) - try session.setPreferredInputNumberOfChannels(1) - try session.setActive(true) + try self.setupAudioSession() + try self.setupAudioEngine() + + guard let converter = self.audioConverter else { + self.cleanUp() + self.dispatchError(callback: callback, message: "Failed to setup audio engine components.") + return + } + + self.flvMuxer = FLVMuxer(sampleRate: converter.outputFormat.sampleRate, channels: Double(converter.outputFormat.channelCount)) + + var initialData = Data() + + let header = self.flvMuxer!.getHeader() + initialData.append(header) + + let metaTag = self.flvMuxer!.getMetaTag() + initialData.append(metaTag) + + let audioConfigTag = self.flvMuxer!.getAudioSpecificConfigTag(config: nil) + initialData.append(audioConfigTag) + + let inputNode = self.audioEngine!.inputNode + let inputFormat = inputNode.outputFormat(forBus: 0) + inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { [weak self] (buffer, _) in + self?.processAudioBuffer(buffer) + } + + DispatchQueue.main.async { + self.dispatchData(initialData) + + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + self.processingQueue.async { [weak self] in + guard let self = self, let engine = self.audioEngine else { return } + do { + engine.prepare() + try engine.start() + self.state = .recording + self.dispatchSuccess(message: "Recording started successfully.") + } catch { + self.cleanUp() + self.dispatchError(callback: self.recordingCallback ?? 
{_,_,_ in }, message: "Failed to start engine: \(error.localizedDescription)") + } + } + } + } + } catch { - completion(nil, false, "Failed to set up audio session: \(error.localizedDescription)") - return + self.cleanUp() + self.dispatchError(callback: callback, message: "Failed to start recording: \(error.localizedDescription)") } - - audioEngine = AVAudioEngine() - guard let audioEngine = audioEngine else { - completion(nil, false, "Failed to create audio engine") - return - } - - let inputNode = audioEngine.inputNode - let inputFormat = inputNode.outputFormat(forBus: 0) - - var outputFormatDescription = AudioStreamBasicDescription( - mSampleRate: 16000.0, - mFormatID: kAudioFormatMPEG4AAC, - mFormatFlags: 2, - mBytesPerPacket: 0, - mFramesPerPacket: 1024, - mBytesPerFrame: 0, - mChannelsPerFrame: inputFormat.channelCount, - mBitsPerChannel: 0, - mReserved: 0 - ) - - guard let outputFormat = AVAudioFormat(streamDescription: &outputFormatDescription) else { - completion(nil, false, "Failed to create output audio format") - return - } - - guard let converter = AVAudioConverter(from: inputFormat, to: outputFormat) else { - completion(nil, false, "Failed to create audio converter") - return - } - self.audioConverter = converter - - self.aacBuffer = AVAudioCompressedBuffer( - format: outputFormat, - packetCapacity: 1, - maximumPacketSize: converter.maximumOutputPacketSize - ) - - inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { [weak self] (pcmBuffer, when) in - guard let self = self, self.isRecording else { return } - self.convert(pcmBuffer: pcmBuffer, completion: completion) - } - - audioEngine.prepare() - } - - do { - guard let audioEngine = audioEngine else { - completion(nil, false, "Audio engine not initialized.") - return - } - try audioEngine.start() - self.isRecording = true - completion(nil, true, "Recording started") - } catch { - self.isRecording = false - completion(nil, false, "Failed to start audio engine: \(error.localizedDescription)") } } - private func convert(pcmBuffer: AVAudioPCMBuffer, completion: @escaping (Data?, Bool, String) -> Void) { - guard let converter = self.audioConverter, let outputBuffer = self.aacBuffer else { return } + @objc + public func stopRecord(_ callback: @escaping (Bool, String, String) -> Void) { + processingQueue.async { [weak self] in + guard let self = self else { return } + guard self.state == .recording else { + callback(false, "Not recording.", "") + return + } + self.cleanUp() + self.state = .stopped + callback(true, "Recording stopped.", "") + } + } + + @objc + public func releaseRecord(_ callback: @escaping (Bool, String) -> Void) { + processingQueue.async { [weak self] in + guard let self = self else { return } + self.cleanUp() + self.state = .idle + callback(true, "Record released.") + } + } + + private func setupAudioSession() throws { + let audioSession = AVAudioSession.sharedInstance() + try audioSession.setCategory(.playAndRecord, mode: .voiceChat, options: [.defaultToSpeaker, .allowBluetooth]) + try audioSession.setActive(true) + } + + private func setupAudioEngine() throws { + audioEngine = AVAudioEngine() + guard let engine = audioEngine else { + throw NSError(domain: "AudioRecorderManager", code: -1, userInfo: [NSLocalizedDescriptionKey: "Failed to create AVAudioEngine"]) + } + + let inputNode = engine.inputNode + let inputFormat = inputNode.outputFormat(forBus: 0) + + guard inputFormat.sampleRate > 0 else { + throw NSError(domain: "AudioRecorderManager", code: -2, userInfo: [NSLocalizedDescriptionKey: 
"Input node has an invalid sample rate."]) + } + + let outputFormatSettings: [String: Any] = [ + AVFormatIDKey: kAudioFormatMPEG4AAC, + AVSampleRateKey: 16000, + AVNumberOfChannelsKey: inputFormat.channelCount, + AVEncoderBitRateKey: 48000, + AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue + ] + + guard let outputFormat = AVAudioFormat(settings: outputFormatSettings) else { + throw NSError(domain: "AudioRecorderManager", code: -3, userInfo: [NSLocalizedDescriptionKey: "Failed to create output format."]) + } + + guard let converter = AVAudioConverter(from: inputFormat, to: outputFormat) else { + throw NSError(domain: "AudioRecorderManager", code: -4, userInfo: [NSLocalizedDescriptionKey: "Failed to create audio converter."]) + } + + converter.bitRate = 48000 + converter.bitRateStrategy = AVAudioBitRateStrategy_Constant + + self.audioConverter = converter + + self.outputBuffer = AVAudioCompressedBuffer( + format: converter.outputFormat, + packetCapacity: 1, + maximumPacketSize: converter.maximumOutputPacketSize + ) + } + + private func processAudioBuffer(_ pcmBuffer: AVAudioPCMBuffer) { + guard state == .recording, + let converter = self.audioConverter, + let outputBuffer = self.outputBuffer, + let flvMuxer = self.flvMuxer else { return } - outputBuffer.byteLength = 0 outputBuffer.packetCount = 0 + outputBuffer.byteLength = 0 var error: NSError? - var pcmBufferWasProvided = false let status = converter.convert(to: outputBuffer, error: &error) { _, outStatus in - if pcmBufferWasProvided { - outStatus.pointee = .noDataNow - return nil - } outStatus.pointee = .haveData - pcmBufferWasProvided = true return pcmBuffer } - guard status != .error, error == nil else { - print("AAC conversion error: \(error?.localizedDescription ?? "unknown")") - return + if status == .haveData, outputBuffer.byteLength > 0 { + let aacData = Data(bytes: outputBuffer.data, count: Int(outputBuffer.byteLength)) + let aacTag = flvMuxer.getAACTag(data: aacData) + dispatchData(aacTag) + } else if let error = error { + processingQueue.async { [weak self] in + guard let self = self, self.state == .recording else { return } + + let errorMessage = "AAC Conversion Error: \(error.localizedDescription)" + self.dispatchError(callback: self.recordingCallback ?? { _, _, _ in }, message: errorMessage) + + self.cleanUp() + self.state = .stopped + } } - - if outputBuffer.byteLength == 0 { - return - } - - let aacData = Data(bytes: outputBuffer.data, count: Int(outputBuffer.byteLength)) - - guard let adtsHeader = self.adtsHeader(for: aacData.count) else { - print("Failed to create ADTS header") - return - } - - var fullPacket = Data() - fullPacket.append(Data(adtsHeader)) - fullPacket.append(aacData) - - completion(fullPacket, true, "") } - private func adtsHeader(for aacFrameSize: Int) -> [UInt8]? 
{ - guard let outputFormat = self.audioConverter?.outputFormat else { return nil } - - let adtsLength = aacFrameSize + 7 - let sampleRate = outputFormat.sampleRate - let channels = outputFormat.channelCount - - let sampleRateIndices: [Double: Int] = [ - 96000: 0, 88200: 1, 64000: 2, 48000: 3, 44100: 4, 32000: 5, - 24000: 6, 22050: 7, 16000: 8, 12000: 9, 11025: 10, 8000: 11, 7350: 12 - ] - guard let freqIndex = sampleRateIndices[sampleRate] else { - print("Unsupported sample rate for ADTS header: \(sampleRate)") - return nil - } - - let profile = 2 // AAC-LC - let channelCfg = channels - - var adtsHeader = [UInt8](repeating: 0, count: 7) - adtsHeader[0] = 0xFF - adtsHeader[1] = 0xF9 - adtsHeader[2] = UInt8(((profile - 1) << 6) | (freqIndex << 2) | (Int(channelCfg) >> 2)) - adtsHeader[3] = UInt8((Int(channelCfg) & 3) << 6 | (adtsLength >> 11)) - adtsHeader[4] = UInt8((adtsLength & 0x7FF) >> 3) - adtsHeader[5] = UInt8(((adtsLength & 7) << 5) | 0x1F) - adtsHeader[6] = 0xFC - - return adtsHeader - } - - @objc - public func stopRecord(_ completion: @escaping (Bool, String, String) -> Void) { - guard self.isRecording else { - completion(false, "Recording is not in progress.", "") - return - } - self.isRecording = false + private func cleanUp() { audioEngine?.stop() - completion(true, "Recording stopped", "") - } + audioEngine?.inputNode.removeTap(onBus: 0) + try? AVAudioSession.sharedInstance().setActive(false) - @objc - public func releaseRecord(_ completion: @escaping (Bool, String) -> Void) { - if self.isRecording { - self.isRecording = false - audioEngine?.stop() - audioEngine?.inputNode.removeTap(onBus: 0) - } audioEngine = nil audioConverter = nil - aacBuffer = nil + outputBuffer = nil + flvMuxer = nil + recordingCallback = nil + } - do { - try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation) - } catch { - print("Failed to deactivate audio session on release: \(error)") + private func dispatchData(_ data: Data) { + DispatchQueue.main.async { [weak self] in + self?.recordingCallback?(data, false, "") } + } - completion(true, "Record released") + private func dispatchError(callback: @escaping (Data?, Bool, String) -> Void, message: String) { + DispatchQueue.main.async { + callback(nil, false, message) + } + } + + private func dispatchSuccess(message: String) { + DispatchQueue.main.async { [weak self] in + self?.recordingCallback?(nil, true, message) + } } } -@objc +private class FLVMuxer { + private var startTime: CFTimeInterval + private var sampleRate: Double + private var channels: Double + + init(sampleRate: Double = 44100.0, channels: Double = 1.0) { + self.startTime = CACurrentMediaTime() + self.sampleRate = sampleRate + self.channels = channels + } + + func getHeader() -> Data { + return Data([ + 0x46, 0x4C, 0x56, + 0x01, + 0x04, + 0x00, 0x00, 0x00, 0x09, + 0x00, 0x00, 0x00, 0x00 + ]) + } + + func getMetaTag() -> Data { + var scriptData = Data() + + scriptData.append(amfString("onMetaData")) + + let properties: [(String, Any)] = [ + ("audiocodecid", 10.0), + ("audiosamplerate", self.sampleRate), + ("audiochannels", self.channels), + ("stereo", self.channels > 1), + ("creator", "starlock-record") + ] + scriptData.append(amfECMAArray(properties)) + + let tag = createTag(type: .script, timestamp: 0, body: scriptData) + return tag + } + + func getAudioSpecificConfigTag(config: Data?) 
-> Data { + let audioObjectType: UInt8 = 2 + guard let frequencyIndex = getSamplingFrequencyIndex(sampleRate: self.sampleRate) else { + let defaultConfig = Data([0x12, 0x08]) + let body = Data([0xAE, 0x00]) + defaultConfig + return createTag(type: .audio, timestamp: 0, body: body) + } + + let channelConfig = UInt8(self.channels) + + var configByte1: UInt8 = 0 + configByte1 |= (audioObjectType << 3) + configByte1 |= (frequencyIndex >> 1) + + var configByte2: UInt8 = 0 + configByte2 |= ((frequencyIndex & 0x01) << 7) + configByte2 |= (channelConfig << 3) + + let audioSpecificConfig = Data([configByte1, configByte2]) + + let body = Data([0xAE, 0x00]) + audioSpecificConfig + let tag = createTag(type: .audio, timestamp: 0, body: body) + return tag + } + + func getAACTag(data: Data) -> Data { + let elapsed = CACurrentMediaTime() - self.startTime + let timestamp = UInt32(elapsed * 1000) + let body = Data([0xAE, 0x01]) + data + return createTag(type: .audio, timestamp: timestamp, body: body) + } + + private func createTag(type: TagType, timestamp: UInt32, body: Data) -> Data { + let dataSize = UInt32(body.count) + let tagHeaderSize: UInt32 = 11 + + var header = Data(capacity: Int(tagHeaderSize)) + header.append(type.rawValue) + header.append(UInt8((dataSize >> 16) & 0xFF)) + header.append(UInt8((dataSize >> 8) & 0xFF)) + header.append(UInt8(dataSize & 0xFF)) + + header.append(UInt8((timestamp >> 16) & 0xFF)) + header.append(UInt8((timestamp >> 8) & 0xFF)) + header.append(UInt8(timestamp & 0xFF)) + header.append(UInt8((timestamp >> 24) & 0xFF)) + + header.append(contentsOf: [0x00, 0x00, 0x00]) + + var completeTag = Data() + completeTag.append(header) + completeTag.append(body) + + let fullTagSize = UInt32(header.count + body.count) + completeTag.append(contentsOf: fullTagSize.bytes) + + return completeTag + } + + private func getSamplingFrequencyIndex(sampleRate: Double) -> UInt8? { + let rates: [Double: UInt8] = [ + 96000: 0, 88200: 1, 64000: 2, 48000: 3, 44100: 4, 32000: 5, + 24000: 6, 22050: 7, 16000: 8, 12000: 9, 11025: 10, 8000: 11, 7350: 12 + ] + return rates[sampleRate] + } + + private enum TagType: UInt8 { case audio = 0x08, video = 0x09, script = 0x12 } + + private func amfString(_ str: String) -> Data { + var data = Data([0x02]) + let bytes = [UInt8](str.utf8) + data.append(UInt8((bytes.count >> 8) & 0xFF)) + data.append(UInt8(bytes.count & 0xFF)) + data.append(contentsOf: bytes) + return data + } + + private func amfNumber(_ val: Double) -> Data { + var data = Data([0x00]) + data.append(contentsOf: val.bitPattern.bytes) + return data + } + + private func amfBool(_ val: Bool) -> Data { + return Data([0x01, val ? 0x01 : 0x00]) + } + + private func amfECMAArray(_ properties: [(String, Any)]) -> Data { + var data = Data([0x08]) + let countBytes = UInt32(properties.count).bytes + data.append(contentsOf: countBytes) + + for (key, value) in properties { + data.append(amfString(key).dropFirst(1)) + if let numValue = value as? Double { + data.append(amfNumber(numValue)) + } else if let boolValue = value as? Bool { + data.append(amfBool(boolValue)) + } else if let stringValue = value as? 
String {
+                data.append(amfString(stringValue))
+            }
+        }
+        data.append(contentsOf: [0x00, 0x00, 0x09])
+        return data
+    }
+}
+
+fileprivate extension FixedWidthInteger {
+    var bytes: [UInt8] { withUnsafeBytes(of: self.bigEndian) { Array($0) } }
+}
+
+@objc(UTSConversionHelper)
 public class UTSConversionHelper: NSObject {
     @objc
     public static func dataToNSArray(_ data: Data) -> NSArray {
diff --git a/uni_modules/xhj-record/utssdk/app-ios/index.uts b/uni_modules/xhj-record/utssdk/app-ios/index.uts
index f8e9a74..7e1de7a 100644
--- a/uni_modules/xhj-record/utssdk/app-ios/index.uts
+++ b/uni_modules/xhj-record/utssdk/app-ios/index.uts
@@ -61,7 +61,7 @@ export const stopRecord = async function (): Promise {
       AudioRecorderManager.shared.stopRecord(
         (success: boolean, message: string, filePath: string) => {
           if (success) {
-            resolve({ code: 0, data: { filePath: filePath }, message: message })
+            resolve({ code: 0, data: {}, message: message })
           } else {
             resolve({ code: -1, data: {}, message: message })
           }
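
Reviewer note (not part of the patch): the reworked startRecord no longer produces a file path; it streams FLV chunks through its callback. Based on the dispatch helpers in the diff, a non-nil Data argument is an FLV chunk (the header, metadata, and AudioSpecificConfig tags first, then timestamped AAC tags), nil with success == true signals that the engine started, and nil with success == false carries an error message. Below is a minimal Swift sketch of a caller that assembles those chunks into a playable .flv buffer, assuming exactly those semantics; the FLVRecordingSink type and the output URL are hypothetical names used only for illustration.

```swift
import Foundation

// Hypothetical consumer of AudioRecorderManager's streaming callback.
final class FLVRecordingSink {
    private var flvData = Data()

    func start() {
        AudioRecorderManager.shared.startRecord { [weak self] data, success, message in
            guard let self = self else { return }
            if let chunk = data {
                // FLV header + meta tag + AudioSpecificConfig tag arrive first,
                // followed by one AAC audio tag per converted PCM buffer.
                self.flvData.append(chunk)
            } else if success {
                print("recording started: \(message)")
            } else {
                print("recording error: \(message)")
            }
        }
    }

    func stopAndSave(to url: URL) {
        AudioRecorderManager.shared.stopRecord { [weak self] success, message, _ in
            guard let self = self, success else {
                print("stop failed: \(message)")
                return
            }
            // Persist the assembled FLV stream; error handling omitted in this sketch.
            try? self.flvData.write(to: url)
        }
    }
}
```

This mirrors the UTS-side change at the end of the patch: stopRecord now resolves with an empty data object rather than a filePath, since the audio is delivered incrementally rather than written to a file by the native layer.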