import Foundation
import AVFoundation
@objc
|
|
public class RecordPermission: NSObject {
|
|
@objc
|
|
public static func requestRecordPermission(_ completion: @escaping (Bool) -> Void) {
|
|
AVAudioSession.sharedInstance().requestRecordPermission { granted in
|
|
completion(granted)
|
|
}
|
|
}
|
|
}
@objc
|
|
public class AudioRecorderManager: NSObject {
|
|
private var audioEngine: AVAudioEngine?
|
|
private var audioConverter: AVAudioConverter?
|
|
private var aacBuffer: AVAudioCompressedBuffer?
|
|
|
|
private let lock = NSLock()
|
|
private var _isRecording = false
|
|
var isRecording: Bool {
|
|
get {
|
|
lock.lock()
|
|
defer { lock.unlock() }
|
|
return _isRecording
|
|
}
|
|
set {
|
|
lock.lock()
|
|
_isRecording = newValue
|
|
lock.unlock()
|
|
}
|
|
}
|
|
|
|
@objc
|
|
public static let shared = AudioRecorderManager()
|
|
|
|
private override init() {}
|
|
|
|
@objc
|
|
public func initAudio(_ completion: @escaping (Bool, String) -> Void) {
|
|
completion(true, "Module initialized")
|
|
}
|
|
|
|
@objc
|
|
public func startRecord(_ completion: @escaping (Data?, Bool, String) -> Void) {
|
|
if self.isRecording {
|
|
completion(nil, false, "Recording is already in progress.")
|
|
return
|
|
}
|
|
|
|
let session = AVAudioSession.sharedInstance()
|
|
do {
|
|
try session.setCategory(.playAndRecord, mode: .default, options: .defaultToSpeaker)
|
|
try session.setPreferredSampleRate(16000.0)
|
|
try session.setPreferredInputNumberOfChannels(1)
|
|
try session.setActive(true)
|
|
} catch {
|
|
completion(nil, false, "Failed to set up audio session: \(error.localizedDescription)")
|
|
return
|
|
}
|
|
|
|
audioEngine = AVAudioEngine()
|
|
guard let audioEngine = audioEngine else {
|
|
completion(nil, false, "Failed to create audio engine")
|
|
return
|
|
}
|
|
|
|
let inputNode = audioEngine.inputNode
|
|
let inputFormat = inputNode.outputFormat(forBus: 0)
|
|
|
|
var outputFormatDescription = AudioStreamBasicDescription(
|
|
mSampleRate: 16000.0,
|
|
mFormatID: kAudioFormatMPEG4AAC,
|
|
mFormatFlags: 2,
|
|
mBytesPerPacket: 0,
|
|
mFramesPerPacket: 1024,
|
|
mBytesPerFrame: 0,
|
|
mChannelsPerFrame: 1,
|
|
mBitsPerChannel: 0,
|
|
mReserved: 0
|
|
)
|
|
|
|
guard let outputFormat = AVAudioFormat(streamDescription: &outputFormatDescription) else {
|
|
completion(nil, false, "Failed to create output audio format")
|
|
return
|
|
}
|
|
|
|
guard let converter = AVAudioConverter(from: inputFormat, to: outputFormat) else {
|
|
completion(nil, false, "Failed to create audio converter")
|
|
return
|
|
}
|
|
self.audioConverter = converter
|
|
|
|
self.aacBuffer = AVAudioCompressedBuffer(
|
|
format: outputFormat,
|
|
packetCapacity: 1,
|
|
maximumPacketSize: converter.maximumOutputPacketSize
|
|
)
|
|
|
|
inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { [weak self] (pcmBuffer, when) in
|
|
guard let self = self, self.isRecording else { return }
|
|
self.convert(pcmBuffer: pcmBuffer, completion: completion)
|
|
}
|
|
|
|
do {
|
|
audioEngine.prepare()
|
|
try audioEngine.start()
|
|
self.isRecording = true
|
|
completion(nil, true, "Recording started")
|
|
} catch {
|
|
self.isRecording = false
|
|
completion(nil, false, "Failed to start audio engine: \(error.localizedDescription)")
|
|
}
|
|
}
|
|
|
|
private func convert(pcmBuffer: AVAudioPCMBuffer, completion: @escaping (Data?, Bool, String) -> Void) {
|
|
guard let converter = self.audioConverter, let outputBuffer = self.aacBuffer else { return }
|
|
|
|
outputBuffer.byteLength = 0
|
|
outputBuffer.packetCount = 0
|
|
|
|
var error: NSError?
|
|
var pcmBufferWasProvided = false
|
|
let status = converter.convert(to: outputBuffer, error: &error) { _, outStatus in
|
|
if pcmBufferWasProvided {
|
|
outStatus.pointee = .noDataNow
|
|
return nil
|
|
}
|
|
outStatus.pointee = .haveData
|
|
pcmBufferWasProvided = true
|
|
return pcmBuffer
|
|
}
|
|
|
|
guard status != .error, error == nil else {
|
|
print("AAC conversion error: \(error?.localizedDescription ?? "unknown")")
|
|
return
|
|
}
|
|
|
|
if outputBuffer.byteLength == 0 {
|
|
return
|
|
}
|
|
|
|
let aacData = Data(bytes: outputBuffer.data, count: Int(outputBuffer.byteLength))
|
|
|
|
guard let adtsHeader = self.adtsHeader(for: aacData.count) else {
|
|
print("Failed to create ADTS header")
|
|
return
|
|
}
|
|
|
|
var fullPacket = Data()
|
|
fullPacket.append(Data(adtsHeader))
|
|
fullPacket.append(aacData)
|
|
|
|
completion(fullPacket, true, "")
|
|
}
|
|
|
|
private func adtsHeader(for aacFrameSize: Int) -> [UInt8]? {
|
|
guard let outputFormat = self.audioConverter?.outputFormat else { return nil }
|
|
|
|
let adtsLength = aacFrameSize + 7
|
|
let sampleRate = outputFormat.sampleRate
|
|
let channels = outputFormat.channelCount
|
|
|
|
let sampleRateIndices: [Double: Int] = [
|
|
96000: 0, 88200: 1, 64000: 2, 48000: 3, 44100: 4, 32000: 5,
|
|
24000: 6, 22050: 7, 16000: 8, 12000: 9, 11025: 10, 8000: 11, 7350: 12
|
|
]
|
|
guard let freqIndex = sampleRateIndices[sampleRate] else {
|
|
print("Unsupported sample rate for ADTS header: \(sampleRate)")
|
|
return nil
|
|
}
|
|
|
|
let profile = 2 // AAC-LC
|
|
let channelCfg = channels
|
|
|
|
var adtsHeader = [UInt8](repeating: 0, count: 7)
|
|
adtsHeader[0] = 0xFF
|
|
adtsHeader[1] = 0xF9
|
|
adtsHeader[2] = UInt8(((profile - 1) << 6) | (freqIndex << 2) | (Int(channelCfg) >> 2))
|
|
adtsHeader[3] = UInt8((Int(channelCfg) & 3) << 6 | (adtsLength >> 11))
|
|
adtsHeader[4] = UInt8((adtsLength & 0x7FF) >> 3)
|
|
adtsHeader[5] = UInt8(((adtsLength & 7) << 5) | 0x1F)
|
|
adtsHeader[6] = 0xFC
|
|
|
|
return adtsHeader
|
|
}
|
|
|
|
@objc
|
|
public func stopRecord(_ completion: @escaping (Bool, String, String) -> Void) {
|
|
guard self.isRecording else {
|
|
completion(false, "Recording is not in progress.", "")
|
|
return
|
|
}
|
|
self.isRecording = false
|
|
|
|
audioEngine?.stop()
|
|
audioEngine?.inputNode.removeTap(onBus: 0)
|
|
audioEngine = nil
|
|
audioConverter = nil
|
|
aacBuffer = nil
|
|
|
|
do {
|
|
try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
|
|
} catch {
|
|
print("Failed to deactivate audio session: \(error)")
|
|
}
|
|
|
|
completion(true, "Recording stopped", "")
|
|
}
|
|
|
|
@objc
|
|
public func releaseRecord(_ completion: @escaping (Bool, String) -> Void) {
|
|
if self.isRecording {
|
|
self.isRecording = false
|
|
audioEngine?.stop()
|
|
audioEngine?.inputNode.removeTap(onBus: 0)
|
|
}
|
|
audioEngine = nil
|
|
audioConverter = nil
|
|
aacBuffer = nil
|
|
|
|
do {
|
|
try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
|
|
} catch {
|
|
print("Failed to deactivate audio session on release: \(error)")
|
|
}
|
|
|
|
completion(true, "Record released")
|
|
}
|
|
}
@objc
|
|
public class UTSConversionHelper: NSObject {
|
|
@objc
|
|
public static func dataToNSArray(_ data: Data) -> NSArray {
|
|
let byteArray = [UInt8](data)
|
|
let nsArray = NSMutableArray()
|
|
for byte in byteArray {
|
|
nsArray.add(NSNumber(value: byte))
|
|
}
|
|
return nsArray
|
|
}
|
|
}