I am new to Swift development and I am trying to get audio from the microphone as a byte array. I request the record permission with the code below, and I try to read the bytes in the startVoice() function further down, but when I run it on a real device (iPhone 6s) it doesn't get any audio data from the microphone. How can I get the audio data as bytes in real time?
import AVFoundation

func permissions() {
    print("Permissions")
    switch AVAudioSession.sharedInstance().recordPermission {
    case .granted:
        print("Permission granted")
    case .denied:
        print("Permission denied")
    case .undetermined:
        print("Request permission here")
        AVAudioSession.sharedInstance().requestRecordPermission { granted in
            // Handle granted
        }
    @unknown default:
        break
    }
}
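I call permissions() before startVoice(). I am not sure whether I also have to configure and activate the shared AVAudioSession before starting the engine; this is the kind of setup I have seen suggested for recording (a sketch, not my exact code; the category, mode and options here are assumptions):

import AVFoundation

// Sketch of session setup before starting the engine; the category, mode and
// options are guesses on my part, not something I have confirmed is required.
func configureAudioSession() throws {
    let session = AVAudioSession.sharedInstance()
    // .playAndRecord because the graph both taps the microphone and feeds the main mixer.
    try session.setCategory(.playAndRecord, mode: .measurement, options: [.defaultToSpeaker])
    try session.setActive(true, options: .notifyOthersOnDeactivation)
}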
func startVoice() {
    let engine = AVAudioEngine()
    let bus = 0
    let inputNode = engine.inputNode

    // Band-pass the microphone signal: low-pass at 3 kHz, high-pass at 1 kHz.
    let equalizer = AVAudioUnitEQ(numberOfBands: 2)
    equalizer.bands[0].filterType = .lowPass
    equalizer.bands[0].frequency = 3000
    equalizer.bands[0].bypass = false
    equalizer.bands[1].filterType = .highPass
    equalizer.bands[1].frequency = 1000
    equalizer.bands[1].bypass = false
    engine.attach(equalizer) // Attach equalizer

    // Connect nodes: input -> equalizer -> main mixer.
    engine.connect(inputNode, to: equalizer, format: inputNode.inputFormat(forBus: 0))
    engine.connect(equalizer, to: engine.mainMixerNode, format: inputNode.inputFormat(forBus: 0))

    // Call before creating the converter because this changes the main mixer's output format.
    engine.prepare()

    let outputFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                     sampleRate: 1000,
                                     channels: 1,
                                     interleaved: false)!

    // Downsampling converter: main mixer format -> 16-bit mono PCM.
    guard let converter = AVAudioConverter(from: engine.mainMixerNode.outputFormat(forBus: 0), to: outputFormat) else {
        print("Can't convert into this format")
        return
    }

    engine.mainMixerNode.installTap(onBus: bus, bufferSize: 2688, format: nil) { (buffer, time) in
        // Hand the tapped buffer to the converter exactly once per callback.
        var newBufferAvailable = true
        let inputCallback: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            if newBufferAvailable {
                outStatus.pointee = .haveData
                newBufferAvailable = false
                return buffer
            } else {
                outStatus.pointee = .noDataNow
                return nil
            }
        }

        let convertedBuffer = AVAudioPCMBuffer(
            pcmFormat: outputFormat,
            frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate))!

        var error: NSError?
        let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
        assert(status != .error)
        if status == .haveData {
            // Process the converted buffer here.
        }
    }

    do {
        try engine.start()
    } catch {
        print("Can't start the engine: \(error)")
    }
}
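For reference, this is the kind of byte extraction I am aiming for inside the "if status == .haveData" block (a sketch assuming the mono, non-interleaved .pcmFormatInt16 output format above; bytes(from:) is just a name I made up):

import AVFoundation

// Sketch: read the converted Int16 samples out of the buffer as raw bytes.
// Assumes the mono, non-interleaved .pcmFormatInt16 output format used above.
func bytes(from buffer: AVAudioPCMBuffer) -> [UInt8]? {
    guard let channelData = buffer.int16ChannelData else { return nil }
    let frameCount = Int(buffer.frameLength)
    // Mono, so channel 0 holds every sample; each Int16 sample is 2 bytes.
    let byteCount = frameCount * MemoryLayout<Int16>.size
    let data = Data(bytes: channelData[0], count: byteCount)
    return [UInt8](data)
}

Inside the tap I would then call something like "if let byteArray = bytes(from: convertedBuffer) { /* send or store */ }". Is this the right approach, and why does the tap never receive audio on the device?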