I hope you can help me out. I tried to write a small, simple program that records audio for 5 seconds and saves it in the documents directory.
I tested it with the simulator: I could see the path in the output, the sound.wav file was created, and I was able to play that sound.wav file.
Here is the path I got with the simulator:
(file:///Users/Username/Library/Developer/CoreSimulator/Devices/3721A435-C765-4C8D-BA59-04477B070F80/data/Containers/Data/PluginKitPlugin/476CD646-D534-46C0-B5D2-88D7429DFF8F/Documents/sound.wav)
However, when I run it on the actual smartwatch, there is nothing in the Xcode output. Even though I put print statements into the code, they are not shown, and I do not know what the problem is.
I also want to get the audio file and send it to the server (MySQL). How can I get the actual audio file, the .wav file in this case? And how can I get the path to the documents directory where the audio file is supposed to be saved?
Here is what my code looks like. Thank you very much.
import WatchKit
import Foundation
import AVFoundation

class InterfaceController: WKInterfaceController, AVAudioRecorderDelegate {

    @IBOutlet weak var btn: WKInterfaceButton!

    var recordingSession: AVAudioSession!
    var audioRecorder: AVAudioRecorder!
    var settings = [String: Any]()
    override func awake(withContext context: Any?) {
        super.awake(withContext: context)

        recordingSession = AVAudioSession.sharedInstance()
        do {
            try recordingSession.setCategory(AVAudioSession.Category.playAndRecord)
            try recordingSession.setActive(true)
            recordingSession.requestRecordPermission { [unowned self] allowed in
                DispatchQueue.main.async {
                    if allowed {
                        print("Allow")
                    } else {
                        print("Don't Allow")
                    }
                }
            }
        } catch {
            print("failed to record!")
        }

        // Configure interface objects here.
        // Audio settings
        settings = [
            AVFormatIDKey: Int(kAudioFormatLinearPCM),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 1,
            AVLinearPCMBitDepthKey: 8,
            AVLinearPCMIsFloatKey: false,
            AVLinearPCMIsBigEndianKey: false,
            AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue
        ]
    }
    override func willActivate() {
        // This method is called when the watch view controller is about to be visible to the user.
        super.willActivate()
    }

    override func didDeactivate() {
        // This method is called when the watch view controller is no longer visible.
        super.didDeactivate()
    }
    // Build <Documents>/sound.wav inside the app's documents directory.
    func directoryURL() -> URL? {
        let fileManager = FileManager.default
        let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
        let documentDirectory = urls[0]
        let soundURL = documentDirectory.appendingPathComponent("sound.wav")
        print(soundURL)
        return soundURL
    }
    func startRecording() {
        let audioSession = AVAudioSession.sharedInstance()
        do {
            // Activate the session before creating the recorder.
            try audioSession.setActive(true)
            audioRecorder = try AVAudioRecorder(url: self.directoryURL()!,
                                                settings: settings)
            audioRecorder.delegate = self
            audioRecorder.prepareToRecord()
            // Record for 5 seconds; the delegate is called when the time is up.
            // (Calling record() again afterwards would restart recording without the time limit.)
            audioRecorder.record(forDuration: 5.0)
        } catch {
            finishRecording(success: false)
        }
    }
    // Stop the recorder and reset it so a new recording can be started.
    func finishRecording(success: Bool) {
        audioRecorder?.stop()
        audioRecorder = nil
        if success {
            print(success)
        } else {
            print("Something went wrong.")
        }
    }
    @IBAction func recordAudio() {
        if audioRecorder == nil {
            print("Pressed")
            self.btn.setTitle("Stop")
            self.btn.setBackgroundColor(UIColor(red: 119.0/255.0, green: 119.0/255.0, blue: 119.0/255.0, alpha: 1.0))
            self.startRecording()
        } else {
            self.btn.setTitle("Record")
            print("Pressed2")
            self.btn.setBackgroundColor(UIColor(red: 221.0/255.0, green: 27.0/255.0, blue: 50.0/255.0, alpha: 1.0))
            self.finishRecording(success: true)
        }
    }
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        if !flag {
            finishRecording(success: false)
        }
    }
}
The file paths for the simulator and an actual device are different. The simulator stores the app container on your macOS file system, whereas an actual device uses the device's own storage.
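If it helps, here is a minimal sketch (using the same "sound.wav" file name as in your code) of how you could rebuild the documents-directory URL on the device itself and load the recording as Data before uploading it. The function names here are just placeholders, not part of your project:

import Foundation

// Rebuild the same URL the recorder writes to: <Documents>/sound.wav
func recordedFileURL() -> URL {
    let documents = FileManager.default.urls(for: .documentDirectory,
                                             in: .userDomainMask)[0]
    return documents.appendingPathComponent("sound.wav")
}

// Load the recorded file as Data so it can be attached to an upload request.
func loadRecordedAudio() -> Data? {
    let url = recordedFileURL()
    guard FileManager.default.fileExists(atPath: url.path) else {
        print("No recording found at \(url.path)")
        return nil
    }
    return try? Data(contentsOf: url)
}

On a real watch the printed path will look different from the simulator path (it points into the device's own app container), but FileManager resolves it for you either way. As for getting the file to your MySQL-backed server, a common approach is to hand the file to the paired iPhone with WatchConnectivity (WCSession's transferFile(_:metadata:)) and do the HTTP upload from the phone, although uploading directly from the watch with URLSession is also possible.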