
AVAudioRecorder

Example:

import SwiftUI
import AVFoundation
import Combine


struct ContentView: View {
    @ObservedObject var myRecorder: MyAudioRecorder
    
    
    var body: some View {
        NavigationView {
            VStack {
                List {
                    ForEach(myRecorder.recordings, id: \.createdAt) { recording in
                        RecordingRow(audioURL: recording.fileURL)
                    }
                }
                
                // Show the record button while idle and the stop button while recording.
                if myRecorder.recording == false {
                    Button(action: {
                        self.myRecorder.startRecording()
                    }) {
                        Image(systemName: "circle.fill")
                            .resizable()
                            .aspectRatio(contentMode: .fill)
                            .frame(width: 100, height: 100)
                            .clipped()
                            .foregroundColor(.red)
                            .padding(.bottom, 40)
                    }
                } else {
                    Button(action: {
                        self.myRecorder.stopRecording()
                    }) {
                        Image(systemName: "stop.fill")
                            .resizable()
                            .aspectRatio(contentMode: .fill)
                            .frame(width: 100, height: 100)
                            .clipped()
                            .foregroundColor(.red)
                            .padding(.bottom, 40)
                    }
                }
            }
        }
    }
    

}
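
// ContentView expects its recorder to be injected from outside. One possible
// app entry point looks like this (the type name MyRecorderApp is assumed):
@main
struct MyRecorderApp: App {
    // Kept alive for the lifetime of the app so the recordings list persists.
    @StateObject private var myRecorder = MyAudioRecorder()

    var body: some Scene {
        WindowGroup {
            ContentView(myRecorder: myRecorder)
        }
    }
}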
 
 

struct RecordingRow: View {
    
    var audioURL: URL
    
    var body: some View {
        HStack {
            Text("\(audioURL.lastPathComponent)")
            Spacer()
        }
    }
}




class MyAudioRecorder: ObservableObject {

    // Publisher used to manually notify subscribing SwiftUI views of changes.
    let objectWillChange = PassthroughSubject<MyAudioRecorder, Never>()
    var audioRecorder: AVAudioRecorder!
    var recordings = [RecordingModel]()
    
    // Notify subscribing views whenever the recording state changes.
    var recording = false {
        didSet {
            objectWillChange.send(self)
        }
    }
    
    init() {
        fetchRecordings()
    }
    
    func startRecording() {
        // Configure the shared audio session for recording before anything else.
        let recordingSession = AVAudioSession.sharedInstance()
        do {
            try recordingSession.setCategory(.playAndRecord, mode: .default)
            try recordingSession.setActive(true)
        } catch {
            print("Failed to set up recording session: \(error)")
        }
        
        
        // Store the recording in the documents directory with a timestamped name,
        // avoiding characters such as ":" that are awkward in file names.
        let documentPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let audioFilename = documentPath.appendingPathComponent("\(Date().toString(dateFormat: "dd-MM-yy_'at'_HH-mm-ss")).m4a")
        
        
        // AAC in a single channel at 12 kHz keeps voice recordings small.
        let settings = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 12000,
            AVNumberOfChannelsKey: 1,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]
        
        
        do {
            audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
            audioRecorder.record()
            recording = true
        } catch {
            print("Could not start recording: \(error)")
        }
            
    }
    
    
    func stopRecording() {
        audioRecorder.stop()
        recording = false
        
        fetchRecordings()
    }
    
    
    
    func fetchRecordings() {
        recordings.removeAll()
        
        let fileManager = FileManager.default
        let documentDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let directoryContents = (try? fileManager.contentsOfDirectory(at: documentDirectory, includingPropertiesForKeys: nil)) ?? []
        
        for audio in directoryContents {
            let recording = RecordingModel(fileURL: audio, createdAt: getCreationDate(for: audio))
            recordings.append(recording)
        }
        
        // Sort once and notify subscribers after the whole list has been rebuilt.
        recordings.sort(by: { $0.createdAt.compare($1.createdAt) == .orderedAscending })
        objectWillChange.send(self)
    }
    
    
    func getCreationDate(for file: URL) -> Date {
        if let attributes = try? FileManager.default.attributesOfItem(atPath: file.path) as [FileAttributeKey: Any],
            let creationDate = attributes[FileAttributeKey.creationDate] as? Date {
            return creationDate
        } else {
            return Date()
        }
    }
}


extension Date {
    func toString(dateFormat format: String) -> String {
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = format
        return dateFormatter.string(from: self)
    }
}


struct RecordingModel {
    let fileURL: URL
    let createdAt: Date
}

Enabling microphone access in the Info.plist

Add the following key to the target's Info.plist; its value is the message iOS shows in the microphone permission prompt:

Privacy - Microphone Usage Description (NSMicrophoneUsageDescription)
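
You can also ask for microphone permission explicitly before the first recording; otherwise iOS prompts automatically the first time the session records. A minimal sketch (the helper name requestMicrophoneAccess is just for illustration):

import AVFoundation

func requestMicrophoneAccess() {
    // Shows the system prompt on first use; afterwards it simply reports the stored decision.
    AVAudioSession.sharedInstance().requestRecordPermission { granted in
        if !granted {
            print("Microphone access was denied")
        }
    }
}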

Checking the recorded files

In Xcode, open Window > Devices and Simulators, select the connected device, pick the app, and choose Download Container. Then right-click the downloaded container in Finder and choose Show Package Contents to browse the recorded audio files. This works when the app has been run on a real device that is connected to your computer.
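
When running in the Simulator instead, it can be quicker to print the documents directory and open that path in Finder. A small helper sketch (the name printDocumentsDirectory is just for illustration):

import Foundation

func printDocumentsDirectory() {
    // The recordings are written to the app's documents directory.
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    print("Recordings are stored at: \(documents.path)")
}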
