
Play the recorded audio

I intended to show the difference between .record and .playAndRecord, but this example can't demonstrate it.
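For reference, the two categories differ in what the session allows: .record is input-only and silences other playback, while .playAndRecord lets the same session capture and play audio. A minimal sketch of choosing between them (both are real AVAudioSession categories):

let session = AVAudioSession.sharedInstance()
// Either: input only; other playback is silenced while the session is active.
try session.setCategory(.record, mode: .default)
// Or: input and output, which this example needs because the
// list rows play files back:
try session.setCategory(.playAndRecord, mode: .default)
try session.setActive(true)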

AVAudioPlayer and AVAudioRecorder are mutable objects, so you need to wrap them in an ObservableObject; otherwise their changes will not be captured by SwiftUI.
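The code below sends through a hand-rolled objectWillChange publisher; a shorter equivalent, shown here only as a sketch (SketchRecorder is a made-up name, not part of the example), is to mark the state @Published so Combine synthesizes the notification:

import AVFoundation
import Combine

final class SketchRecorder: ObservableObject {
    @Published var recording = false     // SwiftUI re-renders whenever this flips
    var audioRecorder: AVAudioRecorder?  // reference type; mutating it alone won't trigger a view update
}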

import SwiftUI
import AVFoundation
import Combine


struct ContentView: View {
    @ObservedObject var myRecorder: MyAudioRecorder
    
    
    var body: some View {
        NavigationView {
        
            
            VStack {
                
                List {
                    ForEach(myRecorder.recordings, id: \.createdAt) { recording in
                        RecordingRow(audioURL: recording.fileURL)
                    }
                }
                
                if myRecorder.recording == false {
                    Button(action: {
                        self.myRecorder.startRecording()
                    }) {
                        Image(systemName: "circle.fill")
                            .resizable()
                            .aspectRatio(contentMode: .fill)
                            .frame(width: 100, height: 100)
                            .clipped()
                            .foregroundColor(.red)
                            .padding(.bottom, 40)
                    }
                } else {
                    Button(action: {
                        self.myRecorder.stopRecording()
                    }) {
                        Image(systemName: "stop.fill")
                            .resizable()
                            .aspectRatio(contentMode: .fill)
                            .frame(width: 100, height: 100)
                            .clipped()
                            .foregroundColor(.red)
                            .padding(.bottom, 40)
                    }
                }
            }
                
        }
    }
    

}
 
 

struct RecordingRow: View {
    
    // Note: the player is created in init, so SwiftUI rebuilds it whenever
    // it recreates this row; @StateObject would keep one instance alive,
    // but this is fine for a simple list.
    @ObservedObject var myPlayer: MyAudioPlayer
    var audioURL: URL

    init(audioURL: URL) {
        self.audioURL = audioURL
        self.myPlayer = MyAudioPlayer(audioURL: audioURL)
    }

    
    var body: some View {
        Button {
            // audioPlayer.url is optional; fall back to the row's own URL when logging
            print(myPlayer.audioPlayer.url ?? audioURL)
            myPlayer.audioPlayer.prepareToPlay()
            myPlayer.audioPlayer.play()
        } label: {
            HStack {
                Text("\(audioURL.lastPathComponent)")
                Spacer()
            }
        }
    }
}


class MyAudioPlayer: ObservableObject {
    let objectWillChange = PassthroughSubject<MyAudioPlayer, Never>()
    var audioPlayer: AVAudioPlayer!
    var audioURL: URL
    
    init(audioURL: URL) {
        self.audioURL = audioURL
        do {
            self.audioPlayer = try AVAudioPlayer(contentsOf: audioURL)
        } catch {
            // Fall back to an empty player so the row still renders;
            // calling play() on it is simply a no-op.
            print(error)
            self.audioPlayer = AVAudioPlayer()
        }
        // No objectWillChange.send here: nothing can have subscribed yet.
    }
}


class MyAudioRecorder: ObservableObject {

    let objectWillChange = PassthroughSubject<MyAudioRecorder, Never>()
    var audioRecorder: AVAudioRecorder!
    var recordings = [RecordingModel]()
    
    // Update subscribing views via objectWillChange; send *before* the
    // value changes, matching the publisher's "will change" semantics.
    var recording = false {
        willSet {
            objectWillChange.send(self)
        }
    }
    
    init() {
        fetchRecordings()
    }
    
    func startRecording() {
        // Recording requires the NSMicrophoneUsageDescription key in Info.plist.
        let recordingSession = AVAudioSession.sharedInstance()
        do {
            try recordingSession.setCategory(.playAndRecord, mode: .default)
            try recordingSession.setActive(true)
        } catch {
            print("Failed to set up recording session: \(error)")
        }
        
        
        let documentPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        // Use "yy", not "YY": "YY" is the week-based year and can be off around New Year.
        let audioFilename = documentPath.appendingPathComponent("\(Date().toString(dateFormat: "dd-MM-yy_'at'_HH:mm:ss")).m4a")
        
        
        let settings = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 12000,
            AVNumberOfChannelsKey: 1,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]
        
        
        do {
            audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
            audioRecorder.record()
            recording = true
        } catch {
            print("Could not start recording")
        }
            
    }
    
    
    func stopRecording() {
        audioRecorder.stop()
        recording = false
        
        fetchRecordings()
    }
    
    
    
    func fetchRecordings() {
        recordings.removeAll()
        
        let fileManager = FileManager.default
        let documentDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
        // try? instead of try!: an unreadable directory yields an empty list rather than a crash.
        let directoryContents = (try? fileManager.contentsOfDirectory(at: documentDirectory, includingPropertiesForKeys: nil)) ?? []
        
        for audio in directoryContents {
            let recording = RecordingModel(fileURL: audio, createdAt: getCreationDate(for: audio))
            recordings.append(recording)
        }
        
        // Sort and notify once, after the loop, instead of on every file.
        recordings.sort(by: { $0.createdAt.compare($1.createdAt) == .orderedAscending })
        objectWillChange.send(self)
    }
    
    
    func getCreationDate(for file: URL) -> Date {
        if let attributes = try? FileManager.default.attributesOfItem(atPath: file.path) as [FileAttributeKey: Any],
            let creationDate = attributes[FileAttributeKey.creationDate] as? Date {
            return creationDate
        } else {
            return Date()
        }
    }
}


extension Date {
    func toString(dateFormat format: String) -> String {
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = format
        return dateFormatter.string(from: self)
    }
}


struct RecordingModel {
    let fileURL: URL
    let createdAt: Date
}
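
ContentView expects a recorder to be injected, so the app needs an entry point that creates one. A minimal sketch (RecorderApp is a placeholder name; @StateObject needs iOS 14 or later):

import SwiftUI

@main
struct RecorderApp: App {
    // One recorder instance for the app's lifetime.
    @StateObject private var recorder = MyAudioRecorder()

    var body: some Scene {
        WindowGroup {
            ContentView(myRecorder: recorder)
        }
    }
}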
