import SwiftUI
import Observation
import SoundAnalysis
import AVFoundation

/// App-wide model that streams microphone audio into a SoundAnalysis
/// sound classifier. UI reads `result`; `AudioObserver` (declared elsewhere)
/// receives the classification callbacks.
@Observable class ApplicationData {
   /// Latest classification text for the UI; cleared when recording stops.
   var result: String = ""

   var audioEngine = AVAudioEngine()
   var audioAnalyzer: SNAudioStreamAnalyzer?
   var observer: SNResultsObserving?

   static let shared: ApplicationData = ApplicationData()
   private init() { }

   /// Requests microphone access if needed, then configures and activates
   /// the shared audio session for simultaneous playback and recording.
   /// Errors are logged and swallowed (best-effort setup).
   func setupAudioSession() async {
      do {
         // Ask for permission only when it has not already been granted.
         if AVCaptureDevice.authorizationStatus(for: .audio) != .authorized {
            guard await AVCaptureDevice.requestAccess(for: .audio) else { return }
         }
         // Set up audio session
         let audioSession = AVAudioSession.sharedInstance()
         try audioSession.setCategory(.playAndRecord, mode: .spokenAudio)
         try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
      } catch {
         print("Error: \(error)")
      }
   }

   /// Creates a stream analyzer, attaches the built-in sound classifier,
   /// and taps the input node so every captured buffer is analyzed.
   func startAnalysis() async {
      do {
         // Use ONE format for both the analyzer and the tap. The tap
         // delivers buffers in the node's *output* format; creating the
         // analyzer from `inputFormat(forBus: 0)` (as before) can mismatch
         // the tap's buffers and make analysis fail silently.
         let analysisFormat = audioEngine.inputNode.outputFormat(forBus: 0)
         let analyzer = SNAudioStreamAnalyzer(format: analysisFormat)
         audioAnalyzer = analyzer

         let request = try SNClassifySoundRequest(classifierIdentifier: .version1)
         // Bind locally so no force-unwrap is needed when adding the request.
         let resultsObserver = AudioObserver()
         observer = resultsObserver
         try analyzer.add(request, withObserver: resultsObserver)

         // Capture this session's analyzer directly instead of re-reading
         // `self?.audioAnalyzer` from a detached task: `analyze` is
         // synchronous, and the tap callback runs on a realtime audio
         // thread, so only the hand-off is deferred.
         audioEngine.inputNode.installTap(onBus: 0, bufferSize: 4096, format: analysisFormat) { buffer, time in
            Task.detached(priority: .userInitiated) {
               analyzer.analyze(buffer, atAudioFramePosition: time.sampleTime)
            }
         }
         audioEngine.prepare()
         try audioEngine.start()
      } catch {
         print("Error: \(error)")
      }
   }

   /// Stops the engine, removes the tap, finalizes the current analysis,
   /// and resets per-session state.
   func stopRecording() async {
      audioEngine.stop()
      audioEngine.inputNode.removeTap(onBus: 0)
      audioAnalyzer?.completeAnalysis()
      // Release the finished analyzer and observer so they do not outlive
      // the session and the next start begins from a clean slate.
      audioAnalyzer = nil
      observer = nil
      result = ""
   }
}