//
//  AudioPlayerView.swift
//  Bremer
//
//  Created by yhornisse on 2023/08/05.
//

import SwiftUI

/// Playback controls for the currently loaded track: a "name - album" label,
/// a seekable progress slider, a play/pause (or no-track) glyph, and an
/// elapsed-time readout refreshed on a 0.3 s timer.
struct AudioPlayerView: View {
    @ObservedObject var audioPlayerViewModel: AudioPlayerViewModel

    // Formatted time readout (whatever string `musicTime()` produces).
    @State var playingTime = "00:00 / 00:00"
    // Playback position as a fraction of the track duration (0.0...1.0),
    // driven by `musicTimeRate()` and bound to the slider.
    @State var playingTimeRate: Double = 0.0
    // True while the user is dragging the slider; suspends timer-driven
    // slider updates so the thumb does not fight the drag.
    @State private var isSeeking = false

    // Single shared tick source. Hoisted to a property: the original built a
    // fresh publisher inside `body`, i.e. on every render pass.
    private let timer = Timer.publish(every: 0.3, on: .main, in: .common).autoconnect()

    var body: some View {
        VStack {
            Text("\(audioPlayerViewModel.playingMusic?.name ?? "") - \(audioPlayerViewModel.playingMusic?.album ?? "")")
                .font(.system(size: 14))
                .frame(height: 35, alignment: .center)
                .aspectRatio(contentMode: .fit)

            Slider(value: $playingTimeRate, onEditingChanged: { editing in
                isSeeking = editing
                // Seek only when the drag ends. The original ignored the
                // `editing` flag and also seeked on touch-down, which could
                // jump playback before the user had moved the thumb.
                if !editing {
                    audioPlayerViewModel.setAudioSeq(seq: playingTimeRate * (audioPlayerViewModel.musicPlayer?.duration ?? 0.0))
                }
            })

            if audioPlayerViewModel.musicPlayer != nil {
                if audioPlayerViewModel.isPlaying {
                    Button(action: { audioPlayerViewModel.pauseAudio() }) {
                        Image(systemName: "pause.fill")
                            .resizable()
                            .scaledToFill()
                            .frame(width: 20, height: 20)
                    }
                } else {
                    Button(action: { audioPlayerViewModel.playAudio() }) {
                        Image(systemName: "play.fill")
                            .resizable()
                            .scaledToFill()
                            .frame(width: 20, height: 20)
                    }
                }
            } else {
                // No player loaded: show the "cannot play" placeholder glyph.
                Image(systemName: "play.slash")
                    .resizable()
                    .scaledToFill()
                    .frame(width: 20, height: 20)
            }

            Text(playingTime)
                .font(Font(UIFont.monospacedSystemFont(ofSize: 18.0, weight: .regular)))
        }
        // One subscription drives both the time label and the slider position
        // (the original attached two separate `onReceive`s to the same timer).
        .onReceive(timer) { _ in
            playingTime = audioPlayerViewModel.musicTime()
            if !isSeeking {
                playingTimeRate = audioPlayerViewModel.musicTimeRate()
            }
        }
    }
}