diff --git a/Tiny/Core/Services/Audio/AudioPostProcessingManager.swift b/Tiny/Core/Services/Audio/AudioPostProcessingManager.swift index bfa5aa7..6f5111e 100644 --- a/Tiny/Core/Services/Audio/AudioPostProcessingManager.swift +++ b/Tiny/Core/Services/Audio/AudioPostProcessingManager.swift @@ -25,6 +25,15 @@ class AudioPostProcessingManager: ObservableObject { @Published var currentTime: TimeInterval = 0 @Published var duration: TimeInterval = 0 @Published var amplitude: Float = 0.0 + + var isHapticsEnabled: Bool { + hapticManager?.isHapticsEnabled ?? false + } + + func toggleHaptics() { + hapticManager?.isHapticsEnabled.toggle() + objectWillChange.send() + } init() { engine = AudioEngine() diff --git a/Tiny/Core/Services/Audio/HapticManager.swift b/Tiny/Core/Services/Audio/HapticManager.swift index 897f8a2..33056e3 100644 --- a/Tiny/Core/Services/Audio/HapticManager.swift +++ b/Tiny/Core/Services/Audio/HapticManager.swift @@ -14,7 +14,16 @@ class HapticManager { private let amplitudeThresholdLower: Float = 0.08 // Triggers for sounds above this private let amplitudeThresholdUpper: Float = 0.2 // Does not trigger for sounds above this (too loud noise) + var isHapticsEnabled: Bool { + get { UserDefaults.standard.bool(forKey: "isHapticsEnabled") } + set { UserDefaults.standard.set(newValue, forKey: "isHapticsEnabled") } + } + init() { + // Initialize default value if not set + if UserDefaults.standard.object(forKey: "isHapticsEnabled") == nil { + UserDefaults.standard.set(true, forKey: "isHapticsEnabled") + } prepareHaptics() } @@ -55,6 +64,8 @@ class HapticManager { } func playHapticFromAmplitude(_ amplitude: Float) { + guard isHapticsEnabled else { return } + let now = Date() var shouldTriggerHaptic = false diff --git a/Tiny/Features/LiveListen/ViewModels/OrbLiveListenViewModel.swift b/Tiny/Features/LiveListen/ViewModels/OrbLiveListenViewModel.swift index e45ca0d..1c2656f 100644 --- a/Tiny/Features/LiveListen/ViewModels/OrbLiveListenViewModel.swift +++ 
b/Tiny/Features/LiveListen/ViewModels/OrbLiveListenViewModel.swift @@ -25,6 +25,14 @@ class OrbLiveListenViewModel: ObservableObject { @Published var orbDragScale: CGFloat = 1.0 @Published var canSaveCurrentRecording = false @Published var currentTime: TimeInterval = 0 + + var isHapticsEnabled: Bool { + audioPostProcessingManager.isHapticsEnabled + } + + func toggleHaptics() { + audioPostProcessingManager.toggleHaptics() + } private var longPressTimer: Timer? private var playbackTimer: Timer? diff --git a/Tiny/Features/LiveListen/ViewModels/SavedRecordingPlaybackViewModel.swift b/Tiny/Features/LiveListen/ViewModels/SavedRecordingPlaybackViewModel.swift index f6bf571..7d6b6b0 100644 --- a/Tiny/Features/LiveListen/ViewModels/SavedRecordingPlaybackViewModel.swift +++ b/Tiny/Features/LiveListen/ViewModels/SavedRecordingPlaybackViewModel.swift @@ -15,11 +15,13 @@ import SwiftData class SavedRecordingPlaybackViewModel: ObservableObject { @Published var isPlaying = false @Published var currentTime: TimeInterval = 0 + @Published var duration: TimeInterval = 0 @Published var recordingName = "Heartbeat Recording" @Published var editedName = "Heartbeat Recording" @Published var isEditingName = false @Published var showSuccessAlert = false @Published var formattedDate = "" + @Published var showShareSheet = false // Drag state @Published var dragOffset: CGFloat = 0 @@ -27,12 +29,36 @@ class SavedRecordingPlaybackViewModel: ObservableObject { @Published var isDraggingToDelete = false @Published var deleteButtonScale: CGFloat = 1.0 - private var playbackTimer: Timer? private var audioManager: HeartbeatSoundManager? private var currentRecording: Recording? private var modelContext: ModelContext? private var onRecordingUpdated: (() -> Void)? 
+ let audioPostProcessingManager = AudioPostProcessingManager() + private var cancellables = Set<AnyCancellable>() + + var isHapticsEnabled: Bool { + audioPostProcessingManager.isHapticsEnabled + } + + init() { + // Subscribe to audioPostProcessingManager changes to trigger UI updates + audioPostProcessingManager.objectWillChange + .receive(on: DispatchQueue.main) + .sink { [weak self] _ in + guard let self = self else { return } + self.isPlaying = self.audioPostProcessingManager.isPlaying + self.currentTime = self.audioPostProcessingManager.currentTime + self.duration = self.audioPostProcessingManager.duration + } + .store(in: &cancellables) + } + + func toggleHaptics() { + audioPostProcessingManager.toggleHaptics() + objectWillChange.send() + } + func setupPlayback(for recording: Recording, manager: HeartbeatSoundManager, modelContext: ModelContext, onRecordingUpdated: @escaping () -> Void) { self.audioManager = manager self.currentRecording = recording @@ -66,24 +92,27 @@ class SavedRecordingPlaybackViewModel: ObservableObject { formatter.dateFormat = "d MMMM yyyy" self.formattedDate = formatter.string(from: recording.createdAt) - // Start playback - manager.togglePlayback(recording: recording) - startPlaybackTimer(manager: manager) + // Stop any existing playback in manager + manager.stop() + + // Start playback with AudioPostProcessingManager + audioPostProcessingManager.loadAndPlay(fileURL: recording.fileURL) } func togglePlayback(manager: HeartbeatSoundManager, recording: Recording) { - manager.togglePlayback(recording: recording) - - if manager.isPlayingPlayback { - startPlaybackTimer(manager: manager) + if audioPostProcessingManager.isPlaying { + audioPostProcessingManager.pause() } else { - stopPlaybackTimer() + if audioPostProcessingManager.currentTime > 0 { + audioPostProcessingManager.resume() + } else { + audioPostProcessingManager.loadAndPlay(fileURL: recording.fileURL) + } } } func cleanup() { - stopPlaybackTimer() - audioManager?.player?.stop() + 
audioPostProcessingManager.stop() } func handleDragChange(value: DragGesture.Value, geometry: GeometryProxy) { @@ -134,21 +163,6 @@ class SavedRecordingPlaybackViewModel: ObservableObject { } } - private func startPlaybackTimer(manager: HeartbeatSoundManager) { - stopPlaybackTimer() - isPlaying = true - - playbackTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self, weak manager] _ in - guard let self = self, let manager = manager else { return } - - self.isPlaying = manager.isPlayingPlayback - - if !manager.isPlayingPlayback { - self.stopPlaybackTimer() - } - } - } - func startEditing() { isEditingName = true } @@ -226,10 +240,4 @@ class SavedRecordingPlaybackViewModel: ObservableObject { print("❌ Error saving recording name: \(error)") } } - - private func stopPlaybackTimer() { - playbackTimer?.invalidate() - playbackTimer = nil - isPlaying = false - } } diff --git a/Tiny/Features/LiveListen/Views/OrbLiveListenView.swift b/Tiny/Features/LiveListen/Views/OrbLiveListenView.swift index 0df771b..749a354 100644 --- a/Tiny/Features/LiveListen/Views/OrbLiveListenView.swift +++ b/Tiny/Features/LiveListen/Views/OrbLiveListenView.swift @@ -45,15 +45,6 @@ struct OrbLiveListenView: View { // Delete Button (Only visible when dragging up) deleteButton(geometry: geometry) - // Floating Button to Open Timeline manually - if !viewModel.isListening && !viewModel.isDraggingToSave && !viewModel.isDraggingToDelete { - libraryOpenButton(geometry: geometry) - .opacity(viewModel.isDraggingToSave || viewModel.isDraggingToDelete || showSuccessAlert ? 
0.0 : 1.0) - .animation(.easeOut(duration: 0.2), value: viewModel.isDraggingToSave) - .animation(.easeOut(duration: 0.2), value: viewModel.isDraggingToDelete) - .animation(.easeOut(duration: 0.2), value: showSuccessAlert) - } - coachMarkView // Success Alert (Slide down, no overlay) @@ -133,10 +124,35 @@ struct OrbLiveListenView: View { .clipShape(Circle()) }) .glassEffect(.clear) - .padding(.bottom, 50) .transition(.opacity.animation(.easeInOut)) + + Spacer() + + HStack { + Button { + viewModel.toggleHaptics() + } label: { + Image(systemName: "iphone.gen3.radiowaves.left.and.right") + .font(.body) + .foregroundColor(viewModel.isHapticsEnabled ? .white : .white.opacity(0.4)) + .frame(width: 50, height: 50) + } + .glassEffect(.clear) + + Button { + viewModel.showShareSheet = true + } label: { + Image(systemName: "square.and.arrow.up") + .font(.body) + .foregroundColor(.white) + .frame(width: 50, height: 50) + } + .glassEffect(.clear) + } + .transition(.opacity.animation(.easeInOut)) + } else { + Spacer() } - Spacer() } .padding() Spacer() @@ -144,45 +160,6 @@ struct OrbLiveListenView: View { } } - private func libraryOpenButton(geometry: GeometryProxy) -> some View { - VStack { - HStack { - Spacer() - - if viewModel.isPlaybackMode { - Button { - viewModel.showShareSheet = true - } label: { - Image(systemName: "square.and.arrow.up") - .font(.body) - .foregroundColor(.white) - .frame(width: 50, height: 50) - .clipShape(Circle()) - } - .glassEffect(.clear) - .padding(.bottom, 50) - .transition(.opacity.animation(.easeInOut)) - } - - Button { - withAnimation(.spring(response: 0.6, dampingFraction: 0.8)) { - showTimeline = true - } - } label: { - Image(systemName: "book.fill") - .font(.body) - .foregroundColor(.white) - .frame(width: 50, height: 50) - .clipShape(Circle()) - } - .glassEffect(.clear) - .padding(.bottom, 50) - } - .padding() - Spacer() - } - } - private func saveButton(geometry: GeometryProxy) -> some View { Image(systemName: "book.fill") 
.font(.system(size: 28)) diff --git a/Tiny/Features/LiveListen/Views/SavedRecordingPlaybackView.swift b/Tiny/Features/LiveListen/Views/SavedRecordingPlaybackView.swift index ffecc37..e067f50 100644 --- a/Tiny/Features/LiveListen/Views/SavedRecordingPlaybackView.swift +++ b/Tiny/Features/LiveListen/Views/SavedRecordingPlaybackView.swift @@ -88,7 +88,9 @@ struct SavedRecordingPlaybackView: View { } } .ignoresSafeArea() - + .sheet(isPresented: $viewModel.showShareSheet) { + ShareSheet(activityItems: [recording.fileURL]) + } } .onAppear { viewModel.setupPlayback( @@ -157,15 +159,17 @@ struct SavedRecordingPlaybackView: View { // Normal buttons HStack { Button { + viewModel.toggleHaptics() } label: { Image(systemName: "iphone.gen3.radiowaves.left.and.right") .font(.body) - .foregroundColor(.white) + .foregroundColor(viewModel.isHapticsEnabled ? .white : .white.opacity(0.4)) .frame(width: 48, height: 48) } .glassEffect(.clear) Button { + viewModel.showShareSheet = true } label: { Image(systemName: "square.and.arrow.up") .font(.body) @@ -212,6 +216,7 @@ struct SavedRecordingPlaybackView: View { } // Then delete after alert is visible DispatchQueue.main.asyncAfter(deadline: .now() + 1.5) { + viewModel.cleanup() heartbeatSoundManager.deleteRecording(recording) // Navigate after another delay DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { @@ -239,6 +244,12 @@ struct SavedRecordingPlaybackView: View { .font(.system(size: 14)) .foregroundColor(.white.opacity(0.7)) + if viewModel.duration > 0 && !viewModel.isDraggingToDelete { + Text("\(Int(viewModel.currentTime))s / \(Int(viewModel.duration))s") + .font(.caption) + .foregroundColor(.white.opacity(0.7)) + } + if !viewModel.isDraggingToDelete { Text("Drag up to delete") .font(.caption)