Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions Tiny/Core/Services/Audio/AudioPostProcessingManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,15 @@ class AudioPostProcessingManager: ObservableObject {
@Published var currentTime: TimeInterval = 0
@Published var duration: TimeInterval = 0
@Published var amplitude: Float = 0.0

/// Whether haptic feedback is currently enabled.
/// Reads through to the owned `HapticManager` (which persists the flag in
/// UserDefaults). Returns `false` when `hapticManager` is nil —
/// presumably the device does not support haptics; TODO confirm why the
/// manager can be absent.
var isHapticsEnabled: Bool {
    hapticManager?.isHapticsEnabled ?? false
}

/// Flips the persisted haptics preference on the underlying `HapticManager`
/// and notifies SwiftUI observers of this object.
///
/// `objectWillChange` must be sent *before* the mutation — ObservableObject's
/// publisher is a will-change publisher, so SwiftUI snapshots the old value
/// when it fires. (`HapticManager` is a plain class, so toggling its property
/// does not emit any change notification by itself.)
func toggleHaptics() {
    objectWillChange.send()
    hapticManager?.isHapticsEnabled.toggle()
}

init() {
engine = AudioEngine()
Expand Down
11 changes: 11 additions & 0 deletions Tiny/Core/Services/Audio/HapticManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,16 @@ class HapticManager {
private let amplitudeThresholdLower: Float = 0.08 // Triggers for sounds above this
private let amplitudeThresholdUpper: Float = 0.2 // Does not trigger for sounds above this (too loud noise)

/// User preference for haptic playback, persisted in UserDefaults under the
/// "isHapticsEnabled" key. `bool(forKey:)` returns `false` when the key has
/// never been written; `init()` seeds a default of `true`, so a missing key
/// is only observable before that seeding runs.
var isHapticsEnabled: Bool {
    get { UserDefaults.standard.bool(forKey: "isHapticsEnabled") }
    set { UserDefaults.standard.set(newValue, forKey: "isHapticsEnabled") }
}

/// Sets up the haptics engine and seeds the default preference.
///
/// Uses `register(defaults:)` instead of manually checking
/// `object(forKey:) == nil` and writing `true`: the registration domain
/// supplies the fallback value (`bool(forKey:)` reports `true` until the
/// user explicitly changes the setting) without persisting a value the
/// user never chose. Registration is per-launch, so doing it in `init`
/// is correct.
init() {
    UserDefaults.standard.register(defaults: ["isHapticsEnabled": true])
    prepareHaptics()
}

Expand Down Expand Up @@ -55,6 +64,8 @@ class HapticManager {
}

func playHapticFromAmplitude(_ amplitude: Float) {
guard isHapticsEnabled else { return }

let now = Date()
var shouldTriggerHaptic = false

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,14 @@ class OrbLiveListenViewModel: ObservableObject {
@Published var orbDragScale: CGFloat = 1.0
@Published var canSaveCurrentRecording = false
@Published var currentTime: TimeInterval = 0

/// Whether haptic feedback is enabled, read through from the shared
/// `AudioPostProcessingManager` (which in turn reads the persisted
/// `HapticManager` setting).
var isHapticsEnabled: Bool {
    audioPostProcessingManager.isHapticsEnabled
}

/// Toggles the haptics preference via the shared audio manager.
///
/// Sends `objectWillChange` first: `isHapticsEnabled` on this view model is
/// a plain computed property, so without an explicit notification the
/// haptics button in `OrbLiveListenView` would not refresh. This also makes
/// the method consistent with `SavedRecordingPlaybackViewModel.toggleHaptics()`,
/// which already notifies its observers. (Will-change must precede the
/// mutation per the ObservableObject contract.)
func toggleHaptics() {
    objectWillChange.send()
    audioPostProcessingManager.toggleHaptics()
}

private var longPressTimer: Timer?
private var playbackTimer: Timer?
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,24 +15,50 @@ import SwiftData
class SavedRecordingPlaybackViewModel: ObservableObject {
@Published var isPlaying = false
@Published var currentTime: TimeInterval = 0
@Published var duration: TimeInterval = 0
@Published var recordingName = "Heartbeat Recording"
@Published var editedName = "Heartbeat Recording"
@Published var isEditingName = false
@Published var showSuccessAlert = false
@Published var formattedDate = ""
@Published var showShareSheet = false

// Drag state
@Published var dragOffset: CGFloat = 0
@Published var orbDragScale: CGFloat = 1.0
@Published var isDraggingToDelete = false
@Published var deleteButtonScale: CGFloat = 1.0

private var playbackTimer: Timer?
private var audioManager: HeartbeatSoundManager?
private var currentRecording: Recording?
private var modelContext: ModelContext?
private var onRecordingUpdated: (() -> Void)?

let audioPostProcessingManager = AudioPostProcessingManager()
private var cancellables = Set<AnyCancellable>()

/// Whether haptic feedback is enabled, read through from this view model's
/// owned `AudioPostProcessingManager` (backed by the persisted
/// `HapticManager` setting).
var isHapticsEnabled: Bool {
    audioPostProcessingManager.isHapticsEnabled
}

init() {
    // Mirror the audio manager's playback state into this view model's
    // @Published properties so SwiftUI views observing this object update.
    //
    // NOTE(review): objectWillChange fires *before* the manager's @Published
    // values actually change; this sink still reads fresh values because
    // .receive(on: DispatchQueue.main) defers delivery to a later main-queue
    // hop, by which time the mutation has completed — TODO confirm this
    // ordering assumption holds for every update path into the manager.
    audioPostProcessingManager.objectWillChange
        .receive(on: DispatchQueue.main)
        .sink { [weak self] _ in
            guard let self = self else { return }
            self.isPlaying = self.audioPostProcessingManager.isPlaying
            self.currentTime = self.audioPostProcessingManager.currentTime
            self.duration = self.audioPostProcessingManager.duration
        }
        .store(in: &cancellables)
}

/// Toggles the haptics preference via the shared audio post-processing
/// manager (which persists the flag) and refreshes observers of this
/// view model.
///
/// `objectWillChange` is sent *before* forwarding the toggle: it is a
/// will-change publisher per the ObservableObject contract, so notifying
/// after the mutation (as before) lets SwiftUI snapshot the already-updated
/// value.
func toggleHaptics() {
    objectWillChange.send()
    audioPostProcessingManager.toggleHaptics()
}

func setupPlayback(for recording: Recording, manager: HeartbeatSoundManager, modelContext: ModelContext, onRecordingUpdated: @escaping () -> Void) {
self.audioManager = manager
self.currentRecording = recording
Expand Down Expand Up @@ -66,24 +92,27 @@ class SavedRecordingPlaybackViewModel: ObservableObject {
formatter.dateFormat = "d MMMM yyyy"
self.formattedDate = formatter.string(from: recording.createdAt)

// Start playback
manager.togglePlayback(recording: recording)
startPlaybackTimer(manager: manager)
// Stop any existing playback in manager
manager.stop()

// Start playback with AudioPostProcessingManager
audioPostProcessingManager.loadAndPlay(fileURL: recording.fileURL)
}

func togglePlayback(manager: HeartbeatSoundManager, recording: Recording) {
manager.togglePlayback(recording: recording)

if manager.isPlayingPlayback {
startPlaybackTimer(manager: manager)
if audioPostProcessingManager.isPlaying {
audioPostProcessingManager.pause()
} else {
stopPlaybackTimer()
if audioPostProcessingManager.currentTime > 0 {
audioPostProcessingManager.resume()
} else {
audioPostProcessingManager.loadAndPlay(fileURL: recording.fileURL)
}
}
}

func cleanup() {
stopPlaybackTimer()
audioManager?.player?.stop()
audioPostProcessingManager.stop()
}

func handleDragChange(value: DragGesture.Value, geometry: GeometryProxy) {
Expand Down Expand Up @@ -134,21 +163,6 @@ class SavedRecordingPlaybackViewModel: ObservableObject {
}
}

private func startPlaybackTimer(manager: HeartbeatSoundManager) {
stopPlaybackTimer()
isPlaying = true

playbackTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self, weak manager] _ in
guard let self = self, let manager = manager else { return }

self.isPlaying = manager.isPlayingPlayback

if !manager.isPlayingPlayback {
self.stopPlaybackTimer()
}
}
}

func startEditing() {
isEditingName = true
}
Expand Down Expand Up @@ -226,10 +240,4 @@ class SavedRecordingPlaybackViewModel: ObservableObject {
print("❌ Error saving recording name: \(error)")
}
}

private func stopPlaybackTimer() {
playbackTimer?.invalidate()
playbackTimer = nil
isPlaying = false
}
}
77 changes: 27 additions & 50 deletions Tiny/Features/LiveListen/Views/OrbLiveListenView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -45,15 +45,6 @@ struct OrbLiveListenView: View {
// Delete Button (Only visible when dragging up)
deleteButton(geometry: geometry)

// Floating Button to Open Timeline manually
if !viewModel.isListening && !viewModel.isDraggingToSave && !viewModel.isDraggingToDelete {
libraryOpenButton(geometry: geometry)
.opacity(viewModel.isDraggingToSave || viewModel.isDraggingToDelete || showSuccessAlert ? 0.0 : 1.0)
.animation(.easeOut(duration: 0.2), value: viewModel.isDraggingToSave)
.animation(.easeOut(duration: 0.2), value: viewModel.isDraggingToDelete)
.animation(.easeOut(duration: 0.2), value: showSuccessAlert)
}

coachMarkView

// Success Alert (Slide down, no overlay)
Expand Down Expand Up @@ -133,56 +124,42 @@ struct OrbLiveListenView: View {
.clipShape(Circle())
})
.glassEffect(.clear)
.padding(.bottom, 50)
.transition(.opacity.animation(.easeInOut))

Spacer()

HStack {
Button {
viewModel.toggleHaptics()
} label: {
Image(systemName: "iphone.gen3.radiowaves.left.and.right")
.font(.body)
.foregroundColor(viewModel.isHapticsEnabled ? .white : .white.opacity(0.4))
.frame(width: 50, height: 50)
}
.glassEffect(.clear)

Button {
viewModel.showShareSheet = true
} label: {
Image(systemName: "square.and.arrow.up")
.font(.body)
.foregroundColor(.white)
.frame(width: 50, height: 50)
}
.glassEffect(.clear)
}
.transition(.opacity.animation(.easeInOut))
} else {
Spacer()
}
Spacer()
}
.padding()
Spacer()
}
}
}

private func libraryOpenButton(geometry: GeometryProxy) -> some View {
VStack {
HStack {
Spacer()

if viewModel.isPlaybackMode {
Button {
viewModel.showShareSheet = true
} label: {
Image(systemName: "square.and.arrow.up")
.font(.body)
.foregroundColor(.white)
.frame(width: 50, height: 50)
.clipShape(Circle())
}
.glassEffect(.clear)
.padding(.bottom, 50)
.transition(.opacity.animation(.easeInOut))
}

Button {
withAnimation(.spring(response: 0.6, dampingFraction: 0.8)) {
showTimeline = true
}
} label: {
Image(systemName: "book.fill")
.font(.body)
.foregroundColor(.white)
.frame(width: 50, height: 50)
.clipShape(Circle())
}
.glassEffect(.clear)
.padding(.bottom, 50)
}
.padding()
Spacer()
}
}

private func saveButton(geometry: GeometryProxy) -> some View {
Image(systemName: "book.fill")
.font(.system(size: 28))
Expand Down
15 changes: 13 additions & 2 deletions Tiny/Features/LiveListen/Views/SavedRecordingPlaybackView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,9 @@ struct SavedRecordingPlaybackView: View {
}
}
.ignoresSafeArea()

.sheet(isPresented: $viewModel.showShareSheet) {
ShareSheet(activityItems: [recording.fileURL])
}
}
.onAppear {
viewModel.setupPlayback(
Expand Down Expand Up @@ -157,15 +159,17 @@ struct SavedRecordingPlaybackView: View {
// Normal buttons
HStack {
Button {
viewModel.toggleHaptics()
} label: {
Image(systemName: "iphone.gen3.radiowaves.left.and.right")
.font(.body)
.foregroundColor(.white)
.foregroundColor(viewModel.isHapticsEnabled ? .white : .white.opacity(0.4))
.frame(width: 48, height: 48)
}
.glassEffect(.clear)

Button {
viewModel.showShareSheet = true
} label: {
Image(systemName: "square.and.arrow.up")
.font(.body)
Expand Down Expand Up @@ -212,6 +216,7 @@ struct SavedRecordingPlaybackView: View {
}
// Then delete after alert is visible
DispatchQueue.main.asyncAfter(deadline: .now() + 1.5) {
viewModel.cleanup()
heartbeatSoundManager.deleteRecording(recording)
// Navigate after another delay
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
Expand Down Expand Up @@ -239,6 +244,12 @@ struct SavedRecordingPlaybackView: View {
.font(.system(size: 14))
.foregroundColor(.white.opacity(0.7))

if viewModel.duration > 0 && !viewModel.isDraggingToDelete {
Text("\(Int(viewModel.currentTime))s / \(Int(viewModel.duration))s")
.font(.caption)
.foregroundColor(.white.opacity(0.7))
}

if !viewModel.isDraggingToDelete {
Text("Drag up to delete")
.font(.caption)
Expand Down