Skip to content

Commit

Permalink
Add support for showing media playback controls on the lock screen
Browse files Browse the repository at this point in the history
  • Loading branch information
stefanceriu committed Sep 6, 2024
1 parent d056550 commit d245956
Show file tree
Hide file tree
Showing 15 changed files with 153 additions and 33 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -282,9 +282,7 @@ class OnboardingFlowCoordinator: FlowCoordinatorProtocol {
let coordinator = SessionVerificationScreenCoordinator(parameters: parameters)

coordinator.actions
.sink { [weak self] action in
guard let self else { return }

.sink { action in
switch action {
case .done:
break // Moving to next state is handled by the global session verification listener
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,9 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
mentionBuilder = MentionBuilder()
attributedStringBuilder = AttributedStringBuilder(cacheKey: "Composer", mentionBuilder: mentionBuilder)

super.init(initialViewState: ComposerToolbarViewState(audioPlayerState: .init(id: .recorderPreview, duration: 0),
super.init(initialViewState: ComposerToolbarViewState(audioPlayerState: .init(id: .recorderPreview,
title: L10n.commonVoiceMessage,
duration: 0),
audioRecorderState: .init(),
bindings: .init()),
mediaProvider: mediaProvider)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -401,7 +401,11 @@ extension ComposerToolbar {
mentionDisplayHelper: ComposerMentionDisplayHelper.mock,
analyticsService: ServiceLocator.shared.analytics,
composerDraftService: ComposerDraftServiceMock())
model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, duration: 10.0), waveform: .data(waveformData), isUploading: uploading)
model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview,
title: L10n.commonVoiceMessage,
duration: 10.0),
waveform: .data(waveformData),
isUploading: uploading)
return model
}
return ComposerToolbar(context: composerViewModel.context,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ private extension DateFormatter {

struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
static let playerState = AudioPlayerState(id: .recorderPreview,
title: L10n.commonVoiceMessage,
duration: 10.0,
waveform: EstimatedWaveform.mockWaveform,
progress: 0.4)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -488,6 +488,7 @@ class TimelineInteractionHandler {
}

let playerState = AudioPlayerState(id: .timelineItemIdentifier(itemID),
title: L10n.commonVoiceMessage,
duration: voiceMessageRoomTimelineItem.content.duration,
waveform: voiceMessageRoomTimelineItem.content.waveform)
mediaPlayerProvider.register(audioPlayerState: playerState)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,10 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
eventID: "123",
eventContent: .message(.text(.init(body: "Short"))))),
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random),
title: L10n.commonVoiceMessage,
duration: 10,
waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
}
Expand Down Expand Up @@ -552,7 +555,10 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
source: nil,
contentType: nil),
properties: RoomTimelineItemProperties(encryptionAuthenticity: .notGuaranteed(color: .gray))),
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random),
title: L10n.commonVoiceMessage,
duration: 10,
waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
}
Expand Down
18 changes: 9 additions & 9 deletions ElementX/Sources/Services/Audio/Player/AudioPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,8 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
releaseAudioSessionTask = Task { [weak self] in
try? await Task.sleep(for: .seconds(timeInterval))
guard !Task.isCancelled else { return }
guard let self else { return }
self.releaseAudioSession()

self?.releaseAudioSession()
}
}

Expand Down Expand Up @@ -189,10 +189,10 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {

switch playerItem.status {
case .failed:
self.setInternalState(.error(playerItem.error ?? AudioPlayerError.genericError))
setInternalState(.error(playerItem.error ?? AudioPlayerError.genericError))
case .readyToPlay:
guard state == .loading else { return }
self.setInternalState(.readyToPlay)
setInternalState(.readyToPlay)
default:
break
}
Expand All @@ -202,20 +202,20 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
guard let self else { return }

if internalAudioPlayer.rate == 0 {
if self.isStopped {
self.setInternalState(.stopped)
if isStopped {
setInternalState(.stopped)
} else {
self.setInternalState(.paused)
setInternalState(.paused)
}
} else {
self.setInternalState(.playing)
setInternalState(.playing)
}
}

NotificationCenter.default.publisher(for: Notification.Name.AVPlayerItemDidPlayToEndTime)
.sink { [weak self] _ in
guard let self else { return }
self.setInternalState(.finishedPlaying)
setInternalState(.finishedPlaying)
}
.store(in: &cancellables)
}
Expand Down
119 changes: 110 additions & 9 deletions ElementX/Sources/Services/Audio/Player/AudioPlayerState.swift
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

import Combine
import Foundation
import MediaPlayer
import UIKit

enum AudioPlayerPlaybackState {
Expand All @@ -34,16 +35,15 @@ enum AudioPlayerStateIdentifier {
@MainActor
class AudioPlayerState: ObservableObject, Identifiable {
let id: AudioPlayerStateIdentifier
let title: String
private(set) var duration: Double
let waveform: EstimatedWaveform
@Published private(set) var progress: Double

@Published private(set) var playbackState: AudioPlayerPlaybackState
/// Similar to `playbackState`, with one difference: `.loading`
/// updates are delayed by a fixed amount of time.
@Published private(set) var playerButtonPlaybackState: AudioPlayerPlaybackState
@Published private(set) var progress: Double
var showProgressIndicator: Bool {
progress > 0
}

private weak var audioPlayer: AudioPlayerProtocol?
private var audioPlayerSubscription: AnyCancellable?
Expand All @@ -53,6 +53,10 @@ class AudioPlayerState: ObservableObject, Identifiable {
/// The file url that the last player attached to this object has loaded.
/// The file url persists even if the AudioPlayer will be detached later.
private(set) var fileURL: URL?

var showProgressIndicator: Bool {
progress > 0
}

var isAttached: Bool {
audioPlayer != nil
Expand All @@ -62,8 +66,9 @@ class AudioPlayerState: ObservableObject, Identifiable {
displayLink != nil
}

init(id: AudioPlayerStateIdentifier, duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
init(id: AudioPlayerStateIdentifier, title: String, duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
self.id = id
self.title = title
self.duration = duration
self.waveform = waveform ?? EstimatedWaveform(data: [])
self.progress = progress
Expand Down Expand Up @@ -146,12 +151,19 @@ class AudioPlayerState: ObservableObject, Identifiable {
}
startPublishProgress()
playbackState = .playing
case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
setUpRemoteCommandCenter()
case .didPausePlaying:
stopPublishProgress()
playbackState = .stopped
if case .didFinishPlaying = action {
progress = 0.0
}
case .didStopPlaying:
playbackState = .stopped
stopPublishProgress()
tearDownRemoteCommandCenter()
case .didFinishPlaying:
playbackState = .stopped
progress = 0.0
stopPublishProgress()
tearDownRemoteCommandCenter()
case .didFailWithError:
stopPublishProgress()
playbackState = .error
Expand All @@ -172,6 +184,8 @@ class AudioPlayerState: ObservableObject, Identifiable {
if let currentTime = audioPlayer?.currentTime, duration > 0 {
progress = currentTime / duration
}

updateNowPlayingInfoCenter()
}

private func stopPublishProgress() {
Expand Down Expand Up @@ -200,6 +214,93 @@ class AudioPlayerState: ObservableObject, Identifiable {
.removeDuplicates()
.weakAssign(to: \.playerButtonPlaybackState, on: self)
}

/// Registers lock screen / control centre handlers for play, pause and skip
/// and starts receiving remote control events.
/// Counterpart of `tearDownRemoteCommandCenter()`.
private func setUpRemoteCommandCenter() {
    UIApplication.shared.beginReceivingRemoteControlEvents()
    
    let commandCenter = MPRemoteCommandCenter.shared()
    
    commandCenter.playCommand.isEnabled = true
    commandCenter.playCommand.removeTarget(nil)
    commandCenter.playCommand.addTarget { [weak self] _ in
        guard let audioPlayer = self?.audioPlayer else {
            return MPRemoteCommandHandlerStatus.commandFailed
        }
        
        audioPlayer.play()
        
        return MPRemoteCommandHandlerStatus.success
    }
    
    commandCenter.pauseCommand.isEnabled = true
    commandCenter.pauseCommand.removeTarget(nil)
    commandCenter.pauseCommand.addTarget { [weak self] _ in
        guard let audioPlayer = self?.audioPlayer else {
            return MPRemoteCommandHandlerStatus.commandFailed
        }
        
        audioPlayer.pause()
        
        return MPRemoteCommandHandlerStatus.success
    }
    
    commandCenter.skipForwardCommand.isEnabled = true
    commandCenter.skipForwardCommand.removeTarget(nil)
    commandCenter.skipForwardCommand.addTarget { [weak self] event in
        guard let audioPlayer = self?.audioPlayer,
              let skipEvent = event as? MPSkipIntervalCommandEvent,
              audioPlayer.duration > 0 else {
            return MPRemoteCommandHandlerStatus.commandFailed
        }
        
        Task {
            // `seek(to:)` expects a progress fraction in 0...1 (see the seek handling in
            // `updateProgress` and the mock in AudioPlayerStateTests), not an absolute time,
            // so convert the target time and clamp it to the playable range.
            let targetTime = min(audioPlayer.currentTime + skipEvent.interval, audioPlayer.duration)
            await audioPlayer.seek(to: targetTime / audioPlayer.duration)
        }
        
        return MPRemoteCommandHandlerStatus.success
    }
    
    commandCenter.skipBackwardCommand.isEnabled = true
    commandCenter.skipBackwardCommand.removeTarget(nil)
    commandCenter.skipBackwardCommand.addTarget { [weak self] event in
        guard let audioPlayer = self?.audioPlayer,
              let skipEvent = event as? MPSkipIntervalCommandEvent,
              audioPlayer.duration > 0 else {
            return MPRemoteCommandHandlerStatus.commandFailed
        }
        
        Task {
            // Same unit conversion as skip forward; never seek below the start.
            let targetTime = max(audioPlayer.currentTime - skipEvent.interval, 0)
            await audioPlayer.seek(to: targetTime / audioPlayer.duration)
        }
        
        return MPRemoteCommandHandlerStatus.success
    }
}

/// Stops receiving remote control events, clears the lock screen's now playing
/// info and unregisters every command handler installed by `setUpRemoteCommandCenter()`.
private func tearDownRemoteCommandCenter() {
    UIApplication.shared.endReceivingRemoteControlEvents()
    
    // Wipe the lock screen metadata and mark playback as stopped.
    let infoCenter = MPNowPlayingInfoCenter.default()
    infoCenter.nowPlayingInfo = nil
    infoCenter.playbackState = .stopped
    
    // Disable and detach each of the commands we previously registered.
    let commandCenter = MPRemoteCommandCenter.shared()
    let commands = [commandCenter.playCommand,
                    commandCenter.pauseCommand,
                    commandCenter.skipForwardCommand,
                    commandCenter.skipBackwardCommand]
    
    for command in commands {
        command.isEnabled = false
        command.removeTarget(nil)
    }
}

/// Pushes the current title, duration and elapsed time to the system's
/// now playing info centre so the lock screen stays in sync with playback.
private func updateNowPlayingInfoCenter() {
    // Without an attached player there is nothing meaningful to publish.
    guard let audioPlayer else { return }
    
    var nowPlayingInfo = [String: Any]()
    nowPlayingInfo[MPMediaItemPropertyTitle] = title
    nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = audioPlayer.duration as Any
    nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = audioPlayer.currentTime as Any
    
    MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}

extension AudioPlayerState: Equatable {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ struct VoiceMessageRoomPlaybackView_Previews: PreviewProvider, TestablePreview {
0, 0, 0, 0, 0, 3])

static var playerState = AudioPlayerState(id: .timelineItemIdentifier(.random),
title: L10n.commonVoiceMessage,
duration: 10.0,
waveform: waveform,
progress: 0.3)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ struct VoiceMessageRoomTimelineView_Previews: PreviewProvider, TestablePreview {
contentType: nil))

static let playerState = AudioPlayerState(id: .timelineItemIdentifier(timelineItemIdentifier),
title: L10n.commonVoiceMessage,
duration: 10.0,
waveform: EstimatedWaveform.mockWaveform,
progress: 0.4)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,9 @@ struct RoomTimelineItemView: View {
case .poll(let item):
PollRoomTimelineView(timelineItem: item)
case .voice(let item):
VoiceMessageRoomTimelineView(timelineItem: item, playerState: context?.viewState.audioPlayerStateProvider?(item.id) ?? AudioPlayerState(id: .timelineItemIdentifier(item.id), duration: 0))
VoiceMessageRoomTimelineView(timelineItem: item, playerState: context?.viewState.audioPlayerStateProvider?(item.id) ?? AudioPlayerState(id: .timelineItemIdentifier(item.id),
title: L10n.commonVoiceMessage,
duration: 0))
case .callInvite(let item):
CallInviteRoomTimelineView(timelineItem: item)
case .callNotification(let item):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ class VoiceMessageRecorder: VoiceMessageRecorderProtocol {
}

// Build the preview audio player state
previewAudioPlayerState = await AudioPlayerState(id: .recorderPreview, duration: recordingDuration, waveform: EstimatedWaveform(data: []))
previewAudioPlayerState = await AudioPlayerState(id: .recorderPreview, title: L10n.commonVoiceMessage, duration: recordingDuration, waveform: EstimatedWaveform(data: []))

// Build the preview audio player
let mediaSource = MediaSourceProxy(url: url, mimeType: mp4accMimeType)
Expand Down
5 changes: 3 additions & 2 deletions UnitTests/Sources/AudioPlayerStateTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ class AudioPlayerStateTests: XCTestCase {
audioPlayerMock.underlyingActions = audioPlayerActions
audioPlayerMock.state = .stopped
audioPlayerMock.currentTime = 0.0
audioPlayerMock.duration = 0.0
audioPlayerMock.seekToClosure = { [audioPlayerSeekCallsSubject] progress in
audioPlayerSeekCallsSubject?.send(progress)
}
Expand All @@ -46,7 +47,7 @@ class AudioPlayerStateTests: XCTestCase {
override func setUp() async throws {
audioPlayerActionsSubject = .init()
audioPlayerSeekCallsSubject = .init()
audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), duration: Self.audioDuration)
audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), title: "", duration: Self.audioDuration)
audioPlayerMock = buildAudioPlayerMock()
audioPlayerMock.seekToClosure = { [weak self] progress in
self?.audioPlayerMock.currentTime = Self.audioDuration * progress
Expand Down Expand Up @@ -170,7 +171,7 @@ class AudioPlayerStateTests: XCTestCase {
func testHandlingAudioPlayerActionDidFinishLoading() async throws {
audioPlayerMock.duration = 10.0

audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 0)
audioPlayerState = AudioPlayerState(id: .timelineItemIdentifier(.random), title: "", duration: 0)
audioPlayerState.attachAudioPlayer(audioPlayerMock)

let deferred = deferFulfillment(audioPlayerState.$playbackState) { action in
Expand Down
4 changes: 3 additions & 1 deletion UnitTests/Sources/ComposerToolbarViewModelTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,9 @@ class ComposerToolbarViewModelTests: XCTestCase {
viewModel.context.composerFormattingEnabled = false
let waveformData: [Float] = Array(repeating: 1.0, count: 1000)
viewModel.context.plainComposerText = .init(string: "Hello world!")
viewModel.process(timelineAction: .setMode(mode: .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, duration: 10.0), waveform: .data(waveformData), isUploading: false)))
viewModel.process(timelineAction: .setMode(mode: .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, title: "", duration: 10.0),
waveform: .data(waveformData),
isUploading: false)))
viewModel.saveDraft()

await fulfillment(of: [expectation], timeout: 10)
Expand Down
Loading

0 comments on commit d245956

Please sign in to comment.