Allow toggling of speaker output (livekit#107)
* route change observer

* move to audio manager

* config

* tweaks

* ref

* ref

* ref

* custom

* dep

* fix

* clean
hiroshihorie authored Jul 19, 2022
1 parent beaa6c5 commit 7570b68
Showing 3 changed files with 198 additions and 94 deletions.
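
The user-facing change is the new `preferSpeakerOutput` flag on `AudioManager` (see Sources/LiveKit/Track/AudioManager.swift below). A minimal usage sketch, assuming an app-side helper — `setSpeakerphone` is hypothetical, the property itself comes from this commit:

```swift
import LiveKit

// Hypothetical app-side helper. On iOS, flipping the flag mutates
// AudioManager's state, which is expected to re-run the audio session
// configuration and call overrideOutputAudioPort(.speaker / .none).
func setSpeakerphone(_ enabled: Bool) {
    AudioManager.shared.preferSpeakerOutput = enabled
}

// e.g. wired to a toggle in the UI:
setSpeakerphone(true)   // route audio to the built-in speaker
setSpeakerphone(false)  // fall back to the receiver / current route
```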
72 changes: 5 additions & 67 deletions Sources/LiveKit/Extensions/LiveKit+AudioSession.swift
@@ -20,8 +20,9 @@ import WebRTC
/// - Parameters:
/// - newState: The new state of audio tracks
/// - oldState: The previous state of audio tracks
public typealias ShouldConfigureAudioSessionFunc = (_ newState: AudioManager.State,
_ oldState: AudioManager.State) -> Void
@available(*, deprecated, message: "Moved to AudioManager.ConfigureAudioSessionFunc")
public typealias ShouldConfigureAudioSessionFunc = (_ newState: AudioManager.TrackState,
_ oldState: AudioManager.TrackState) -> Void

extension LiveKit {

@@ -36,71 +37,8 @@ extension LiveKit {
///
/// View ``defaultShouldConfigureAudioSessionFunc(newState:oldState:)`` for the default implementation.
///
public static var onShouldConfigureAudioSession: ShouldConfigureAudioSessionFunc = defaultShouldConfigureAudioSessionFunc
@available(*, deprecated, message: "Use AudioManager.shared.customConfigureFunc instead")
public static var onShouldConfigureAudioSession: ShouldConfigureAudioSessionFunc?

/// Configure the `RTCAudioSession` of `WebRTC` framework.
///
/// > Note: It is recommended to use `RTCAudioSessionConfiguration.webRTC()` to obtain an instance of `RTCAudioSessionConfiguration` instead of instantiating directly.
///
/// View ``defaultShouldConfigureAudioSessionFunc(newState:oldState:)`` for usage of this method.
///
/// - Parameters:
/// - configuration: A configured RTCAudioSessionConfiguration
/// - setActive: passing true/false will call `AVAudioSession.setActive` internally
public static func configureAudioSession(_ configuration: RTCAudioSessionConfiguration,
setActive: Bool? = nil) {

let audioSession: RTCAudioSession = DispatchQueue.webRTC.sync {
let result = RTCAudioSession.sharedInstance()
result.lockForConfiguration()
return result
}

defer { DispatchQueue.webRTC.sync { audioSession.unlockForConfiguration() } }

do {
logger.log("configuring audio session with category: \(configuration.category), mode: \(configuration.mode), setActive: \(String(describing: setActive))", type: LiveKit.self)

if let setActive = setActive {
try DispatchQueue.webRTC.sync { try audioSession.setConfiguration(configuration, active: setActive) }
} else {
try DispatchQueue.webRTC.sync { try audioSession.setConfiguration(configuration) }
}
} catch let error {
logger.log("Failed to configureAudioSession with error: \(error)", .error, type: LiveKit.self)
}
}

/// The default implementation when audio session configuration is requested by the SDK.
public static func defaultShouldConfigureAudioSessionFunc(newState: AudioManager.State,
oldState: AudioManager.State) {

let config = DispatchQueue.webRTC.sync { RTCAudioSessionConfiguration.webRTC() }

switch newState {
case .remoteOnly:
config.category = AVAudioSession.Category.playback.rawValue
config.mode = AVAudioSession.Mode.spokenAudio.rawValue
config.categoryOptions = AVAudioSession.CategoryOptions.duckOthers
case .localOnly, .localAndRemote:
config.category = AVAudioSession.Category.playAndRecord.rawValue
config.mode = AVAudioSession.Mode.videoChat.rawValue
config.categoryOptions = AVAudioSession.CategoryOptions.duckOthers
default:
config.category = AVAudioSession.Category.soloAmbient.rawValue
config.mode = AVAudioSession.Mode.default.rawValue
}

var setActive: Bool?
if newState != .none, oldState == .none {
// activate audio session when there is any local/remote audio track
setActive = true
} else if newState == .none, oldState != .none {
// deactivate audio session when there are no more local/remote audio tracks
setActive = false
}

configureAudioSession(config, setActive: setActive)
}
#endif
}
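
For code that previously assigned `LiveKit.onShouldConfigureAudioSession`, the replacement is `AudioManager.shared.customConfigureFunc`, which receives the new `AudioManager.State` values. A hedged migration sketch (iOS only; the category/mode choices here simply mirror the default implementation further down and are not the only valid ones):

```swift
import LiveKit
import WebRTC
import AVFoundation

AudioManager.shared.customConfigureFunc = { newState, oldState in

    // as recommended, start from the WebRTC preset rather than a blank configuration
    let config = RTCAudioSessionConfiguration.webRTC()

    switch newState.trackState {
    case .remoteOnly:
        // playback only, no microphone needed
        config.category = AVAudioSession.Category.playback.rawValue
        config.mode = AVAudioSession.Mode.spokenAudio.rawValue
    case .localOnly, .localAndRemote:
        // publishing local audio, microphone required
        config.category = AVAudioSession.Category.playAndRecord.rawValue
        config.mode = AVAudioSession.Mode.videoChat.rawValue
    default:
        config.category = AVAudioSession.Category.soloAmbient.rawValue
        config.mode = AVAudioSession.Mode.default.rawValue
    }

    // activate / deactivate only when crossing the "no audio tracks" boundary,
    // mirroring defaultShouldConfigureAudioSessionFunc
    var setActive: Bool?
    if newState.trackState != .none, oldState.trackState == .none {
        setActive = true
    } else if newState.trackState == .none, oldState.trackState != .none {
        setActive = false
    }

    AudioManager.shared.configureAudioSession(config,
                                              setActive: setActive,
                                              preferSpeakerOutput: AudioManager.shared.preferSpeakerOutput)
}
```

Note that setting `customConfigureFunc` replaces the default behaviour entirely, and the deprecated `LiveKit.onShouldConfigureAudioSession`, if still set, takes precedence over both (see `configureAudioSession(newState:oldState:)` in AudioManager below).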
1 change: 1 addition & 0 deletions Sources/LiveKit/Support/StateSync.swift
@@ -66,6 +66,7 @@ internal final class StateSync<Value> {

// read only
subscript<Property>(dynamicMember keyPath: KeyPath<Value, Property>) -> Property {
// concurrent
queue.sync { _value[keyPath: keyPath] }
}
}
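
The change above is tiny, but `StateSync` is what the reworked `AudioManager` below leans on: concurrent key-path reads plus serialized mutation with an `onMutate` hook. The SDK's actual implementation is not fully shown in this diff, so the following is only a minimal sketch of that pattern, with hypothetical names where they differ:

```swift
import Foundation

// Minimal sketch (not the SDK's implementation): reads run concurrently,
// writes go through a barrier, and onMutate reports (new, old) after each write.
@dynamicMemberLookup
final class SimpleStateSync<Value> {

    typealias OnMutate = (_ newState: Value, _ oldState: Value) -> Void

    var onMutate: OnMutate?

    private let queue = DispatchQueue(label: "StateSync", attributes: .concurrent)
    private var _value: Value

    init(_ value: Value) {
        self._value = value
    }

    // synchronous mutation; the barrier excludes concurrent readers
    func mutate(_ block: (inout Value) -> Void) {
        queue.sync(flags: .barrier) {
            let oldValue = _value
            block(&_value)
            onMutate?(_value, oldValue)
        }
    }

    // fire-and-forget mutation, as used by trackDidStart / trackDidStop
    func mutateAsync(_ block: @escaping (inout Value) -> Void) {
        queue.async(flags: .barrier) {
            let oldValue = self._value
            block(&self._value)
            self.onMutate?(self._value, oldValue)
        }
    }

    // read only
    subscript<Property>(dynamicMember keyPath: KeyPath<Value, Property>) -> Property {
        // concurrent
        queue.sync { _value[keyPath: keyPath] }
    }
}
```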
219 changes: 192 additions & 27 deletions Sources/LiveKit/Track/AudioManager.swift
@@ -20,61 +20,226 @@ import WebRTC
// Audio Session Configuration related
public class AudioManager: Loggable {

public enum State {
// MARK: - Public

public static let shared = AudioManager()

public typealias ConfigureAudioSessionFunc = (_ newState: State,
_ oldState: State) -> Void

public var customConfigureFunc: ConfigureAudioSessionFunc?

public enum TrackState {
case none
case localOnly
case remoteOnly
case localAndRemote
}

public struct State {
var localTracksCount: Int = 0
var remoteTracksCount: Int = 0
var preferSpeakerOutput: Bool = false
}

public var localTracksCount: Int { _state.localTracksCount }
public var remoteTracksCount: Int { _state.remoteTracksCount }
public var preferSpeakerOutput: Bool {
get { _state.preferSpeakerOutput }
set { _state.mutate { $0.preferSpeakerOutput = newValue } }
}

// MARK: - Internal

internal enum `Type` {
case local
case remote
}

public static let shared = AudioManager()
// MARK: - Private

private var _state = StateSync(State())
private let configureQueue = DispatchQueue(label: "LiveKitSDK.AudioManager.configure", qos: .default)

#if os(iOS)
private let notificationQueue = OperationQueue()
private var routeChangeObserver: NSObjectProtocol?
#endif

// Singleton
private init() {

public private(set) var state: State = .none {
didSet {
guard oldValue != state else { return }
log("AudioManager.state didUpdate \(oldValue) -> \(state)")
#if os(iOS)
LiveKit.onShouldConfigureAudioSession(state, oldValue)
#endif
#if os(iOS)
//
routeChangeObserver = NotificationCenter.default.addObserver(forName: AVAudioSession.routeChangeNotification,
object: nil,
queue: notificationQueue) { [weak self] notification in
//
guard let self = self else { return }
self.log("AVAudioSession.routeChangeNotification \(String(describing: notification.userInfo))")

guard let number = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? NSNumber?,
let uint = number?.uintValue,
let reason = AVAudioSession.RouteChangeReason(rawValue: uint) else { return }

switch reason {
case .newDeviceAvailable:
self.log("newDeviceAvailable")
default: break
}
}
}
#endif

public private(set) var localTracksCount = 0 {
didSet { recomputeState() }
// trigger events when state mutates
_state.onMutate = { [weak self] newState, oldState in
guard let self = self else { return }
self.configureQueue.async {
self.configureAudioSession(newState: newState, oldState: oldState)
}
}
}

public private(set) var remoteTracksCount = 0 {
didSet { recomputeState() }
deinit {
#if os(iOS)
// remove the route change observer
if let observer = routeChangeObserver {
NotificationCenter.default.removeObserver(observer)
}
#endif
}

// Singleton
private init() {}

internal func trackDidStart(_ type: Type) {
if type == .local { localTracksCount += 1 }
if type == .remote { remoteTracksCount += 1 }
// async mutation
_state.mutateAsync { state in
if type == .local { state.localTracksCount += 1 }
if type == .remote { state.remoteTracksCount += 1 }
}
}

internal func trackDidStop(_ type: Type) {
if type == .local { localTracksCount -= 1 }
if type == .remote { remoteTracksCount -= 1 }
// async mutation
_state.mutateAsync { state in
if type == .local { state.localTracksCount -= 1 }
if type == .remote { state.remoteTracksCount -= 1 }
}
}

private func configureAudioSession(newState: State,
oldState: State) {
log("\(oldState) -> \(newState)")

#if os(iOS)
if let _deprecatedFunc = LiveKit.onShouldConfigureAudioSession {
_deprecatedFunc(newState.trackState, oldState.trackState)
} else if let customConfigureFunc = customConfigureFunc {
customConfigureFunc(newState, oldState)
} else {
defaultShouldConfigureAudioSessionFunc(newState: newState,
oldState: oldState)
}
#endif
}

private func recomputeState() {
#if os(iOS)
/// Configure the `RTCAudioSession` of `WebRTC` framework.
///
/// > Note: It is recommended to use `RTCAudioSessionConfiguration.webRTC()` to obtain an instance of `RTCAudioSessionConfiguration` instead of instantiating directly.
///
/// View ``defaultShouldConfigureAudioSessionFunc(newState:oldState:)`` for usage of this method.
///
/// - Parameters:
/// - configuration: A configured RTCAudioSessionConfiguration
/// - setActive: passing true/false will call `AVAudioSession.setActive` internally
public func configureAudioSession(_ configuration: RTCAudioSessionConfiguration,
setActive: Bool? = nil,
preferSpeakerOutput: Bool = true) {

let session: RTCAudioSession = DispatchQueue.webRTC.sync {
let result = RTCAudioSession.sharedInstance()
result.lockForConfiguration()
return result
}

defer { DispatchQueue.webRTC.sync { session.unlockForConfiguration() } }

do {
logger.log("configuring audio session with category: \(configuration.category), mode: \(configuration.mode), setActive: \(String(describing: setActive))", type: AudioManager.self)

if let setActive = setActive {
try DispatchQueue.webRTC.sync { try session.setConfiguration(configuration, active: setActive) }
} else {
try DispatchQueue.webRTC.sync { try session.setConfiguration(configuration) }
}

} catch let error {
logger.log("Failed to configureAudioSession with error: \(error)", .error, type: AudioManager.self)
}

do {
logger.log("preferSpeakerOutput: \(preferSpeakerOutput)", type: AudioManager.self)
try DispatchQueue.webRTC.sync { try session.overrideOutputAudioPort(preferSpeakerOutput ? .speaker : .none) }
} catch let error {
logger.log("Failed to overrideOutputAudioPort with error: \(error)", .error, type: AudioManager.self)
}
}

/// The default implementation when audio session configuration is requested by the SDK.
public func defaultShouldConfigureAudioSessionFunc(newState: State,
oldState: State) {

let config = DispatchQueue.webRTC.sync { RTCAudioSessionConfiguration.webRTC() }

var categoryOptions: AVAudioSession.CategoryOptions = []

switch newState.trackState {
case .remoteOnly:
config.category = AVAudioSession.Category.playback.rawValue
config.mode = AVAudioSession.Mode.spokenAudio.rawValue
case .localOnly, .localAndRemote:
config.category = AVAudioSession.Category.playAndRecord.rawValue
config.mode = AVAudioSession.Mode.videoChat.rawValue

categoryOptions = [.allowBluetooth, .allowBluetoothA2DP]

if newState.preferSpeakerOutput {
categoryOptions.insert(.defaultToSpeaker)
}

default:
config.category = AVAudioSession.Category.soloAmbient.rawValue
config.mode = AVAudioSession.Mode.default.rawValue
}

config.categoryOptions = categoryOptions

var setActive: Bool?
if newState.trackState != .none, oldState.trackState == .none {
// activate audio session when there is any local/remote audio track
setActive = true
} else if newState.trackState == .none, oldState.trackState != .none {
// deactivate audio session when there are no more local/remote audio tracks
setActive = false
}

configureAudioSession(config,
setActive: setActive,
preferSpeakerOutput: newState.preferSpeakerOutput)
}
#endif
}

extension AudioManager.State {

public var trackState: AudioManager.TrackState {

if localTracksCount > 0 && remoteTracksCount == 0 {
state = .localOnly
return .localOnly
} else if localTracksCount == 0 && remoteTracksCount > 0 {
state = .remoteOnly
return .remoteOnly
} else if localTracksCount > 0 && remoteTracksCount > 0 {
state = .localAndRemote
} else {
state = .none
return .localAndRemote
}

return .none
}
}
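
For reference, the derived `trackState` above is what drives the default configuration: `.none` maps to `soloAmbient`/`default` (session deactivated), `.remoteOnly` to `playback`/`spokenAudio`, and `.localOnly`/`.localAndRemote` to `playAndRecord`/`videoChat` with `.defaultToSpeaker` added when `preferSpeakerOutput` is set. A test-style sketch of the derivation (the memberwise `State` initializer used here is internal, so this would only compile inside the SDK module or its tests):

```swift
// Inside the LiveKit module / test target (State's members are internal):
let listening = AudioManager.State(localTracksCount: 0, remoteTracksCount: 2, preferSpeakerOutput: false)
assert(listening.trackState == .remoteOnly)       // -> playback / spokenAudio

let publishing = AudioManager.State(localTracksCount: 1, remoteTracksCount: 2, preferSpeakerOutput: true)
assert(publishing.trackState == .localAndRemote)  // -> playAndRecord / videoChat + .defaultToSpeaker

let idle = AudioManager.State()
assert(idle.trackState == .none)                  // -> soloAmbient / default, session deactivated
```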
