
Commit

[Fix]AudioSession management via CallSettings (#585)
ipavlidakis authored Nov 6, 2024
1 parent 0f23f47 commit 6fe9f25
Showing 41 changed files with 1,898 additions and 1,311 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -11,6 +11,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Sync microphone mute state between the SDK and CallKit [#590](https://github.com/GetStream/stream-video-swift/pull/590)

### 🐞 Fixed
- Toggling the speaker during a call wasn't always working. [#585](https://github.com/GetStream/stream-video-swift/pull/585)
- In some cases, when joining a call, setup wasn't completed correctly, which led to issues during the call (e.g. missing video tracks or mute state not updating). [#586](https://github.com/GetStream/stream-video-swift/pull/586)

# [1.13.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.13.0)
53 changes: 0 additions & 53 deletions Sources/StreamVideo/Utils/AudioRecorder/AudioSessionProtocol.swift

This file was deleted.

@@ -15,13 +15,11 @@ open class StreamCallAudioRecorder: @unchecked Sendable {
private struct StartRecordingRequest: Hashable { var hasActiveCall, ignoreActiveCall, isRecording: Bool }

@Injected(\.activeCallProvider) private var activeCallProvider
@Injected(\.activeCallAudioSession) private var activeCallAudioSession

/// The builder used to create the AVAudioRecorder instance.
let audioRecorderBuilder: AVAudioRecorderBuilder

/// The audio session used for recording and playback.
let audioSession: AudioSessionProtocol

/// A private task responsible for setting up the recorder in the background.
private var setUpTask: Task<Void, Error>?

@@ -36,29 +34,15 @@ open class StreamCallAudioRecorder: @unchecked Sendable {
/// A public publisher that exposes the average power of the audio signal.
open private(set) lazy var metersPublisher: AnyPublisher<Float, Never> = _metersPublisher.eraseToAnyPublisher()

private let queue = UnfairQueue()
private var _isRecording: Bool = false
private var isRecording: Bool {
get { queue.sync { _isRecording } }
set { queue.sync { _isRecording = newValue } }
}
@Atomic private var isRecording: Bool = false

/// Indicates whether an active call is present, influencing recording behaviour.
private var hasActiveCall: Bool = false {
didSet {
guard hasActiveCall != oldValue else { return }
log.debug("🎙️updated with hasActiveCall:\(hasActiveCall).")
if !hasActiveCall {
Task {
await stopRecording()
do {
/// It's safe to deactivate the session as a call isn't in progress.
try audioSession.setActive(false, options: [])
log.debug("🎙️AudioSession deactivated.")
} catch {
log.error("🎙️Failed to deactivate AudioSession.", error: error)
}
}
Task { await stopRecording() }
}
}
}
@@ -70,21 +54,17 @@ open class StreamCallAudioRecorder: @unchecked Sendable {
/// - Parameter filename: The name of the file to record to.
public init(filename: String) {
audioRecorderBuilder = .init(inCacheDirectoryWithFilename: filename)
audioSession = AVAudioSession.sharedInstance()

setUp()
}

/// Initializes the recorder with a custom builder and audio session.
///
/// - Parameter audioRecorderBuilder: The builder used to create the recorder.
/// - Parameter audioSession: The audio session used for recording and playback.
init(
audioRecorderBuilder: AVAudioRecorderBuilder,
audioSession: AudioSessionProtocol
audioRecorderBuilder: AVAudioRecorderBuilder
) {
self.audioRecorderBuilder = audioRecorderBuilder
self.audioSession = audioSession

setUp()
}
@@ -196,11 +176,8 @@ open class StreamCallAudioRecorder: @unchecked Sendable {
}

private func setUpAudioCaptureIfRequired() async throws -> AVAudioRecorder {
try audioSession.setCategory(.playAndRecord)
try audioSession.setActive(true, options: [])

guard
await audioSession.requestRecordPermission()
await activeCallAudioSession?.requestRecordPermission() == true
else {
throw ClientError("🎙️Permission denied.")
}
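With session handling now delegated to the injected activeCallAudioSession, the recorder is constructed with just a filename. A minimal usage sketch based only on the public pieces shown in this diff (`init(filename:)` and `metersPublisher`); the filename and the handling inside `sink` are illustrative assumptions:

import Combine
import StreamVideo

// Hypothetical usage sketch: observe microphone levels from the recorder.
// The filename and the work done inside sink are illustrative assumptions.
let recorder = StreamCallAudioRecorder(filename: "call-recording.m4a")
let cancellable = recorder.metersPublisher
    .sink { averagePower in
        // averagePower is the value emitted by metersPublisher above.
        print("🎙️ average power: \(averagePower)")
    }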
107 changes: 107 additions & 0 deletions Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift
@@ -0,0 +1,107 @@
//
// Copyright © 2024 Stream.io Inc. All rights reserved.
//

import AVFoundation
import Foundation
import StreamWebRTC

/// A protocol defining the interface for managing an audio session,
/// with properties and methods to control audio settings, activation,
/// and routing configurations.
public protocol AudioSessionProtocol: AnyObject {

/// A Boolean value indicating whether the audio session is active.
var isActive: Bool { get }

/// The current route description for the audio session.
var currentRoute: AVAudioSessionRouteDescription { get }

/// The audio category of the session.
var category: String { get }

/// A Boolean value indicating whether the audio session uses speaker output.
var isUsingSpeakerOutput: Bool { get }

/// A Boolean value indicating whether the audio session uses an external
/// audio output, such as headphones or Bluetooth.
var isUsingExternalOutput: Bool { get }

/// A Boolean value indicating whether the session uses manual audio routing.
var useManualAudio: Bool { get set }

/// A Boolean value indicating whether audio is enabled for the session.
var isAudioEnabled: Bool { get set }

/// Adds a delegate to receive updates about audio session events.
/// - Parameter delegate: The delegate conforming to `RTCAudioSessionDelegate`.
func add(_ delegate: RTCAudioSessionDelegate)

/// Sets the audio mode of the session.
/// - Parameter mode: The audio mode to set, such as `.videoChat` or `.voiceChat`.
/// - Throws: An error if setting the mode fails, usually because the configuration hasn't been locked.
/// Prefer wrapping this method using `updateConfiguration`.
func setMode(_ mode: String) throws

/// Configures the audio category and options for the session.
/// - Parameters:
/// - category: The audio category to set, like `.playAndRecord`.
/// - categoryOptions: Options for the audio category, such as
/// `.allowBluetooth` or `.defaultToSpeaker`.
/// - Throws: An error if setting the category fails, usually because the configuration hasn't been locked.
/// Prefer wrapping this method using `updateConfiguration`.
func setCategory(
_ category: String,
with categoryOptions: AVAudioSession.CategoryOptions
) throws

/// Activates or deactivates the audio session.
/// - Parameter isActive: A Boolean indicating whether the session
/// should be activated.
/// - Throws: An error if activating or deactivating the session fails, usually because the configuration hasn't been locked.
/// Prefer wrapping this method using `updateConfiguration`.
func setActive(_ isActive: Bool) throws

/// Sets the session configuration for WebRTC audio settings.
/// - Parameter configuration: The configuration to apply to the session.
/// - Throws: An error if applying the configuration fails, usually because the configuration hasn't been locked.
/// Prefer wrapping this method using `updateConfiguration`.
func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws

/// Overrides the current output audio port for the session.
/// - Parameter port: The port to use, such as `.speaker` or `.none`.
/// - Throws: An error if overriding the output port fails, usually because the configuration hasn't been locked.
/// Prefer wrapping this method using `updateConfiguration`.
func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws

/// Updates the audio session configuration by performing an asynchronous
/// operation.
/// - Parameters:
/// - functionName: The name of the calling function.
/// - file: The source file of the calling function.
/// - line: The line number of the calling function.
/// - block: The closure to execute, providing the audio session for
/// configuration updates.
func updateConfiguration(
functionName: StaticString,
file: StaticString,
line: UInt,
_ block: @escaping (AudioSessionProtocol) throws -> Void
)

/// Requests permission to record audio from the user.
/// - Returns: A Boolean indicating whether permission was granted.
func requestRecordPermission() async -> Bool
}

extension AVAudioSession {
/// Asynchronously requests permission to record audio.
/// - Returns: A Boolean indicating whether permission was granted.
private func requestRecordPermission() async -> Bool {
await withCheckedContinuation { continuation in
self.requestRecordPermission { result in
continuation.resume(returning: result)
}
}
}
}
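Since the doc comments above repeatedly advise routing `setMode`, `setCategory`, `setActive`, and `overrideOutputAudioPort` through `updateConfiguration`, here is a minimal sketch of what such a wrapped call could look like against this protocol. The `configureForVideoCall` helper and the chosen category options are illustrative assumptions, not part of this commit:

import AVFoundation

// Hypothetical sketch: funnel all session mutations through
// updateConfiguration, as the protocol documentation recommends.
// `audioSession` can be any AudioSessionProtocol-conforming instance.
func configureForVideoCall(_ audioSession: AudioSessionProtocol) {
    audioSession.updateConfiguration(
        functionName: #function,
        file: #file,
        line: #line
    ) { session in
        // These calls may throw when the configuration isn't locked,
        // which is why they are wrapped here rather than called directly.
        try session.setCategory(
            AVAudioSession.Category.playAndRecord.rawValue,
            with: [.allowBluetooth, .defaultToSpeaker]
        )
        try session.setMode(AVAudioSession.Mode.videoChat.rawValue)
        try session.setActive(true)
    }
}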
@@ -0,0 +1,72 @@
//
// Copyright © 2024 Stream.io Inc. All rights reserved.
//

import AVFoundation

extension AVAudioSession.CategoryOptions: CustomStringConvertible {
/// Provides a description of the `CategoryOptions` set, listing each option
/// contained within. This allows for easy logging and debugging of audio
/// session configurations.
public var description: String {
// Initialize an empty array to hold the names of the options.
var options: [String] = []

// Check each specific category option to see if it is present in
// `CategoryOptions`. If it is, append the corresponding name to the `options` array.

// Adds ".mixWithOthers" if this option is present, allowing audio to mix
// with other active audio sessions instead of interrupting them.
if contains(.mixWithOthers) {
options.append(".mixWithOthers")
}

// Adds ".duckOthers" if present, allowing other audio to temporarily
// reduce volume when this session plays sound.
if contains(.duckOthers) {
options.append(".duckOthers")
}

// Adds ".allowBluetooth" if present, permitting audio playback through
// Bluetooth devices.
if contains(.allowBluetooth) {
options.append(".allowBluetooth")
}

// Adds ".defaultToSpeaker" if present, enabling speaker output by default.
if contains(.defaultToSpeaker) {
options.append(".defaultToSpeaker")
}

// Adds ".interruptSpokenAudioAndMixWithOthers" if present, enabling this
// session to interrupt other spoken audio content but still mix with others.
if contains(.interruptSpokenAudioAndMixWithOthers) {
options.append(".interruptSpokenAudioAndMixWithOthers")
}

// Adds ".allowBluetoothA2DP" if present, allowing audio output via
// Bluetooth Advanced Audio Distribution Profile (A2DP) devices.
if contains(.allowBluetoothA2DP) {
options.append(".allowBluetoothA2DP")
}

// Adds ".allowAirPlay" if present, permitting audio playback through
// AirPlay-compatible devices.
if contains(.allowAirPlay) {
options.append(".allowAirPlay")
}

// Checks if the `.overrideMutedMicrophoneInterruption` option is available
// in iOS 14.5+ and adds it if present, allowing sessions to override
// microphone interruptions when muted.
if #available(iOS 14.5, *) {
if contains(.overrideMutedMicrophoneInterruption) {
options.append(".overrideMutedMicrophoneInterruption")
}
}

// If no options were appended, return ".noOptions". Otherwise, join
// the list of option names with commas for readability.
return options.isEmpty ? ".noOptions" : options.joined(separator: ", ")
}
}
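A short logging example of the conformance above; the options set chosen here is illustrative:

import AVFoundation

// With the CustomStringConvertible conformance above, an options set prints
// as a readable, comma-separated list of option names.
let options: AVAudioSession.CategoryOptions = [.allowBluetooth, .defaultToSpeaker]
print(options) // ".allowBluetooth, .defaultToSpeaker"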
@@ -0,0 +1,11 @@
//
// Copyright © 2024 Stream.io Inc. All rights reserved.
//

import AVFoundation

extension AVAudioSessionPortDescription {
override public var description: String {
"<Port type:\(portType.rawValue) name:\(portName)>"
}
}
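A small illustration of how the override above renders a port; the built-in speaker values shown in the comment are typical, not guaranteed:

import AVFoundation

// Each output port of the current route now prints with its type and name.
AVAudioSession.sharedInstance().currentRoute.outputs.forEach { port in
    print(port) // e.g. "<Port type:Speaker name:Speaker>"
}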
@@ -0,0 +1,57 @@
//
// Copyright © 2024 Stream.io Inc. All rights reserved.
//

import AVFoundation

extension AVAudioSessionRouteDescription {

override open var description: String {
let inputNames = inputs.map(\.portName).joined(separator: ",")
let inputTypes = inputs.map(\.portType.rawValue).joined(separator: ",")

let outputNames = outputs.map(\.portName).joined(separator: ",")
let outputTypes = outputs.map(\.portType.rawValue).joined(separator: ",")
return "AudioSessionRoute isExternal:\(isExternal) input:[name:\(inputNames) types:\(inputTypes)] output:[name:\(outputNames) types:\(outputTypes)]."
}

/// A set of port types that represent external audio outputs, such as
/// Bluetooth and car audio systems. These are used to determine if
/// the route includes an external output device.
private static let externalPorts: Set<AVAudioSession.Port> = [
.bluetoothA2DP, .bluetoothLE, .bluetoothHFP, .carAudio, .headphones
]

/// A Boolean value indicating whether the audio output is external.
/// Checks if any of the output port types match the defined set of
/// `externalPorts`.
var isExternal: Bool {
// Maps the port types of each output and checks if any are within
// the `externalPorts` set.
outputs.map(\.portType).contains { Self.externalPorts.contains($0) }
}

/// A Boolean value indicating if the output is directed to the built-in
/// speaker of the device.
var isSpeaker: Bool {
// Maps the output port types and checks if any type is `.builtInSpeaker`.
outputs.map(\.portType).contains { $0 == .builtInSpeaker }
}

/// A Boolean value indicating if the output is directed to the built-in
/// receiver (typically used for in-ear audio).
var isReceiver: Bool {
// Maps the output port types and checks if any type is `.builtInReceiver`.
outputs.map(\.portType).contains { $0 == .builtInReceiver }
}

/// A comma-separated string listing the types of all output ports.
/// Useful for logging the specific types of outputs currently in use.
var outputTypes: String {
// Maps each output port type to its raw string value and joins them
// with commas to create a readable output list.
outputs
.map(\.portType.rawValue)
.joined(separator: ",")
}
}
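The helpers above are internal to the SDK module; a minimal in-module sketch of how they might be used to log the active route (the function name and the branching are illustrative assumptions):

import AVFoundation

// Hypothetical in-module helper: summarise where audio is currently routed.
func logCurrentRoute(of session: AVAudioSession = .sharedInstance()) {
    let route = session.currentRoute
    if route.isExternal {
        print("External output in use: \(route.outputTypes)")
    } else if route.isSpeaker {
        print("Built-in speaker in use")
    } else if route.isReceiver {
        print("Built-in receiver (earpiece) in use")
    }
    // The overridden description gives the full input/output summary.
    print(route)
}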