diff --git a/CHANGELOG.md b/CHANGELOG.md index edafd02a5..fed2fdd65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Sync microphone mute state between the SDK and CallKit [#590](https://github.com/GetStream/stream-video-swift/pull/590) ### 🐞 Fixed +- Toggling the speaker during a call wasn't always working. [#585](https://github.com/GetStream/stream-video-swift/pull/585) - In some cases when joining a call, setup wasn't completed correctly, which led to issues during the call (e.g. missing video tracks or mute state not updating). [#586](https://github.com/GetStream/stream-video-swift/pull/586) # [1.13.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.13.0) diff --git a/Sources/StreamVideo/Utils/AudioRecorder/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioRecorder/AudioSessionProtocol.swift deleted file mode 100644 index 2b2299734..000000000 --- a/Sources/StreamVideo/Utils/AudioRecorder/AudioSessionProtocol.swift +++ /dev/null @@ -1,53 +0,0 @@ -// -// Copyright © 2024 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Foundation -import StreamWebRTC - -/// A simple protocol that abstracts the usage of AVAudioSession. -public protocol AudioSessionProtocol: AnyObject { - - func setCategory(_ category: AVAudioSession.Category) throws - - func setActive( - _ active: Bool, - options: AVAudioSession.SetActiveOptions - ) throws - - func requestRecordPermission() async -> Bool -} - -extension AVAudioSession: AudioSessionProtocol { - - public func requestRecordPermission() async -> Bool { - await withCheckedContinuation { continuation in - self.requestRecordPermission { result in - continuation.resume(returning: result) - } - } - } -} - -extension RTCAudioSession: AudioSessionProtocol { - public func setCategory(_ category: AVAudioSession.Category) throws { - lockForConfiguration() - try setCategory(category.rawValue, with: [.allowAirPlay, .allowBluetooth]) - unlockForConfiguration() - } - - public func setActive(_ active: Bool, options: AVAudioSession.SetActiveOptions) throws { - lockForConfiguration() - try setActive(active) - unlockForConfiguration() - } - - public func requestRecordPermission() async -> Bool { - await withCheckedContinuation { continuation in - AVAudioSession.sharedInstance().requestRecordPermission { result in - continuation.resume(returning: result) - } - } - } -} diff --git a/Sources/StreamVideo/Utils/AudioRecorder/AVAudioRecorderBuilder.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/AVAudioRecorderBuilder.swift similarity index 100% rename from Sources/StreamVideo/Utils/AudioRecorder/AVAudioRecorderBuilder.swift rename to Sources/StreamVideo/Utils/AudioSession/AudioRecorder/AVAudioRecorderBuilder.swift diff --git a/Sources/StreamVideo/Utils/AudioRecorder/StreamActiveCallProvider.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamActiveCallProvider.swift similarity index 100% rename from Sources/StreamVideo/Utils/AudioRecorder/StreamActiveCallProvider.swift rename to Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamActiveCallProvider.swift diff --git a/Sources/StreamVideo/Utils/AudioRecorder/StreamCallAudioRecorder.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift similarity index 85% rename from Sources/StreamVideo/Utils/AudioRecorder/StreamCallAudioRecorder.swift rename to
Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift index 3feace631..19121ea38 100644 --- a/Sources/StreamVideo/Utils/AudioRecorder/StreamCallAudioRecorder.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift @@ -15,13 +15,11 @@ open class StreamCallAudioRecorder: @unchecked Sendable { private struct StartRecordingRequest: Hashable { var hasActiveCall, ignoreActiveCall, isRecording: Bool } @Injected(\.activeCallProvider) private var activeCallProvider + @Injected(\.activeCallAudioSession) private var activeCallAudioSession /// The builder used to create the AVAudioRecorder instance. let audioRecorderBuilder: AVAudioRecorderBuilder - /// The audio session used for recording and playback. - let audioSession: AudioSessionProtocol - /// A private task responsible for setting up the recorder in the background. private var setUpTask: Task<Void, Error>? @@ -36,12 +34,7 @@ open class StreamCallAudioRecorder: @unchecked Sendable { /// A public publisher that exposes the average power of the audio signal. open private(set) lazy var metersPublisher: AnyPublisher<Float, Never> = _metersPublisher.eraseToAnyPublisher() - private let queue = UnfairQueue() - private var _isRecording: Bool = false - private var isRecording: Bool { - get { queue.sync { _isRecording } } - set { queue.sync { _isRecording = newValue } } - } + @Atomic private var isRecording: Bool = false /// Indicates whether an active call is present, influencing recording behaviour. private var hasActiveCall: Bool = false { @@ -49,16 +42,7 @@ open class StreamCallAudioRecorder: @unchecked Sendable { guard hasActiveCall != oldValue else { return } log.debug("🎙️updated with hasActiveCall:\(hasActiveCall).") if !hasActiveCall { - Task { - await stopRecording() - do { - /// It's safe to deactivate the session as a call isn't in progress. - try audioSession.setActive(false, options: []) - log.debug("🎙️AudioSession deactivated.") - } catch { - log.error("🎙️Failed to deactivate AudioSession.", error: error) - } - } + Task { await stopRecording() } } } } @@ -70,7 +54,6 @@ open class StreamCallAudioRecorder: @unchecked Sendable { /// - Parameter filename: The name of the file to record to. public init(filename: String) { audioRecorderBuilder = .init(inCacheDirectoryWithFilename: filename) - audioSession = AVAudioSession.sharedInstance() setUp() } @@ -78,13 +61,10 @@ open class StreamCallAudioRecorder: @unchecked Sendable { /// Initializes the recorder with a custom builder. /// /// - Parameter audioRecorderBuilder: The builder used to create the recorder. - /// - Parameter audioSession: The audio session used for recording and playback.
init( - audioRecorderBuilder: AVAudioRecorderBuilder, - audioSession: AudioSessionProtocol + audioRecorderBuilder: AVAudioRecorderBuilder ) { self.audioRecorderBuilder = audioRecorderBuilder - self.audioSession = audioSession setUp() } @@ -196,11 +176,8 @@ open class StreamCallAudioRecorder: @unchecked Sendable { } private func setUpAudioCaptureIfRequired() async throws -> AVAudioRecorder { - try audioSession.setCategory(.playAndRecord) - try audioSession.setActive(true, options: []) - guard - await audioSession.requestRecordPermission() + await activeCallAudioSession?.requestRecordPermission() == true else { throw ClientError("🎙️Permission denied.") } diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift new file mode 100644 index 000000000..e67ea8c9e --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift @@ -0,0 +1,107 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation +import StreamWebRTC + +/// A protocol defining the interface for managing an audio session, +/// with properties and methods to control audio settings, activation, +/// and routing configurations. +public protocol AudioSessionProtocol: AnyObject { + + /// A Boolean value indicating whether the audio session is active. + var isActive: Bool { get } + + /// The current route description for the audio session. + var currentRoute: AVAudioSessionRouteDescription { get } + + /// The audio category of the session. + var category: String { get } + + /// A Boolean value indicating whether the audio session uses speaker output. + var isUsingSpeakerOutput: Bool { get } + + /// A Boolean value indicating whether the audio session uses an external + /// audio output, such as headphones or Bluetooth. + var isUsingExternalOutput: Bool { get } + + /// A Boolean value indicating whether the session uses manual audio routing. + var useManualAudio: Bool { get set } + + /// A Boolean value indicating whether audio is enabled for the session. + var isAudioEnabled: Bool { get set } + + /// Adds a delegate to receive updates about audio session events. + /// - Parameter delegate: The delegate conforming to `RTCAudioSessionDelegate`. + func add(_ delegate: RTCAudioSessionDelegate) + + /// Sets the audio mode of the session. + /// - Parameter mode: The audio mode to set, such as `.videoChat` or `.voiceChat`. + /// - Throws: An error if setting the mode fails, usually because the configuration hasn't been locked. + /// Prefer wrapping this method using `updateConfiguration`. + func setMode(_ mode: String) throws + + /// Configures the audio category and options for the session. + /// - Parameters: + /// - category: The audio category to set, like `.playAndRecord`. + /// - categoryOptions: Options for the audio category, such as + /// `.allowBluetooth` or `.defaultToSpeaker`. + /// - Throws: An error if setting the category fails, usually because the configuration hasn't been locked. + /// Prefer wrapping this method using `updateConfiguration`. + func setCategory( + _ category: String, + with categoryOptions: AVAudioSession.CategoryOptions + ) throws + + /// Activates or deactivates the audio session. + /// - Parameter isActive: A Boolean indicating whether the session + /// should be activated. + /// - Throws: An error if activation fails, usually because the configuration hasn't been locked. + /// Prefer wrapping this method using `updateConfiguration`.
+ func setActive(_ isActive: Bool) throws + + /// Sets the session configuration for WebRTC audio settings. + /// - Parameter configuration: The configuration to apply to the session. + /// - Throws: An error if setting the configuration fails, usually because the configuration hasn't been locked. + /// Prefer wrapping this method using `updateConfiguration`. + func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws + + /// Overrides the current output audio port for the session. + /// - Parameter port: The port to use, such as `.speaker` or `.none`. + /// - Throws: An error if overriding the output port fails, usually because the configuration hasn't been locked. + /// Prefer wrapping this method using `updateConfiguration`. + func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws + + /// Updates the audio session configuration by performing an asynchronous + /// operation. + /// - Parameters: + /// - functionName: The name of the calling function. + /// - file: The source file of the calling function. + /// - line: The line number of the calling function. + /// - block: The closure to execute, providing the audio session for + /// configuration updates. + func updateConfiguration( + functionName: StaticString, + file: StaticString, + line: UInt, + _ block: @escaping (AudioSessionProtocol) throws -> Void + ) + + /// Requests permission to record audio from the user. + /// - Returns: A Boolean indicating whether permission was granted. + func requestRecordPermission() async -> Bool +} + +extension AVAudioSession { + /// Asynchronously requests permission to record audio. + /// - Returns: A Boolean indicating whether permission was granted. + private func requestRecordPermission() async -> Bool { + await withCheckedContinuation { continuation in + self.requestRecordPermission { result in + continuation.resume(returning: result) + } + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+CategoryOptions+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+CategoryOptions+Convenience.swift new file mode 100644 index 000000000..a5ac5d052 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+CategoryOptions+Convenience.swift @@ -0,0 +1,72 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +extension AVAudioSession.CategoryOptions: CustomStringConvertible { + /// Provides a description of the `CategoryOptions` set, listing each option + /// contained within. This allows for easy logging and debugging of audio + /// session configurations. + public var description: String { + // Initialize an empty array to hold the names of the options. + var options: [String] = [] + + // Check each specific category option to see if it is present in + // `CategoryOptions`. If it is, append the corresponding name to the `options` array. + + // Adds ".mixWithOthers" if this option is present, allowing audio to mix + // with other active audio sessions instead of interrupting them. + if contains(.mixWithOthers) { + options.append(".mixWithOthers") + } + + // Adds ".duckOthers" if present, allowing other audio to temporarily + // reduce volume when this session plays sound. + if contains(.duckOthers) { + options.append(".duckOthers") + } + + // Adds ".allowBluetooth" if present, permitting audio playback through + // Bluetooth devices.
+ if contains(.allowBluetooth) { + options.append(".allowBluetooth") + } + + // Adds ".defaultToSpeaker" if present, enabling speaker output by default. + if contains(.defaultToSpeaker) { + options.append(".defaultToSpeaker") + } + + // Adds ".interruptSpokenAudioAndMixWithOthers" if present, enabling this + // session to interrupt other spoken audio content but still mix with others. + if contains(.interruptSpokenAudioAndMixWithOthers) { + options.append(".interruptSpokenAudioAndMixWithOthers") + } + + // Adds ".allowBluetoothA2DP" if present, allowing audio output via + // Bluetooth Advanced Audio Distribution Profile (A2DP) devices. + if contains(.allowBluetoothA2DP) { + options.append(".allowBluetoothA2DP") + } + + // Adds ".allowAirPlay" if present, permitting audio playback through + // AirPlay-compatible devices. + if contains(.allowAirPlay) { + options.append(".allowAirPlay") + } + + // Checks if the `.overrideMutedMicrophoneInterruption` option is available + // in iOS 14.5+ and adds it if present, allowing sessions to override + // microphone interruptions when muted. + if #available(iOS 14.5, *) { + if contains(.overrideMutedMicrophoneInterruption) { + options.append(".overrideMutedMicrophoneInterruption") + } + } + + // If no options were appended, return ".noOptions". Otherwise, join + // the list of option names with commas for readability. + return options.isEmpty ? ".noOptions" : options.joined(separator: ", ") + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortDescription+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortDescription+Convenience.swift new file mode 100644 index 000000000..8f3559514 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortDescription+Convenience.swift @@ -0,0 +1,11 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +extension AVAudioSessionPortDescription { + override public var description: String { + "<name:\(portName) type:\(portType.rawValue)>" + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteDescription+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteDescription+Convenience.swift new file mode 100644 index 000000000..4cb096de1 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteDescription+Convenience.swift @@ -0,0 +1,57 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +extension AVAudioSessionRouteDescription { + + override open var description: String { + let inputNames = inputs.map(\.portName).joined(separator: ",") + let inputTypes = inputs.map(\.portType.rawValue).joined(separator: ",") + + let outputNames = outputs.map(\.portName).joined(separator: ",") + let outputTypes = outputs.map(\.portType.rawValue).joined(separator: ",") + return "AudioSessionRoute isExternal:\(isExternal) input:[name:\(inputNames) types:\(inputTypes)] output:[name:\(outputNames) types:\(outputTypes)]." + } + + /// A set of port types that represent external audio outputs, such as + /// Bluetooth and car audio systems. These are used to determine if + /// the route includes an external output device. + private static let externalPorts: Set<AVAudioSession.Port> = [ + .bluetoothA2DP, .bluetoothLE, .bluetoothHFP, .carAudio, .headphones + ] + + /// A Boolean value indicating whether the audio output is external. + /// Checks if any of the output port types match the defined set of + /// `externalPorts`.
+ var isExternal: Bool { + // Maps the port types of each output and checks if any are within + // the `externalPorts` set. + outputs.map(\.portType).contains { Self.externalPorts.contains($0) } + } + + /// A Boolean value indicating if the output is directed to the built-in + /// speaker of the device. + var isSpeaker: Bool { + // Maps the output port types and checks if any type is `.builtInSpeaker`. + outputs.map(\.portType).contains { $0 == .builtInSpeaker } + } + + /// A Boolean value indicating if the output is directed to the built-in + /// receiver (typically used for in-ear audio). + var isReceiver: Bool { + // Maps the output port types and checks if any type is `.builtInReceiver`. + outputs.map(\.portType).contains { $0 == .builtInReceiver } + } + + /// A comma-separated string listing the types of all output ports. + /// Useful for logging the specific types of outputs currently in use. + var outputTypes: String { + // Maps each output port type to its raw string value and joins them + // with commas to create a readable output list. + outputs + .map(\.portType.rawValue) + .joined(separator: ",") + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession_RouteChangeReason+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession_RouteChangeReason+Convenience.swift new file mode 100644 index 000000000..4224c0e16 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession_RouteChangeReason+Convenience.swift @@ -0,0 +1,30 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +extension AVAudioSession.RouteChangeReason: CustomStringConvertible { + public var description: String { + switch self { + case .unknown: + return ".unknown" + case .newDeviceAvailable: + return ".newDeviceAvailable" + case .oldDeviceUnavailable: + return ".oldDeviceUnavailable" + case .categoryChange: + return ".categoryChange" + case .override: + return ".override" + case .wakeFromSleep: + return ".wakeFromSleep" + case .noSuitableRouteForCategory: + return ".noSuitableRouteForCategory" + case .routeConfigurationChange: + return ".routeConfigurationChange" + @unknown default: + return "Unknown Reason" + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift new file mode 100644 index 000000000..2217a1c17 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift @@ -0,0 +1,30 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import StreamWebRTC + +extension RTCAudioSessionConfiguration { + /// Provides a default configuration for `RTCAudioSessionConfiguration` + /// tailored for WebRTC audio sessions, setting it to be suitable for + /// both playback and recording. + static let `default`: RTCAudioSessionConfiguration = { + // Creates a new WebRTC-specific audio session configuration instance. + let configuration = RTCAudioSessionConfiguration.webRTC() + + // Sets the audio mode to the default system mode. This typically + // configures the session to use system default settings for + // playback and recording. + configuration.mode = AVAudioSession.Mode.default.rawValue + + // Sets the audio category to `.playAndRecord`, enabling the session + // to handle both audio playback and recording simultaneously. 
+ // This category is commonly used in applications that require + // two-way audio, like video calls. + configuration.category = AVAudioSession.Category.playAndRecord.rawValue + + // Returns the fully configured default WebRTC audio session + // configuration. + return configuration + }() +} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift new file mode 100644 index 000000000..754a27fbd --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift @@ -0,0 +1,278 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import Foundation +import StreamWebRTC + +/// The `StreamAudioSessionAdapter` class manages the device's audio session +/// for an app, enabling control over activation, configuration, and routing +/// to output devices like speakers and in-ear speakers. +final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unchecked Sendable { + + /// An enum defining actions to update speaker routing based on call settings. + private enum SpeakerAction { + case routeUpdate(CallSettings) + case respectCallSettings(CallSettings) + } + + /// The shared audio session instance conforming to `AudioSessionProtocol` + /// that manages WebRTC audio settings. + private let audioSession: AudioSessionProtocol + + /// The current active call settings, or `nil` if no active call is in session. + @Atomic private(set) var activeCallSettings: CallSettings? + + /// The delegate for receiving audio session events, such as call settings + /// updates. + weak var delegate: StreamAudioSessionAdapterDelegate? + + /// Initializes a new `StreamAudioSessionAdapter` instance, configuring + /// the session with default settings and enabling manual audio control + /// for WebRTC. + /// - Parameter audioSession: An `AudioSessionProtocol` instance. Defaults + /// to `StreamRTCAudioSession`. + required init(_ audioSession: AudioSessionProtocol = StreamRTCAudioSession()) { + self.audioSession = audioSession + super.init() + + /// Update the active call's `audioSession` to make it available to other components. + StreamActiveCallAudioSessionKey.currentValue = audioSession + + audioSession.add(self) + audioSession.useManualAudio = true + audioSession.isAudioEnabled = true + + let configuration = RTCAudioSessionConfiguration.default + audioSession.updateConfiguration( + functionName: #function, + file: #file, + line: #line + ) { + try $0.setConfiguration(.default) + log.debug( + "AudioSession updated configuration with category: \(configuration.category) options: \(configuration.categoryOptions) mode: \(configuration.mode)", + subsystems: .audioSession + ) + } + } + + deinit { + if StreamActiveCallAudioSessionKey.currentValue === audioSession { + // Reset activeCall audioSession. + StreamActiveCallAudioSessionKey.currentValue = nil + } + } + + // MARK: - CallSettings + + /// Updates the audio session with new call settings. + /// - Parameter settings: The new `CallSettings` to apply.
func didUpdateCallSettings( + _ settings: CallSettings + ) { + guard settings != activeCallSettings else { return } + + performSessionAction(settings.audioOutputOn) + performSpeakerUpdateAction(.respectCallSettings(settings)) + activeCallSettings = settings + + log.debug( + "AudioSession updated isActive:\(settings.audioOutputOn) speakerOn:\(settings.speakerOn).", + subsystems: .audioSession + ) + } + + // MARK: - RTCAudioSessionDelegate + + /// Handles audio route changes, updating the session based on the reason + /// for the change. + /// + /// For `.unknown`, `.newDeviceAvailable`, `.override`, + /// `.noSuitableRouteForCategory`, and unknown future reasons, the route + /// change is accepted, and the `CallSettings` are updated accordingly, + /// triggering a delegate update. + /// + /// For other cases, the route change is ignored, enforcing the existing + /// `CallSettings`. + /// + /// - Parameters: + /// - session: The `RTCAudioSession` instance. + /// - reason: The reason for the route change. + /// - previousRoute: The previous audio route configuration. + func audioSessionDidChangeRoute( + _ session: RTCAudioSession, + reason: AVAudioSession.RouteChangeReason, + previousRoute: AVAudioSessionRouteDescription + ) { + guard let activeCallSettings else { + return + } + + switch reason { + case .unknown: + performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) + case .newDeviceAvailable: + performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) + case .oldDeviceUnavailable: + performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) + case .categoryChange: + performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) + case .override: + performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) + case .wakeFromSleep: + performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) + case .noSuitableRouteForCategory: + performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) + case .routeConfigurationChange: + performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) + @unknown default: + performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) + } + } + + /// Logs the status when the session can play or record. + /// - Parameters: + /// - session: The `RTCAudioSession` instance. + /// - canPlayOrRecord: A Boolean indicating whether play or record + /// capabilities are available. + func audioSession( + _ session: RTCAudioSession, + didChangeCanPlayOrRecord canPlayOrRecord: Bool + ) { + log.info( + "AudioSession can playOrRecord:\(canPlayOrRecord).", + subsystems: .audioSession + ) + } + + /// Logs when the session stops playing or recording. + /// - Parameter session: The `RTCAudioSession` instance. + func audioSessionDidStopPlayOrRecord( + _ session: RTCAudioSession + ) { log.info("AudioSession cannot playOrRecord.", subsystems: .audioSession) } + + /// Configures the session's active state when it changes. + /// - Parameters: + /// - audioSession: The `RTCAudioSession` instance. + /// - active: A Boolean indicating the desired active state. + func audioSession( + _ audioSession: RTCAudioSession, + didSetActive active: Bool + ) { + guard let activeCallSettings else { return } + performSessionAction(active) + performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) + } + + /// Logs and manages failure when setting the active state. + /// - Parameters: + /// - audioSession: The `RTCAudioSession` instance. + /// - active: The desired active state.
+ /// - error: The error encountered during the state change. + func audioSession( + _ audioSession: RTCAudioSession, + failedToSetActive active: Bool, + error: any Error + ) { + log.error( + "AudioSession failedToSetActive active:\(active)", + subsystems: .audioSession, + error: error + ) + performSessionAction(false) + } + + /// Handles failure in starting audio unit playback or recording. + /// - Parameters: + /// - audioSession: The `RTCAudioSession` instance. + /// - error: The error encountered during startup. + func audioSession( + _ audioSession: RTCAudioSession, + audioUnitStartFailedWithError error: any Error + ) { + log.error( + "AudioSession audioUnitStartFailedWithError", + subsystems: .audioSession, + error: error + ) + performSessionAction(false) + } + + // MARK: - Private helpers + + /// Executes an action to update the speaker routing based on current + /// call settings. + /// - Parameter action: The action to perform, affecting routing. + private func performSpeakerUpdateAction(_ action: SpeakerAction) { + switch action { + case let .routeUpdate(currentCallSettings): + let updatedCallSettings = currentCallSettings + .withUpdatedSpeakerState(audioSession.isUsingSpeakerOutput) + + guard currentCallSettings != updatedCallSettings else { + return + } + + delegate?.audioSessionAdapterDidUpdateCallSettings( + self, + callSettings: updatedCallSettings + ) + log.debug( + "AudioSession route requires speaker update \(currentCallSettings.speakerOn) → \(updatedCallSettings.speakerOn).", + subsystems: .audioSession + ) + + case let .respectCallSettings(currentCallSettings): + if audioSession.isUsingSpeakerOutput != currentCallSettings.speakerOn { + let category = audioSession.category + let categoryOptions: AVAudioSession.CategoryOptions = currentCallSettings.speakerOn + ? [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP] + : [.allowBluetooth, .allowBluetoothA2DP] + + let mode: AVAudioSession.Mode = currentCallSettings.speakerOn + ? .videoChat + : .voiceChat + + let overrideOutputAudioPort: AVAudioSession.PortOverride = currentCallSettings.speakerOn + ? .speaker + : .none + + audioSession.updateConfiguration( + functionName: #function, + file: #file, + line: #line + ) { + try $0.setMode(mode.rawValue) + try $0.setCategory(category, with: categoryOptions) + try $0.overrideOutputAudioPort(overrideOutputAudioPort) + + log.debug( + "AudioSession updated mode:\(mode.rawValue) category:\(category) options:\(categoryOptions) overrideOutputAudioPort:\(overrideOutputAudioPort == .speaker ? ".speaker" : ".none")", + subsystems: .audioSession + ) + } + } + } + } + + /// Updates the active state of the session. + /// - Parameter isActive: A Boolean indicating if the session should be + /// active. + private func performSessionAction(_ isActive: Bool) { + guard audioSession.isActive != isActive else { + return + } + log.debug( + "AudioSession will attempt to set isActive:\(isActive).", + subsystems: .audioSession + ) + audioSession.updateConfiguration( + functionName: #function, + file: #file, + line: #line + ) { try $0.setActive(isActive) } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift new file mode 100644 index 000000000..52303110c --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift @@ -0,0 +1,18 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import Foundation + +/// A delegate protocol for receiving updates related to the audio session's +/// call settings. +protocol StreamAudioSessionAdapterDelegate: AnyObject { + /// Called when the audio session updates its call settings. + /// - Parameters: + /// - adapter: The `StreamAudioSessionAdapter` instance that made the update. + /// - callSettings: The updated `CallSettings`. + func audioSessionAdapterDidUpdateCallSettings( + _ adapter: StreamAudioSessionAdapter, + callSettings: CallSettings + ) +} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift new file mode 100644 index 000000000..26e443648 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift @@ -0,0 +1,159 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation +import StreamWebRTC + +/// A class implementing the `AudioSessionProtocol` that manages the WebRTC +/// audio session for the application, handling settings and route management. +final class StreamRTCAudioSession: AudioSessionProtocol { + + /// A queue for processing audio session operations asynchronously. + private let processingQueue = DispatchQueue( + label: "io.getstream.audiosession", + target: .global(qos: .userInteractive) + ) + + /// The shared instance of `RTCAudioSession` used for WebRTC audio + /// configuration and management. + private let source: RTCAudioSession = .sharedInstance() + + /// A Boolean value indicating whether the audio session is currently active. + var isActive: Bool { source.isActive } + + /// The current audio route description for the session. + var currentRoute: AVAudioSessionRouteDescription { source.currentRoute } + + /// The audio category of the session, such as `.playAndRecord`. + var category: String { source.category } + + /// A Boolean value indicating whether the audio session is using + /// the device's speaker. + var isUsingSpeakerOutput: Bool { currentRoute.isSpeaker } + + /// A Boolean value indicating whether the audio session is using + /// an external output, like Bluetooth or headphones. + var isUsingExternalOutput: Bool { currentRoute.isExternal } + + /// A Boolean value indicating whether the audio session uses manual + /// audio routing. + var useManualAudio: Bool { + set { source.useManualAudio = newValue } + get { source.useManualAudio } + } + + /// A Boolean value indicating whether audio is enabled for the session. + var isAudioEnabled: Bool { + set { source.isAudioEnabled = newValue } + get { source.isAudioEnabled } + } + + /// Adds a delegate to receive updates from the audio session. + /// - Parameter delegate: A delegate conforming to `RTCAudioSessionDelegate`. + func add(_ delegate: RTCAudioSessionDelegate) { + source.add(delegate) + } + + /// Sets the audio mode for the session, such as `.videoChat`. + /// - Parameter mode: The audio mode to set. + /// - Throws: An error if setting the mode fails. + func setMode(_ mode: String) throws { + try source.setMode(mode) + } + + /// Configures the audio category and category options for the session. + /// - Parameters: + /// - category: The audio category, such as `.playAndRecord`. + /// - categoryOptions: Options for the category, including + /// `.allowBluetooth` and `.defaultToSpeaker`. + /// - Throws: An error if setting the category fails.
+ func setCategory( + _ category: String, + with categoryOptions: AVAudioSession.CategoryOptions + ) throws { + try source.setCategory(category, with: categoryOptions) + } + + /// Activates or deactivates the audio session. + /// - Parameter isActive: A Boolean indicating whether the session + /// should be active. + /// - Throws: An error if activation or deactivation fails. + func setActive(_ isActive: Bool) throws { + try source.setActive(isActive) + } + + /// Sets the audio configuration for the WebRTC session. + /// - Parameter configuration: The configuration to apply. + /// - Throws: An error if setting the configuration fails. + func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws { + try source.setConfiguration(configuration) + } + + /// Overrides the audio output port, such as switching to speaker output. + /// - Parameter port: The output port to use, such as `.speaker`. + /// - Throws: An error if overriding the output port fails. + func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws { + try source.overrideOutputAudioPort(port) + } + + /// Performs an asynchronous update to the audio session configuration. + /// - Parameters: + /// - functionName: The name of the calling function. + /// - file: The source file of the calling function. + /// - line: The line number of the calling function. + /// - block: A closure that performs an audio configuration update. + func updateConfiguration( + functionName: StaticString, + file: StaticString, + line: UInt, + _ block: @escaping (any AudioSessionProtocol) throws -> Void + ) { + processingQueue.async { [weak self] in + guard let self else { return } + source.lockForConfiguration() + defer { source.unlockForConfiguration() } + do { + try block(self) + } catch { + log.error( + error, + subsystems: .audioSession, + functionName: functionName, + fileName: file, + lineNumber: line + ) + } + } + } + + /// Requests permission to record audio from the user. + /// - Returns: A Boolean indicating whether permission was granted. + func requestRecordPermission() async -> Bool { + await withCheckedContinuation { continuation in + AVAudioSession.sharedInstance().requestRecordPermission { result in + continuation.resume(returning: result) + } + } + } +} + +/// A key for dependency injection of an `AudioSessionProtocol` instance +/// that represents the active call audio session. +struct StreamActiveCallAudioSessionKey: InjectionKey { + static var currentValue: AudioSessionProtocol? +} + +extension InjectedValues { + /// The active call's audio session. The value is set in `StreamAudioSessionAdapter`'s + /// `init` and cleared on `deinit`. + var activeCallAudioSession: AudioSessionProtocol? { + get { + Self[StreamActiveCallAudioSessionKey.self] + } + set { + Self[StreamActiveCallAudioSessionKey.self] = newValue + } + } +} diff --git a/Sources/StreamVideo/Utils/Logger/Logger.swift b/Sources/StreamVideo/Utils/Logger/Logger.swift index 1bcb7a5a4..65d40dd8a 100644 --- a/Sources/StreamVideo/Utils/Logger/Logger.swift +++ b/Sources/StreamVideo/Utils/Logger/Logger.swift @@ -28,7 +28,8 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { .sfu, .iceAdapter, .mediaAdapter, - .thermalState + .thermalState, + .audioSession ] /// All subsystems within the SDK. @@ -44,7 +45,8 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { .sfu, .iceAdapter, .mediaAdapter, - .thermalState + .thermalState, + .audioSession ] /// The subsystem responsible for any other part of the SDK.
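Note: the `activeCallAudioSession` injection key above is how `StreamCallAudioRecorder` now reaches the session (see its `setUpAudioCaptureIfRequired` change earlier in this diff). A minimal usage sketch, assuming the SDK's `@Injected` property wrapper and `ClientError` type from this diff; `PermissionGate` is a hypothetical, module-internal caller and not part of this change:

    final class PermissionGate {
        // Resolves whatever StreamAudioSessionAdapter registered during its
        // init, or nil when no call audio session is active.
        @Injected(\.activeCallAudioSession) private var activeCallAudioSession

        func ensureRecordPermission() async throws {
            // Mirrors the guard in StreamCallAudioRecorder.setUpAudioCaptureIfRequired.
            guard await activeCallAudioSession?.requestRecordPermission() == true else {
                throw ClientError("🎙️Permission denied.")
            }
        }
    }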
@@ -64,11 +66,16 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { /// The subsystem responsible for PeerConnections. public static let peerConnectionPublisher = Self(rawValue: 1 << 6) public static let peerConnectionSubscriber = Self(rawValue: 1 << 7) - /// The subsystem responsible for PeerConnections. + /// The subsystem responsible for SFU interaction. public static let sfu = Self(rawValue: 1 << 8) + /// The subsystem responsible for ICE interactions. public static let iceAdapter = Self(rawValue: 1 << 9) + /// The subsystem responsible for Media publishing/subscribing. public static let mediaAdapter = Self(rawValue: 1 << 10) + /// The subsystem responsible for ThermalState observation. public static let thermalState = Self(rawValue: 1 << 11) + /// The subsystem responsible for interacting with the AudioSession. + public static let audioSession = Self(rawValue: 1 << 12) public var description: String { switch rawValue { @@ -89,13 +96,15 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { case LogSubsystem.peerConnectionSubscriber.rawValue: return "peerConnection-subscriber" case LogSubsystem.sfu.rawValue: - return "SFU" + return "sfu" case LogSubsystem.iceAdapter.rawValue: - return "ICEAdapter" + return "iceAdapter" case LogSubsystem.mediaAdapter.rawValue: - return "MediaAdapter" + return "mediaAdapter" case LogSubsystem.thermalState.rawValue: - return "Thermal State" + return "thermalState" + case LogSubsystem.audioSession.rawValue: + return "audioSession" default: return "unknown(rawValue:\(rawValue))" } diff --git a/Sources/StreamVideo/WebRTC/AudioSession.swift deleted file mode 100644 index c8d941e9b..000000000 --- a/Sources/StreamVideo/WebRTC/AudioSession.swift +++ /dev/null @@ -1,72 +0,0 @@ -// -// Copyright © 2024 Stream.io Inc. All rights reserved.
-// - -import Foundation -import StreamWebRTC - -extension RTCAudioSessionConfiguration: @unchecked Sendable {} - -actor AudioSession { - - private let rtcAudioSession: RTCAudioSession = RTCAudioSession.sharedInstance() - - var isActive: Bool { rtcAudioSession.isActive } - var isAudioEnabled: Bool { rtcAudioSession.isAudioEnabled } - var isSpeakerOn: Bool { rtcAudioSession.categoryOptions.contains(.defaultToSpeaker) } - - func configure( - _ configuration: RTCAudioSessionConfiguration = .default, - audioOn: Bool, - speakerOn: Bool - ) { - rtcAudioSession.lockForConfiguration() - defer { rtcAudioSession.unlockForConfiguration() } - rtcAudioSession.useManualAudio = true - rtcAudioSession.isAudioEnabled = true - - do { - log.debug( - """ - Configuring audio session - audioOn: \(audioOn) - speakerOn: \(speakerOn) - """ - ) - if speakerOn { - configuration.categoryOptions.insert(.defaultToSpeaker) - configuration.mode = AVAudioSession.Mode.videoChat.rawValue - } else { - configuration.categoryOptions.remove(.defaultToSpeaker) - configuration.mode = AVAudioSession.Mode.voiceChat.rawValue - } - try rtcAudioSession.setConfiguration(configuration, active: audioOn) - } catch { - log.error("Error occured while configuring audio session", error: error) - } - } - - func setAudioSessionEnabled(_ enabled: Bool) { - rtcAudioSession.lockForConfiguration() - defer { rtcAudioSession.unlockForConfiguration() } - rtcAudioSession.isAudioEnabled = enabled - } - - deinit { - rtcAudioSession.lockForConfiguration() - rtcAudioSession.isAudioEnabled = false - rtcAudioSession.unlockForConfiguration() - } -} - -extension RTCAudioSessionConfiguration { - - static let `default`: RTCAudioSessionConfiguration = { - let configuration = RTCAudioSessionConfiguration.webRTC() - var categoryOptions: AVAudioSession.CategoryOptions = [.allowBluetooth, .allowBluetoothA2DP] - configuration.mode = AVAudioSession.Mode.videoChat.rawValue - configuration.category = AVAudioSession.Category.playAndRecord.rawValue - configuration.categoryOptions = categoryOptions - return configuration - }() -} diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift index 0daf9dc0d..d7315f6a6 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift @@ -18,9 +18,6 @@ final class AudioMediaAdapter: MediaAdapting, @unchecked Sendable { /// The factory for creating WebRTC peer connection components. private let peerConnectionFactory: PeerConnectionFactory - /// The audio session manager. - private let audioSession: AudioSession - /// The manager for local audio media. 
private let localMediaManager: LocalMediaAdapting @@ -58,8 +55,7 @@ peerConnection: StreamRTCPeerConnectionProtocol, peerConnectionFactory: PeerConnectionFactory, sfuAdapter: SFUAdapter, - subject: PassthroughSubject<TrackEvent, Never>, - audioSession: AudioSession + subject: PassthroughSubject<TrackEvent, Never> ) { self.init( sessionID: sessionID, @@ -70,11 +66,9 @@ peerConnection: peerConnection, peerConnectionFactory: peerConnectionFactory, sfuAdapter: sfuAdapter, - audioSession: audioSession, subject: subject ), - subject: subject, - audioSession: audioSession + subject: subject ) } @@ -86,21 +80,18 @@ /// - peerConnectionFactory: The factory for creating WebRTC peer connection components. /// - localMediaManager: The manager for local audio media. /// - subject: A subject for publishing track events. - /// - audioSession: The audio session manager. init( sessionID: String, peerConnection: StreamRTCPeerConnectionProtocol, peerConnectionFactory: PeerConnectionFactory, localMediaManager: LocalMediaAdapting, - subject: PassthroughSubject<TrackEvent, Never>, - audioSession: AudioSession + subject: PassthroughSubject<TrackEvent, Never> ) { self.sessionID = sessionID self.peerConnection = peerConnection self.peerConnectionFactory = peerConnectionFactory self.localMediaManager = localMediaManager self.subject = subject - self.audioSession = audioSession // Set up observers for added and removed streams peerConnection @@ -140,30 +131,6 @@ try await localMediaManager.didUpdateCallSettings(settings) } - // MARK: - AudioSession - - /// Updates the audio session state. - /// - /// - Parameter isEnabled: Whether the audio session is enabled. - func didUpdateAudioSessionState(_ isEnabled: Bool) async { - await audioSession.setAudioSessionEnabled(isEnabled) - } - - /// Updates the audio session speaker state. - /// - /// - Parameters: - /// - isEnabled: Whether the speaker is enabled. - /// - audioSessionEnabled: Whether the audio session is enabled. - func didUpdateAudioSessionSpeakerState( - _ isEnabled: Bool, - with audioSessionEnabled: Bool - ) async { - await audioSession.configure( - audioOn: audioSessionEnabled, - speakerOn: isEnabled - ) - } - // MARK: - Observers /// Adds a new audio stream and notifies observers. diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift index 94e1113e3..68b0462a6 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift @@ -24,9 +24,6 @@ final class LocalAudioMediaAdapter: LocalMediaAdapting { /// The adapter for communicating with the Selective Forwarding Unit (SFU). private var sfuAdapter: SFUAdapter - /// The audio session manager. - private let audioSession: AudioSession - /// The stream identifiers for this audio adapter. private let streamIds: [String] @@ -36,6 +33,8 @@ /// The RTP transceiver for sending audio. private var sender: RTCRtpTransceiver? + private var lastUpdatedCallSettings: CallSettings.Audio?
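+    // The property above caches the audio-relevant slice of `CallSettings`
+    // (see the `CallSettings.Audio` extension at the end of this file) so
+    // `didUpdateCallSettings` can return early when an update didn't touch
+    // the mic, speaker, or audio-output state.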
+ /// The mid (Media Stream Identification) of the sender. var mid: String? { sender?.mid } @@ -56,14 +55,12 @@ peerConnection: StreamRTCPeerConnectionProtocol, peerConnectionFactory: PeerConnectionFactory, sfuAdapter: SFUAdapter, - audioSession: AudioSession, subject: PassthroughSubject<TrackEvent, Never> ) { self.sessionID = sessionID self.peerConnection = peerConnection self.peerConnectionFactory = peerConnectionFactory self.sfuAdapter = sfuAdapter - self.audioSession = audioSession self.subject = subject streamIds = ["\(sessionID):audio"] } @@ -192,40 +189,46 @@ final class LocalAudioMediaAdapter: LocalMediaAdapting { _ settings: CallSettings ) async throws { guard let localTrack else { return } - let isMuted = !settings.audioOn - let isLocalMuted = localTrack.isEnabled == false - guard isMuted != isLocalMuted || sender == nil else { + + guard lastUpdatedCallSettings != settings.audio else { return } - try await sfuAdapter.updateTrackMuteState( - .audio, - isMuted: isMuted, - for: sessionID - ) - - await audioSession.configure( - audioOn: settings.audioOn, - speakerOn: settings.speakerOn - ) + let isMuted = !settings.audioOn + let isLocalMuted = localTrack.isEnabled == false + + if isMuted != isLocalMuted { + try await sfuAdapter.updateTrackMuteState( + .audio, + isMuted: isMuted, + for: sessionID + ) + } if isMuted, localTrack.isEnabled == true { unpublish() } else if !isMuted { publish() await audioRecorder.startRecording() - let isActive = await audioSession.isActive - let isAudioEnabled = await audioSession.isAudioEnabled - log.debug( - """ - Local audioTrack is now published. - isEnabled: \(localTrack.isEnabled == true) - senderHasCorrectTrack: \(sender?.sender.track == localTrack) - trackId:\(localTrack.trackId) - audioSession.isActive: \(isActive) - audioSession.isAudioEnabled: \(isAudioEnabled) - """ - ) } + + lastUpdatedCallSettings = settings.audio + } +} + +extension CallSettings { + + struct Audio: Equatable { + var micOn: Bool + var speakerOn: Bool + var audioSessionOn: Bool + } + + var audio: Audio { + .init( + micOn: audioOn, + speakerOn: speakerOn, + audioSessionOn: audioOutputOn + ) } } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift index 08ea5bdaa..1d85afdd8 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift @@ -50,7 +50,6 @@ final class MediaAdapter { sfuAdapter: SFUAdapter, videoOptions: VideoOptions, videoConfig: VideoConfig, - audioSession: AudioSession, videoCaptureSessionProvider: VideoCaptureSessionProvider, screenShareSessionProvider: ScreenShareSessionProvider ) { @@ -65,8 +64,7 @@ peerConnection: peerConnection, peerConnectionFactory: peerConnectionFactory, localMediaManager: LocalNoOpMediaAdapter(subject: subject), - subject: subject, - audioSession: audioSession + subject: subject ), videoMediaAdapter: .init( sessionID: sessionID, @@ -92,8 +90,7 @@ peerConnection: peerConnection, peerConnectionFactory: peerConnectionFactory, sfuAdapter: sfuAdapter, - subject: subject, - audioSession: audioSession + subject: subject ), videoMediaAdapter: .init( sessionID: sessionID, @@ -223,30 +220,6 @@ } } - // MARK: - Audio - - /// Updates the audio session state.
- /// - /// - Parameter isEnabled: Whether the audio session is enabled. - func didUpdateAudioSessionState(_ isEnabled: Bool) async { - await audioMediaAdapter.didUpdateAudioSessionState(isEnabled) - } - - /// Updates the audio session speaker state. - /// - /// - Parameters: - /// - isEnabled: Whether the speaker is enabled. - /// - audioSessionEnabled: Whether the audio session is enabled. - func didUpdateAudioSessionSpeakerState( - _ isEnabled: Bool, - with audioSessionEnabled: Bool - ) async { - await audioMediaAdapter.didUpdateAudioSessionSpeakerState( - isEnabled, - with: audioSessionEnabled - ) - } - // MARK: - Video /// Updates the camera position. diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift index 77ddc700b..8bb22e5a3 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift @@ -22,7 +22,6 @@ protocol RTCPeerConnectionCoordinatorProviding { /// - callSettings: Settings related to the overall call. /// - audioSettings: Settings for audio configuration. /// - sfuAdapter: The adapter for interacting with the Selective Forwarding Unit. - /// - audioSession: The audio session to be used. /// - videoCaptureSessionProvider: Provider for video capturing functionality. /// - screenShareSessionProvider: Provider for screen sharing functionality. /// - Returns: An initialized `RTCPeerConnectionCoordinator` instance. @@ -36,7 +35,6 @@ protocol RTCPeerConnectionCoordinatorProviding { callSettings: CallSettings, audioSettings: AudioSettings, sfuAdapter: SFUAdapter, - audioSession: AudioSession, videoCaptureSessionProvider: VideoCaptureSessionProvider, screenShareSessionProvider: ScreenShareSessionProvider ) -> RTCPeerConnectionCoordinator @@ -59,7 +57,6 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina /// - callSettings: Settings related to the overall call. /// - audioSettings: Settings for audio configuration. /// - sfuAdapter: The adapter for interacting with the Selective Forwarding Unit. - /// - audioSession: The audio session to be used. /// - videoCaptureSessionProvider: Provider for video capturing functionality. /// - screenShareSessionProvider: Provider for screen sharing functionality. /// - Returns: A newly created `RTCPeerConnectionCoordinator` instance. 
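Note: the `lastUpdatedCallSettings` guard added to `LocalAudioMediaAdapter` above compares only the audio-relevant projection of `CallSettings`, so updates that touch other fields (video, camera position, etc.) become cheap no-ops. A sketch of the idea, assuming `CallSettings` exposes an initializer with these labels (illustrative only):

    // Two settings that differ only in video state share the same
    // CallSettings.Audio projection, so the adapter's guard skips the
    // second update entirely.
    let a = CallSettings(audioOn: true, videoOn: true, speakerOn: true)
    let b = CallSettings(audioOn: true, videoOn: false, speakerOn: true)
    assert(a.audio == b.audio)

    // Flipping the mic changes the projection and re-runs the SFU
    // mute-state update plus the publish/unpublish logic.
    let c = CallSettings(audioOn: false, videoOn: false, speakerOn: true)
    assert(a.audio != c.audio)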
@@ -73,7 +70,6 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina callSettings: CallSettings, audioSettings: AudioSettings, sfuAdapter: SFUAdapter, - audioSession: AudioSession, videoCaptureSessionProvider: VideoCaptureSessionProvider, screenShareSessionProvider: ScreenShareSessionProvider ) -> RTCPeerConnectionCoordinator { @@ -87,7 +83,6 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina callSettings: callSettings, audioSettings: audioSettings, sfuAdapter: sfuAdapter, - audioSession: audioSession, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider ) diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift index 3806e2d6a..d5a8d3124 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift @@ -36,7 +36,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { private let subsystem: LogSubsystem private let disposableBag: DisposableBag = .init() private let dispatchQueue = DispatchQueue(label: "io.getstream.peerconnection.serial.offer.queue") - private let audioSession: AudioSession // MARK: Adapters @@ -97,7 +96,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { callSettings: CallSettings, audioSettings: AudioSettings, sfuAdapter: SFUAdapter, - audioSession: AudioSession, videoCaptureSessionProvider: VideoCaptureSessionProvider, screenShareSessionProvider: ScreenShareSessionProvider ) { @@ -109,7 +107,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { callSettings: callSettings, audioSettings: audioSettings, sfuAdapter: sfuAdapter, - audioSession: audioSession, mediaAdapter: .init( sessionID: sessionId, peerConnectionType: peerType, @@ -118,7 +115,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { sfuAdapter: sfuAdapter, videoOptions: videoOptions, videoConfig: videoConfig, - audioSession: audioSession, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider ) @@ -133,7 +129,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { callSettings: CallSettings, audioSettings: AudioSettings, sfuAdapter: SFUAdapter, - audioSession: AudioSession, mediaAdapter: MediaAdapter ) { self.sessionId = sessionId @@ -146,7 +141,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { subsystem = peerType == .publisher ? .peerConnectionPublisher : .peerConnectionSubscriber - self.audioSession = audioSession self.mediaAdapter = mediaAdapter iceAdapter = .init( @@ -291,7 +285,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { func didUpdateCallSettings( _ settings: CallSettings ) async throws { - let isActive = await audioSession.isActive log.debug( """ PeerConnection will setUp: @@ -305,8 +298,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { videoOn: \(settings.videoOn) audioOutputOn: \(settings.audioOutputOn) speakerOn: \(settings.speakerOn) - - AudioSession enabled: \(isActive) """, subsystems: subsystem ) @@ -445,52 +436,6 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { try await peerConnection.statistics() } - // MARK: - Audio - - /// Updates the audio session state. - /// - /// - Parameter isEnabled: Whether the audio session should be enabled or disabled. 
- func didUpdateAudioSessionState(_ isEnabled: Bool) async { - log.debug( - """ - PeerConnection will update audioSession state - Identifier: \(identifier) - type:\(peerType) - sessionID: \(sessionId) - sfu: \(sfuAdapter.hostname) - audioSession state: \(isEnabled) - """, - subsystems: subsystem - ) - await mediaAdapter.didUpdateAudioSessionState(isEnabled) - } - - /// Updates the audio session speaker state. - /// - /// - Parameters: - /// - isEnabled: Whether the speaker should be enabled or disabled. - /// - audioSessionEnabled: Whether the audio session is currently enabled. - func didUpdateAudioSessionSpeakerState( - _ isEnabled: Bool, - with audioSessionEnabled: Bool - ) async { - log.debug( - """ - PeerConnection will update audioSession speakerState - Identifier: \(identifier) - type:\(peerType) - sessionID: \(sessionId) - sfu: \(sfuAdapter.hostname) - audioSession speakerState: \(isEnabled) - """, - subsystems: subsystem - ) - await mediaAdapter.didUpdateAudioSessionSpeakerState( - isEnabled, - with: audioSessionEnabled - ) - } - // MARK: - Video /// Updates the camera position. diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift index 345169499..b742510e7 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift @@ -362,14 +362,7 @@ extension WebRTCCoordinator.StateMachine.Stage { .$callSettings .compactMap { $0 } .removeDuplicates() - .log(.debug, subsystems: .webRTC) { - """ - CallSettings updated - audioOn: \($0.audioOn) - videoOn: \($0.videoOn) - audioOutputOn: \($0.audioOutputOn) - """ - } + .log(.debug, subsystems: .webRTC) { "Updated \($0)" } .sinkTask(storeIn: disposableBag) { [weak self] callSettings in guard let self else { return } @@ -384,11 +377,17 @@ return } + context + .coordinator? + .stateAdapter + .audioSession + .didUpdateCallSettings(callSettings) + try await publisher.didUpdateCallSettings(callSettings) - log.debug("Publisher callSettings updated.", subsystems: .webRTC) + log.debug("Publisher and AudioSession callSettings updated.", subsystems: .webRTC) } catch { log.warning( - "Will disconnect because failed to update callSettings on publisher.", + "Will disconnect because updating callSettings failed on Publisher or AudioSession. [Error: \(error)]", subsystems: .webRTC ) transitionDisconnectOrError(error) diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift index ac81bcec7..3cf093827 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift @@ -10,7 +10,7 @@ import StreamWebRTC /// video call. This class manages the connection setup, track handling, and /// participants, including their media settings, capabilities, and track /// updates.
-actor WebRTCStateAdapter: ObservableObject { +actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { typealias ParticipantsStorage = [String: CallParticipant] typealias ParticipantOperation = @Sendable(ParticipantsStorage) -> ParticipantsStorage @@ -42,6 +42,7 @@ actor WebRTCStateAdapter: ObservableObject { let peerConnectionFactory: PeerConnectionFactory let videoCaptureSessionProvider: VideoCaptureSessionProvider let screenShareSessionProvider: ScreenShareSessionProvider + let audioSession: StreamAudioSessionAdapter = .init() /// Published properties that represent different parts of the WebRTC state. @Published private(set) var sessionID: String = UUID().uuidString @@ -74,7 +75,6 @@ actor WebRTCStateAdapter: ObservableObject { private var videoFilter: VideoFilter? private let rtcPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinatorProviding - private let audioSession: AudioSession = .init() private let disposableBag = DisposableBag() private let peerConnectionsDisposableBag = DisposableBag() @@ -112,6 +112,8 @@ actor WebRTCStateAdapter: ObservableObject { self.rtcPeerConnectionCoordinatorFactory = rtcPeerConnectionCoordinatorFactory self.videoCaptureSessionProvider = videoCaptureSessionProvider self.screenShareSessionProvider = screenShareSessionProvider + + audioSession.delegate = self } /// Sets the session ID. @@ -221,7 +223,6 @@ actor WebRTCStateAdapter: ObservableObject { callSettings: callSettings, audioSettings: audioSettings, sfuAdapter: sfuAdapter, - audioSession: audioSession, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider ) @@ -239,7 +240,6 @@ actor WebRTCStateAdapter: ObservableObject { callSettings: callSettings, audioSettings: audioSettings, sfuAdapter: sfuAdapter, - audioSession: audioSession, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider ) @@ -541,4 +541,19 @@ actor WebRTCStateAdapter: ObservableObject { partialResult[entry.key] = newParticipant } } + + // MARK: - AudioSessionDelegate + + nonisolated func audioSessionAdapterDidUpdateCallSettings( + _ adapter: StreamAudioSessionAdapter, + callSettings: CallSettings + ) { + Task { + await self.set(callSettings: callSettings) + log.debug( + "AudioSession updated call settings: \(callSettings)", + subsystems: .audioSession + ) + } + } } diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj index 958d996e6..8fa94ec45 100644 --- a/StreamVideo.xcodeproj/project.pbxproj +++ b/StreamVideo.xcodeproj/project.pbxproj @@ -191,6 +191,17 @@ 4065839B2B877ADA00B4F979 /* CIImage+Sendable.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4065839A2B877ADA00B4F979 /* CIImage+Sendable.swift */; }; 4065839D2B877B6500B4F979 /* UIDevice+NeuralEngine.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4065839C2B877B6500B4F979 /* UIDevice+NeuralEngine.swift */; }; 4067A5D82AE1249400CFDEB1 /* CornerClipper_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067A5D72AE1249400CFDEB1 /* CornerClipper_Tests.swift */; }; + 4067F3082CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3072CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift */; }; + 4067F30B2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30A2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift */; }; + 
4067F30D2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30C2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift */; }; + 4067F30F2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30E2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift */; }; + 4067F3112CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3102CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift */; }; + 4067F3132CDA33C6002E28BD /* RTCAudioSessionConfiguration+Default.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3122CDA33C4002E28BD /* RTCAudioSessionConfiguration+Default.swift */; }; + 4067F3152CDA4094002E28BD /* StreamRTCAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */; }; + 4067F3172CDA40CC002E28BD /* StreamAudioSessionAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3162CDA40CC002E28BD /* StreamAudioSessionAdapter.swift */; }; + 4067F3192CDA469F002E28BD /* MockAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3182CDA469C002E28BD /* MockAudioSession.swift */; }; + 4067F31C2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */; }; + 4067F31E2CDA5A56002E28BD /* StreamAudioSessionAdapter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F31D2CDA5A53002E28BD /* StreamAudioSessionAdapter_Tests.swift */; }; 4069A0042AD985D2009A3A06 /* CallParticipant_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406303412AD848000091AE77 /* CallParticipant_Mock.swift */; }; 4069A0052AD985D3009A3A06 /* CallParticipant_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406303412AD848000091AE77 /* CallParticipant_Mock.swift */; }; 406A8E8D2AA1D78C001F598A /* AppEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4030E59F2A9DF5BD003E8CBA /* AppEnvironment.swift */; }; @@ -311,6 +322,7 @@ 4097B3832BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4097B3822BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift */; }; 40986C3A2CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C392CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift */; }; 40986C3C2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C3B2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift */; }; + 40986C3E2CD1148F00510F88 /* AudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */; }; 409CA7992BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */; }; 40A0E9602B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */; }; 40A0E9622B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */; }; @@ -1097,7 +1109,6 @@ 84D6494029E94C14002CA428 /* CallsQuery.swift in Sources */ = {isa = 
PBXBuildFile; fileRef = 84D6493F29E94C14002CA428 /* CallsQuery.swift */; }; 84D6494329E9AD08002CA428 /* RTMPIngress.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84D6494129E9AD07002CA428 /* RTMPIngress.swift */; }; 84D6494429E9AD08002CA428 /* CallIngressResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84D6494229E9AD08002CA428 /* CallIngressResponse.swift */; }; - 84D6494729E9F2D0002CA428 /* WebRTCClient_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84D6494629E9F2D0002CA428 /* WebRTCClient_Tests.swift */; }; 84D6E53A2B3AD10000D0056C /* RepeatingTimer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84D6E5392B3AD10000D0056C /* RepeatingTimer_Tests.swift */; }; 84D91E9C2C7CB0AA00B163A0 /* CallSessionParticipantCountsUpdatedEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84D91E9A2C7CB0AA00B163A0 /* CallSessionParticipantCountsUpdatedEvent.swift */; }; 84D91E9D2C7CB0AA00B163A0 /* CallRtmpBroadcastFailedEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84D91E9B2C7CB0AA00B163A0 /* CallRtmpBroadcastFailedEvent.swift */; }; @@ -1191,7 +1202,6 @@ 84E86D4F2905E731004BA44C /* Utils.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84E86D4E2905E731004BA44C /* Utils.swift */; }; 84EA5D3C28BFB890004D3531 /* CallParticipantImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84EA5D3B28BFB890004D3531 /* CallParticipantImageView.swift */; }; 84EA5D3F28C09AAC004D3531 /* CallController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84EA5D3E28C09AAB004D3531 /* CallController.swift */; }; - 84EA5D4328C1E944004D3531 /* AudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84EA5D4228C1E944004D3531 /* AudioSession.swift */; }; 84EBA4A22A72B81100577297 /* BroadcastBufferConnection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84EBA4A12A72B81100577297 /* BroadcastBufferConnection.swift */; }; 84ED240D286C9515002A3186 /* DemoCallContainerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84ED240C286C9515002A3186 /* DemoCallContainerView.swift */; }; 84F07BD12CB4804900422E58 /* NoiseCancellationSettingsRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84F07BD02CB4804900422E58 /* NoiseCancellationSettingsRequest.swift */; }; @@ -1545,6 +1555,17 @@ 4065839A2B877ADA00B4F979 /* CIImage+Sendable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CIImage+Sendable.swift"; sourceTree = ""; }; 4065839C2B877B6500B4F979 /* UIDevice+NeuralEngine.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIDevice+NeuralEngine.swift"; sourceTree = ""; }; 4067A5D72AE1249400CFDEB1 /* CornerClipper_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CornerClipper_Tests.swift; sourceTree = ""; }; + 4067F3072CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSessionAdapterDelegate.swift; sourceTree = ""; }; + 4067F30A2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionRouteDescription+Convenience.swift"; sourceTree = ""; }; + 4067F30C2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession_RouteChangeReason+Convenience.swift"; sourceTree = ""; }; + 4067F30E2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift */ = 
{isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+CategoryOptions+Convenience.swift"; sourceTree = ""; }; + 4067F3102CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionPortDescription+Convenience.swift"; sourceTree = ""; }; + 4067F3122CDA33C4002E28BD /* RTCAudioSessionConfiguration+Default.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioSessionConfiguration+Default.swift"; sourceTree = ""; }; + 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCAudioSession.swift; sourceTree = ""; }; + 4067F3162CDA40CC002E28BD /* StreamAudioSessionAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSessionAdapter.swift; sourceTree = ""; }; + 4067F3182CDA469C002E28BD /* MockAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAudioSession.swift; sourceTree = ""; }; + 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCAudioSession_Tests.swift; sourceTree = ""; }; + 4067F31D2CDA5A53002E28BD /* StreamAudioSessionAdapter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSessionAdapter_Tests.swift; sourceTree = ""; }; 406AF2002AF3D98F00ED4D0C /* SimulatorScreenCapturer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorScreenCapturer.swift; sourceTree = ""; }; 406AF2042AF3DE4000ED4D0C /* test.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = test.mp4; sourceTree = ""; }; 406B3BD62C8F331F00FC93A1 /* RTCVideoTrack+Sendable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCVideoTrack+Sendable.swift"; sourceTree = ""; }; @@ -1636,6 +1657,7 @@ 4097B3822BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OnChangeViewModifier_iOS13.swift; sourceTree = ""; }; 40986C392CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCRtpEncodingParameters_Test.swift; sourceTree = ""; }; 40986C3B2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCRtpTransceiverInit_Tests.swift; sourceTree = ""; }; + 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSession_Tests.swift; sourceTree = ""; }; 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "XCTestCase+PredicateFulfillment.swift"; sourceTree = ""; }; 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoBackgroundEffectSelector.swift; sourceTree = ""; }; 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+CGOrientation.swift"; sourceTree = ""; }; @@ -2320,7 +2342,6 @@ 84D6493F29E94C14002CA428 /* CallsQuery.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
CallsQuery.swift; sourceTree = ""; }; 84D6494129E9AD07002CA428 /* RTMPIngress.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPIngress.swift; sourceTree = ""; }; 84D6494229E9AD08002CA428 /* CallIngressResponse.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallIngressResponse.swift; sourceTree = ""; }; - 84D6494629E9F2D0002CA428 /* WebRTCClient_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCClient_Tests.swift; sourceTree = ""; }; 84D6E5392B3AD10000D0056C /* RepeatingTimer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RepeatingTimer_Tests.swift; sourceTree = ""; }; 84D91E9A2C7CB0AA00B163A0 /* CallSessionParticipantCountsUpdatedEvent.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallSessionParticipantCountsUpdatedEvent.swift; sourceTree = ""; }; 84D91E9B2C7CB0AA00B163A0 /* CallRtmpBroadcastFailedEvent.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallRtmpBroadcastFailedEvent.swift; sourceTree = ""; }; @@ -2411,7 +2432,6 @@ 84E86D4E2905E731004BA44C /* Utils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Utils.swift; sourceTree = ""; }; 84EA5D3B28BFB890004D3531 /* CallParticipantImageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallParticipantImageView.swift; sourceTree = ""; }; 84EA5D3E28C09AAB004D3531 /* CallController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallController.swift; sourceTree = ""; }; - 84EA5D4228C1E944004D3531 /* AudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSession.swift; sourceTree = ""; }; 84EBA4A12A72B81100577297 /* BroadcastBufferConnection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastBufferConnection.swift; sourceTree = ""; }; 84EBAA92288C137E00BE3176 /* Modifiers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Modifiers.swift; sourceTree = ""; }; 84ED240C286C9515002A3186 /* DemoCallContainerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoCallContainerView.swift; sourceTree = ""; }; @@ -2629,7 +2649,6 @@ children = ( 40149DCB2B7E814300473176 /* AVAudioRecorderBuilder.swift */, 40149DCD2B7E837A00473176 /* StreamCallAudioRecorder.swift */, - 40149DCF2B7E839500473176 /* AudioSessionProtocol.swift */, 4031D7F92B84B077002EC6E4 /* StreamActiveCallProvider.swift */, ); path = AudioRecorder; @@ -3092,6 +3111,40 @@ path = Filters; sourceTree = ""; }; + 4067F3062CDA32F0002E28BD /* AudioSession */ = { + isa = PBXGroup; + children = ( + 4067F3092CDA330E002E28BD /* Extensions */, + 40149DCA2B7E813500473176 /* AudioRecorder */, + 40149DCF2B7E839500473176 /* AudioSessionProtocol.swift */, + 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */, + 4067F3162CDA40CC002E28BD /* StreamAudioSessionAdapter.swift */, + 4067F3072CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift */, + ); + path = AudioSession; + sourceTree = ""; + }; + 4067F3092CDA330E002E28BD /* Extensions */ = { + isa = PBXGroup; + children = ( + 4067F3122CDA33C4002E28BD /* RTCAudioSessionConfiguration+Default.swift */, + 4067F3102CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift */, + 4067F30E2CDA3394002E28BD /* 
AVAudioSession+CategoryOptions+Convenience.swift */, + 4067F30C2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift */, + 4067F30A2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift */, + ); + path = Extensions; + sourceTree = ""; + }; + 4067F31A2CDA55D1002E28BD /* AudioSession */ = { + isa = PBXGroup; + children = ( + 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */, + 4067F31D2CDA5A53002E28BD /* StreamAudioSessionAdapter_Tests.swift */, + ); + path = AudioSession; + sourceTree = ""; + }; 406B3C042C8F602D00FC93A1 /* LocalMediaAdapters */ = { isa = PBXGroup; children = ( @@ -4378,6 +4431,7 @@ 842747F429EEDACB00E063AD /* Utils */ = { isa = PBXGroup; children = ( + 4067F31A2CDA55D1002E28BD /* AudioSession */, 40E18AB02CD51FB900A65C9F /* Queues */, 4029E9562CB943E800E1D571 /* CollectionDelayedUpdateObserver_Tests.swift */, 40C2B5C42C2D7ADE00EC2C2D /* RejectionReasonProvider */, @@ -4720,6 +4774,7 @@ 8492B87629081CE700006649 /* Mock */ = { isa = PBXGroup; children = ( + 4067F3182CDA469C002E28BD /* MockAudioSession.swift */, 409774AD2CC1979F00E0D3EE /* MockCallController.swift */, 40C75BB62CB4044600C167C3 /* MockThermalStateObserver.swift */, 40FE5EBC2C9C82A6006B0881 /* MockRTCVideoCapturerDelegate.swift */, @@ -4857,6 +4912,7 @@ 84AF64D3287C79220012A503 /* Utils */ = { isa = PBXGroup; children = ( + 4067F3062CDA32F0002E28BD /* AudioSession */, 408CF9C42CAEC24500F56833 /* ScreenPropertiesAdapter */, 40C9E44F2C9880D300802B28 /* Unwrap */, 40382F2C2C88B87500C2D00F /* ReflectiveStringConvertible */, @@ -4868,7 +4924,6 @@ 403FF3DF2BA1D20E0092CE8A /* Queues */, 40FB150D2BF77CA200D5E580 /* StateMachine */, 40FB15082BF74C0A00D5E580 /* CallCache */, - 40149DCA2B7E813500473176 /* AudioRecorder */, 8456E6C7287EC343004E180E /* Logger */, 84C2997C28784BB30034B735 /* Utils.swift */, 84AF64D4287C79320012A503 /* RawJSON.swift */, @@ -4970,10 +5025,10 @@ 40A0E9652B88E03B0089E8D3 /* VideoFilters */, 402C54582B6BE4C900672BFB /* Statistics */, 845E31042A7121BE004DC470 /* Screensharing */, - 84D6494629E9F2D0002CA428 /* WebRTCClient_Tests.swift */, 8414081229F28B5600FF2D7C /* RTCConfiguration_Tests.swift */, 8446AF902A4D84F4002AB07B /* Retries_Tests.swift */, 841FF5042A5D815700809BBB /* VideoCapturerUtils_Tests.swift */, + 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */, ); path = WebRTC; sourceTree = ""; @@ -5425,7 +5480,6 @@ 84FC2C2328AD1B5E00181490 /* WebRTCEventDecoder.swift */, 84FC2C2728AD350100181490 /* WebRTCEvents.swift */, 84BBF62A28AFC24000387A02 /* PeerConnectionFactory.swift */, - 84EA5D4228C1E944004D3531 /* AudioSession.swift */, 84BBF62C28AFC72700387A02 /* DefaultRTCMediaConstraints.swift */, 8411925D28C5E5D00074EF88 /* DefaultRTCConfiguration.swift */, ); @@ -6257,6 +6311,7 @@ 40BBC4A12C623D03002AEF92 /* RTCMediaStream+Convenience.swift in Sources */, 84DCA20E2A3885FE000C3411 /* Permissions.swift in Sources */, 842E70D82B91BE1700D2D68B /* StatsOptions.swift in Sources */, + 4067F30B2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift in Sources */, 40BBC48E2C623C6E002AEF92 /* PeerConnectionType.swift in Sources */, 84BAD77E2A6BFFB200733156 /* BroadcastSampleHandler.swift in Sources */, 40C2B5BB2C2C41DA00EC2C2D /* RejectCallRequest+Reason.swift in Sources */, @@ -6321,7 +6376,6 @@ 4012B1962BFCAC26006B0031 /* StreamCallStateMachine+RejectedStage.swift in Sources */, 846E4AEF29CDEA66003733AB /* WSAuthMessageRequest.swift in Sources */, 406583992B877AB400B4F979 /* CIImage+Resize.swift in Sources */, - 84EA5D4328C1E944004D3531 /* 
AudioSession.swift in Sources */, 40BBC4A42C623D03002AEF92 /* RTCRtpTransceiverInit+Convenience.swift in Sources */, 841BAA382BD15CDE000C73E4 /* CallTimeline.swift in Sources */, 8449824D2C738A830029734D /* StartRTMPBroadcastsResponse.swift in Sources */, @@ -6352,6 +6406,7 @@ 40C4DF492C1C2C210035DBC2 /* Publisher+WeakAssign.swift in Sources */, 4157FF912C9AC9EC0093D839 /* RTMPBroadcastRequest.swift in Sources */, 844982472C738A830029734D /* DeleteRecordingResponse.swift in Sources */, + 4067F3172CDA40CC002E28BD /* StreamAudioSessionAdapter.swift in Sources */, 40AB34DA2C5D5A7B00B5B6B3 /* WebRTCStatsReporter.swift in Sources */, 408679F72BD12F1000D027E0 /* AudioFilter.swift in Sources */, 8456E6D2287EC343004E180E /* ConsoleLogDestination.swift in Sources */, @@ -6368,10 +6423,12 @@ 841BAA342BD15CDE000C73E4 /* GeolocationResult.swift in Sources */, 848CCCEB2AB8ED8F002E83A2 /* CallHLSBroadcastingStoppedEvent.swift in Sources */, 40BBC4C42C638789002AEF92 /* RTCPeerConnectionCoordinator.swift in Sources */, + 4067F3152CDA4094002E28BD /* StreamRTCAudioSession.swift in Sources */, 40BBC4C62C638915002AEF92 /* WebRTCCoordinator.swift in Sources */, 841BAA392BD15CDE000C73E4 /* UserSessionStats.swift in Sources */, 406B3BD72C8F332200FC93A1 /* RTCVideoTrack+Sendable.swift in Sources */, 406B3BE32C8F358600FC93A1 /* Stream_Video_Sfu_Models_Codec+Convenience.swift in Sources */, + 4067F3132CDA33C6002E28BD /* RTCAudioSessionConfiguration+Default.swift in Sources */, 8409465829AF4EEC007AF5BF /* SendReactionRequest.swift in Sources */, 40BBC4BA2C627F83002AEF92 /* TrackEvent.swift in Sources */, 84B9A56D29112F39004DE31A /* EndpointConfig.swift in Sources */, @@ -6392,6 +6449,7 @@ 84DC389F29ADFCFD00946713 /* JoinCallResponse.swift in Sources */, 84A7E1AE2883E6B300526C98 /* HTTPUtils.swift in Sources */, 4065839D2B877B6500B4F979 /* UIDevice+NeuralEngine.swift in Sources */, + 4067F3112CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift in Sources */, 40FB15142BF77D9000D5E580 /* StreamCallStateMachine+Stage.swift in Sources */, 84DC38D229ADFCFD00946713 /* UpdatedCallPermissionsEvent.swift in Sources */, 40429D5B2C779ADB00AC7FFF /* SFUEventAdapter.swift in Sources */, @@ -6442,6 +6500,7 @@ 841BAA352BD15CDE000C73E4 /* CallTranscriptionStartedEvent.swift in Sources */, 4159F18A2C86FA41002B94D3 /* NullBool.swift in Sources */, 84DC38A729ADFCFD00946713 /* GoLiveResponse.swift in Sources */, + 4067F30D2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift in Sources */, 84DC38C329ADFCFD00946713 /* GeofenceSettings.swift in Sources */, 842B8E162A2DFED900863A87 /* CallRingEvent.swift in Sources */, 842B8E2D2A2DFED900863A87 /* StopTranscriptionResponse.swift in Sources */, @@ -6487,6 +6546,7 @@ 84A7E1922883647200526C98 /* Event.swift in Sources */, 84DC38D629ADFCFD00946713 /* GeofenceSettingsRequest.swift in Sources */, 84DC38BB29ADFCFD00946713 /* UnblockUserRequest.swift in Sources */, + 4067F3082CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift in Sources */, 84D6494429E9AD08002CA428 /* CallIngressResponse.swift in Sources */, 40BBC4C32C6373C4002AEF92 /* WebRTCStateAdapter.swift in Sources */, 403CA9B22CC7BAD6001A88C2 /* VideoLayer.swift in Sources */, @@ -6514,6 +6574,7 @@ 40BBC49F2C623D03002AEF92 /* RTCPeerConnectionState+CustomStringConvertible.swift in Sources */, 842B8E172A2DFED900863A87 /* EgressRTMPResponse.swift in Sources */, 841BAA312BD15CDE000C73E4 /* VideoQuality.swift in Sources */, + 4067F30F2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift in 
Sources */, 40BBC4A32C623D03002AEF92 /* RTCIceGatheringState+CustomStringConvertible.swift in Sources */, 84A7E1A82883E46200526C98 /* Timers.swift in Sources */, 84DC38B129ADFCFD00946713 /* AudioSettings.swift in Sources */, @@ -6662,6 +6723,7 @@ 406B3C3A2C909CA600FC93A1 /* RTCPeerConnectionCoordinator_Tests.swift in Sources */, 406B3C412C919F5A00FC93A1 /* MockRTCPeerConnectionCoordinatorStack.swift in Sources */, 40F017472BBEEF5100E89FD1 /* ThumbnailResponse+Dummy.swift in Sources */, + 4067F31E2CDA5A56002E28BD /* StreamAudioSessionAdapter_Tests.swift in Sources */, 40AF6A412C9356B700BA2935 /* WebRTCCoordinatorStateMachine_MigratingStageTests.swift in Sources */, 408CE0F72BD95EB60052EC3A /* VideoConfig+Dummy.swift in Sources */, 84F58B7029EE914400010C4C /* BackgroundTaskScheduler_Tests.swift in Sources */, @@ -6678,6 +6740,7 @@ 406B3C4C2C91EFA700FC93A1 /* MockCallAuthenticator.swift in Sources */, 40F0174D2BBEEFD500E89FD1 /* TranscriptionSettings+Dummy.swift in Sources */, 406B3C4A2C91EE9700FC93A1 /* MockWebRTCCoordinatorStack.swift in Sources */, + 4067F3192CDA469F002E28BD /* MockAudioSession.swift in Sources */, 40AB34C92C5D3F2E00B5B6B3 /* ParticipantsStats+Dummy.swift in Sources */, 84F58B7629EE92BF00010C4C /* UniqueValues.swift in Sources */, 84F58B9529EEBA3900010C4C /* EquatableEvent.swift in Sources */, @@ -6720,6 +6783,7 @@ 40F017392BBEAF6400E89FD1 /* MockCallKitService.swift in Sources */, 403FB1602BFE22840047A696 /* StreamCallStateMachineStageRejectingStage_Tests.swift in Sources */, 40F017402BBEBC6500E89FD1 /* MockCallKitPushNotificationAdapter.swift in Sources */, + 40986C3E2CD1148F00510F88 /* AudioSession_Tests.swift in Sources */, 403FB1512BFE1AA90047A696 /* StreamCallStateMachine_Tests.swift in Sources */, 406B3C532C92007900FC93A1 /* WebRTCCoordinatorStateMachine_ConnectedStageTests.swift in Sources */, 84F58B8129EE9C4900010C4C /* WebSocketPingController_Delegate.swift in Sources */, @@ -6756,6 +6820,7 @@ 40C9E4422C943DC000802B28 /* WebRTCCoordinatorStateMachine_ErrorStageTests.swift in Sources */, 842747E529EECD0100E063AD /* ClientError_Tests.swift in Sources */, 40F017552BBEF03E00E89FD1 /* RecordSettingsResponse+Dummy.swift in Sources */, + 4067F31C2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift in Sources */, 8446AF912A4D84F4002AB07B /* Retries_Tests.swift in Sources */, 406B3C272C904F7100FC93A1 /* LocalScreenShareMediaAdapter_Tests.swift in Sources */, 84F58B7429EE928400010C4C /* TestError.swift in Sources */, @@ -6808,7 +6873,6 @@ 40AF6A3F2C934E3400BA2935 /* WebRTCCoordinatorStateMachine_RejoiningStageTests.swift in Sources */, 40F017492BBEEF8100E89FD1 /* TargetResolution+Dummy.swift in Sources */, 8492B87A29081E6600006649 /* StreamVideo_Mock.swift in Sources */, - 84D6494729E9F2D0002CA428 /* WebRTCClient_Tests.swift in Sources */, 4013387A2BF248CC007318BD /* MockCall.swift in Sources */, 40FE5EBB2C9C7D40006B0881 /* StreamVideoCaptureHandler_Tests.swift in Sources */, 4029E95C2CB9449900E1D571 /* CallParticipant_TrackSubscriptionTests.swift in Sources */, diff --git a/StreamVideoSwiftUITests/CallViewModel_Tests.swift b/StreamVideoSwiftUITests/CallViewModel_Tests.swift index c3006e083..abffb51bd 100644 --- a/StreamVideoSwiftUITests/CallViewModel_Tests.swift +++ b/StreamVideoSwiftUITests/CallViewModel_Tests.swift @@ -84,7 +84,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { ) // Then - await fulfillment("CallViewModel.callingState expected:.idle actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await 
fulfillment("CallViewModel.callingState expected:.idle actual: \(callingState)") { callViewModel.callingState == .idle } } @@ -112,7 +113,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { eventNotificationCenter.process(first) // Then - await fulfillment("CallViewModel.callingState expected:.outgoing actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.outgoing actual: \(callingState)") { callViewModel.callingState == .outgoing } @@ -131,7 +133,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { eventNotificationCenter.process(second) // Then - await fulfillment("CallViewModel.callingState expected:.idle actual: \(callViewModel.callingState)") { + let callingStateB = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.idle actual: \(callingStateB)") { callViewModel.callingState == .idle } } @@ -152,7 +155,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { eventNotificationCenter.process(.coordinatorEvent(.typeCallEndedEvent(event))) // Then - await fulfillment("CallViewModel.callingState expected:.idle actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.idle actual: \(callingState)") { callViewModel.callingState == .idle } } @@ -173,7 +177,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { eventNotificationCenter.process(.coordinatorEvent(.typeBlockedUserEvent(event))) // Then - await fulfillment("CallViewModel.callingState expected:.idle actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.idle actual: \(callingState)") { callViewModel.callingState == .idle } } @@ -191,7 +196,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { call?.state.update(from: callData) try? 
await Task.sleep(nanoseconds: 250_000_000) callViewModel.setActiveCall(call) - await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") { callViewModel.callingState == .inCall } @@ -224,7 +230,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { callViewModel.hangUp() // Then - await fulfillment("CallViewModel.callingState expected:.idle actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.idle actual: \(callingState)") { callViewModel.callingState == .idle } } @@ -282,7 +289,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { callViewModel.acceptCall(callType: callType, callId: callId) // Then - await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") { callViewModel.callingState == .inCall } } @@ -337,7 +345,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { callViewModel.rejectCall(callType: callType, callId: callId) // Then - await fulfillment("CallViewModel.callingState expected:.idle actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.idle actual: \(callingState)") { callViewModel.callingState == .idle } } @@ -352,7 +361,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { // Then XCTAssert(callViewModel.callingState == .joining) - await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") { callViewModel.callingState == .inCall } } @@ -393,7 +403,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { ) // Then - await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") { callViewModel.callingState == .inCall } } @@ -432,7 +443,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { callViewModel.hangUp() // Then - await fulfillment("CallViewModel.callingState expected:.idle actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.idle actual: \(callingState)") { callViewModel.callingState == .idle } } @@ -447,7 +459,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { // When callViewModel.startCall(callType: .default, callId: callId, members: participants) - await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") { + let callingState = callViewModel.callingState + await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") { callViewModel.callingState == .inCall } @@ -465,7 +478,8 @@ final class CallViewModel_Tests: StreamVideoTestCase { // When callViewModel.startCall(callType: .default, callId: callId, members: participants) - await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") { + let callingState = 
callViewModel.callingState
+        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") {
             callViewModel.callingState == .inCall
         }
 
@@ -483,7 +497,8 @@
 
         // When
         callViewModel.startCall(callType: .default, callId: callId, members: participants)
-        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") {
+        let callingState = callViewModel.callingState
+        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") {
             callViewModel.callingState == .inCall
         }
 
@@ -503,7 +518,8 @@
 
         // When
         callViewModel.startCall(callType: .default, callId: callId, members: participants)
-        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") {
+        let callingState = callViewModel.callingState
+        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") {
             callViewModel.callingState == .inCall
         }
 
@@ -575,7 +591,8 @@
 
         // When
         callViewModel.startCall(callType: .default, callId: callId, members: participants)
-        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") {
+        let callingState = callViewModel.callingState
+        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") {
             callViewModel.callingState == .inCall
         }
 
@@ -606,7 +623,8 @@
 
         // When
         callViewModel.startCall(callType: .default, callId: callId, members: participants)
-        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") {
+        let callingState = callViewModel.callingState
+        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") {
             callViewModel.callingState == .inCall
         }
 
@@ -883,7 +901,8 @@
         await fulfillment { callViewModel.isSubscribedToCallEvents }
         callViewModel.startCall(callType: .default, callId: callId, members: [])
-        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callViewModel.callingState)") {
+        let callingState = callViewModel.callingState
+        await fulfillment("CallViewModel.callingState expected:.inCall actual: \(callingState)") {
             callViewModel.callingState == .inCall
         }
         let call = try XCTUnwrap(callViewModel.call, file: file, line: line)
diff --git a/StreamVideoTests/Mock/MockAudioSession.swift b/StreamVideoTests/Mock/MockAudioSession.swift
new file mode 100644
index 000000000..bdaea5f4d
--- /dev/null
+++ b/StreamVideoTests/Mock/MockAudioSession.swift
@@ -0,0 +1,148 @@
+//
+// Copyright © 2024 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+@testable import StreamVideo
+import StreamWebRTC
+
+final class MockAudioSession: AudioSessionProtocol, Mockable {
+    final class WeakBox<T: AnyObject> {
+        weak var value: T?
+        init(value: T?) { self.value = value }
+    }
+
+    // MARK: - Mockable
+
+    typealias FunctionKey = MockFunctionKey
+    typealias FunctionInputKey = MockFunctionInputKey
+
+    enum MockFunctionKey: CaseIterable {
+        case add
+        case setMode
+        case setCategory
+        case setActive
+        case setConfiguration
+        case overrideOutputAudioPort
+        case updateConfiguration
+        case requestRecordPermission
+    }
+
+    enum MockFunctionInputKey: Payloadable {
+        case add(delegate: WeakBox<RTCAudioSessionDelegate>)
+        case setMode(mode: String)
+        case setCategory(category: String, categoryOptions: AVAudioSession.CategoryOptions)
+        case setActive(value: Bool)
+        case setConfiguration(value: RTCAudioSessionConfiguration)
+        case overrideOutputAudioPort(value: AVAudioSession.PortOverride)
+        case updateConfiguration
+        case requestRecordPermission
+
+        var payload: Any {
+            switch self {
+            case let .add(delegate):
+                return delegate
+
+            case let .setMode(mode):
+                return mode
+
+            case let .setCategory(category, categoryOptions):
+                return (category, categoryOptions)
+
+            case let .setActive(value):
+                return value
+
+            case let .setConfiguration(value):
+                return value
+
+            case let .overrideOutputAudioPort(value):
+                return value
+
+            case .updateConfiguration:
+                return ()
+
+            case .requestRecordPermission:
+                return ()
+            }
+        }
+    }
+
+    var stubbedProperty: [String: Any] = [:]
+    var stubbedFunction: [FunctionKey: Any] = [:]
+    @Atomic var stubbedFunctionInput: [FunctionKey: [FunctionInputKey]] = FunctionKey.allCases
+        .reduce(into: [FunctionKey: [FunctionInputKey]]()) { $0[$1] = [] }
+    func stub<T>(for keyPath: KeyPath<MockAudioSession, T>, with value: T) { stubbedProperty[propertyKey(for: keyPath)] = value }
+    func stub<T>(for function: FunctionKey, with value: T) { stubbedFunction[function] = value }
+
+    // MARK: - AudioSessionProtocol
+
+    var isActive: Bool = false
+
+    var currentRoute: AVAudioSessionRouteDescription = .init()
+
+    var category: String = ""
+
+    var isUsingSpeakerOutput: Bool = false
+
+    var isUsingExternalOutput: Bool = false
+
+    var useManualAudio: Bool = false
+
+    var isAudioEnabled: Bool = false
+
+    func add(_ delegate: RTCAudioSessionDelegate) {
+        stubbedFunctionInput[.add]?.append(.add(delegate: .init(value: delegate)))
+    }
+
+    func setMode(_ mode: String) throws {
+        stubbedFunctionInput[.setMode]?.append(.setMode(mode: mode))
+    }
+
+    func setCategory(
+        _ category: String,
+        with categoryOptions: AVAudioSession.CategoryOptions
+    ) throws {
+        stubbedFunctionInput[.setCategory]?.append(
+            .setCategory(
+                category: category,
+                categoryOptions: categoryOptions
+            )
+        )
+    }
+
+    func setActive(_ isActive: Bool) throws {
+        stubbedFunctionInput[.setActive]?.append(.setActive(value: isActive))
+    }
+
+    func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws {
+        stubbedFunctionInput[.setConfiguration]?.append(
+            .setConfiguration(
+                value: configuration
+            )
+        )
+    }
+
+    func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws {
+        stubbedFunctionInput[.overrideOutputAudioPort]?.append(
+            .overrideOutputAudioPort(value: port)
+        )
+    }
+
+    func updateConfiguration(
+        functionName: StaticString,
+        file: StaticString,
+        line: UInt,
+        _ block: @escaping (AudioSessionProtocol) throws -> Void
+    ) {
+        do {
+            try block(self)
+            stubbedFunctionInput[.updateConfiguration]?.append(.updateConfiguration)
+        } catch {
+            /* No-op */
+        }
+    }
+
+    func requestRecordPermission() async -> Bool {
+        stubbedFunctionInput[.requestRecordPermission]?.append(.requestRecordPermission)
+        return stubbedFunction[.requestRecordPermission] as? Bool ?? false
+    }
+}
diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift
index 77c9c7a4d..5e6d622da 100644
--- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift
+++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift
@@ -134,7 +134,6 @@ final class MockRTCPeerConnectionCoordinator:
         callSettings: CallSettings = .init(),
         audioSettings: AudioSettings = .init(),
         sfuAdapter: SFUAdapter,
-        audioSession: AudioSession = .init(),
         videoCaptureSessionProvider: VideoCaptureSessionProvider = .init(),
         screenShareSessionProvider: ScreenShareSessionProvider = .init()
     ) throws {
@@ -151,7 +150,6 @@
             callSettings: callSettings,
             audioSettings: audioSettings,
             sfuAdapter: sfuAdapter,
-            audioSession: audioSession,
             videoCaptureSessionProvider: videoCaptureSessionProvider,
             screenShareSessionProvider: screenShareSessionProvider
         )
diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift
index 9ba85f498..9d6e74af2 100644
--- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift
+++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift
@@ -19,7 +19,6 @@ final class MockRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinato
         callSettings: CallSettings,
         audioSettings: AudioSettings,
         sfuAdapter: SFUAdapter,
-        audioSession: AudioSession,
         videoCaptureSessionProvider: VideoCaptureSessionProvider,
         screenShareSessionProvider: ScreenShareSessionProvider
     ) -> RTCPeerConnectionCoordinator {
@@ -33,7 +32,6 @@
             callSettings: callSettings,
             audioSettings: audioSettings,
             sfuAdapter: sfuAdapter,
-            audioSession: audioSession,
             videoCaptureSessionProvider: videoCaptureSessionProvider,
             screenShareSessionProvider: screenShareSessionProvider
         )
diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift
index 54a3ae28a..9428507fe 100644
--- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift
+++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift
@@ -12,7 +12,7 @@ struct MockRTCPeerConnectionCoordinatorStack {
     let peerConnection: MockRTCPeerConnection
     let peerConnectionFactory: PeerConnectionFactory
     let mockSFUStack: MockSFUStack
-    let audioSession: AudioSession
+    let audioSession: StreamAudioSessionAdapter
     let spySubject: PassthroughSubject<TrackEvent, Never>
     let mockLocalAudioMediaAdapter: MockLocalMediaAdapter
     let mockLocalVideoMediaAdapter: MockLocalMediaAdapter
@@ -32,7 +32,7 @@
         peerConnection: MockRTCPeerConnection = .init(),
         peerConnectionFactory: PeerConnectionFactory = .mock(),
         mockSFUStack: MockSFUStack = .init(),
-        audioSession: AudioSession = .init(),
+        audioSession: StreamAudioSessionAdapter = .init(),
         spySubject: PassthroughSubject<TrackEvent, Never> = .init(),
         mockLocalAudioMediaAdapter: MockLocalMediaAdapter = .init(),
         mockLocalVideoMediaAdapter: MockLocalMediaAdapter = .init(),
@@ -53,8 +53,7 @@
             peerConnection: peerConnection,
             peerConnectionFactory: peerConnectionFactory,
             localMediaManager: mockLocalAudioMediaAdapter,
-            subject: spySubject,
-            audioSession: audioSession
+            subject: spySubject
         )
         self.audioMediaAdapter = audioMediaAdapter
 
@@ -91,7 +90,6 @@
callSettings: callSettings, audioSettings: audioSettings, sfuAdapter: mockSFUStack.adapter, - audioSession: audioSession, mediaAdapter: mediaAdapter ) } diff --git a/StreamVideoTests/TestUtils/Mockable.swift b/StreamVideoTests/TestUtils/Mockable.swift index dfe1a4482..6a196c013 100644 --- a/StreamVideoTests/TestUtils/Mockable.swift +++ b/StreamVideoTests/TestUtils/Mockable.swift @@ -49,4 +49,8 @@ extension Mockable { } func timesCalled(_ key: FunctionKey) -> Int { stubbedFunctionInput[key]?.count ?? 0 } + + mutating func resetRecords(for key: FunctionKey) { + stubbedFunctionInput[key] = [] + } } diff --git a/StreamVideoTests/Utilities/Extensions/XCTestCase+PredicateFulfillment.swift b/StreamVideoTests/Utilities/Extensions/XCTestCase+PredicateFulfillment.swift index 6a141a895..d121ff911 100644 --- a/StreamVideoTests/Utilities/Extensions/XCTestCase+PredicateFulfillment.swift +++ b/StreamVideoTests/Utilities/Extensions/XCTestCase+PredicateFulfillment.swift @@ -11,7 +11,7 @@ extension XCTestCase { @MainActor func fulfillment( timeout: TimeInterval = defaultTimeout, - _ message: @autoclosure () -> String = "", + _ message: @Sendable @autoclosure () -> String = "", file: StaticString = #file, line: UInt = #line, block: @MainActor @Sendable @escaping () -> Bool diff --git a/StreamVideoTests/Utils/AudioSession/StreamAudioSessionAdapter_Tests.swift b/StreamVideoTests/Utils/AudioSession/StreamAudioSessionAdapter_Tests.swift new file mode 100644 index 000000000..81f3955ac --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/StreamAudioSessionAdapter_Tests.swift @@ -0,0 +1,354 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +@testable import StreamVideo +import StreamWebRTC +@preconcurrency import XCTest + +final class StreamAudioSessionAdapter_Tests: XCTestCase, @unchecked Sendable { + + private lazy var audioSession: MockAudioSession! = .init() + private lazy var subject: StreamAudioSessionAdapter! 
= StreamAudioSessionAdapter(audioSession)
+
+    // MARK: - Lifecycle
+
+    override func tearDown() {
+        subject = nil
+        audioSession = nil
+        super.tearDown()
+    }
+
+    // MARK: - init
+
+    func test_init_callAudioSessionKeyCurrentValueUpdated() {
+        _ = subject
+        XCTAssertTrue(StreamActiveCallAudioSessionKey.currentValue === audioSession)
+    }
+
+    func test_init_setsManualAudioAndEnabled() {
+        _ = subject
+
+        XCTAssertTrue(audioSession.useManualAudio)
+        XCTAssertTrue(audioSession.isAudioEnabled)
+    }
+
+    func test_init_updatesConfiguration() async throws {
+        let expected = RTCAudioSessionConfiguration.default
+
+        _ = subject
+        await fulfillment { self.audioSession.timesCalled(.updateConfiguration) == 1 }
+
+        let actual = try XCTUnwrap(
+            audioSession.recordedInputPayload(
+                RTCAudioSessionConfiguration.self,
+                for: .setConfiguration
+            )?.first
+        )
+        XCTAssertEqual(actual, expected)
+    }
+
+    // MARK: - deinit
+
+    func test_deinit_callAudioSessionKeyCurrentValueSetToNil() {
+        _ = subject
+        XCTAssertTrue(StreamActiveCallAudioSessionKey.currentValue === audioSession)
+
+        subject = nil
+
+        XCTAssertNil(StreamActiveCallAudioSessionKey.currentValue)
+    }
+
+    // MARK: - Active Call Settings Tests
+
+    func test_didUpdateCallSettings_withUpdatedCallSettingsAudioOutputOn_updatesAudioSession() async throws {
+        let callSettings = CallSettings(audioOn: false, audioOutputOn: true)
+
+        subject.didUpdateCallSettings(callSettings)
+
+        await fulfillment { self.audioSession.timesCalled(.setActive) == 1 }
+    }
+
+    func test_didUpdateCallSettings_withUpdatedCallSettingsSpeakerOn_updatesAudioSession() async throws {
+        audioSession.category = .unique
+        let callSettings = CallSettings(speakerOn: true)
+
+        subject.didUpdateCallSettings(callSettings)
+
+        await fulfillment { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 }
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                String.self,
+                for: .setMode
+            )?.first,
+            AVAudioSession.Mode.videoChat.rawValue
+        )
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                (String, AVAudioSession.CategoryOptions).self,
+                for: .setCategory
+            )?.first?.0,
+            audioSession.category
+        )
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                (String, AVAudioSession.CategoryOptions).self,
+                for: .setCategory
+            )?.first?.1,
+            [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP]
+        )
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                AVAudioSession.PortOverride.self,
+                for: .overrideOutputAudioPort
+            )?.first,
+            .speaker
+        )
+    }
+
+    func test_didUpdateCallSettings_withUpdatedCallSettingsSpeakerOff_updatesAudioSession() async throws {
+        audioSession.category = .unique
+        subject.didUpdateCallSettings(CallSettings(speakerOn: true))
+        await fulfillment { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 }
+        audioSession.resetRecords(for: .setMode)
+        audioSession.resetRecords(for: .setCategory)
+        audioSession.resetRecords(for: .overrideOutputAudioPort)
+        audioSession.isUsingSpeakerOutput = true
+
+        subject.didUpdateCallSettings(CallSettings(speakerOn: false))
+
+        await fulfillment { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 }
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                String.self,
+                for: .setMode
+            )?.first,
+            AVAudioSession.Mode.voiceChat.rawValue
+        )
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                (String, AVAudioSession.CategoryOptions).self,
+                for: .setCategory
+            )?.first?.0,
+            audioSession.category
+        )
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                (String, AVAudioSession.CategoryOptions).self,
+                for: .setCategory
+            )?.first?.1,
+            [.allowBluetooth, .allowBluetoothA2DP]
+        )
+        XCTAssertEqual(
+            audioSession.recordedInputPayload(
+                AVAudioSession.PortOverride.self,
+                for: .overrideOutputAudioPort
+            )?.first,
+            AVAudioSession.PortOverride.none
+        )
+    }
+
+    func test_didUpdateCallSettings_withoutChanges_doesNotUpdateAudioSession() async throws {
+        audioSession.isActive = true
+        let callSettings = CallSettings(audioOn: false, videoOn: true)
+        subject.didUpdateCallSettings(callSettings)
+        await fulfillment { self.audioSession.timesCalled(.updateConfiguration) > 0 }
+        audioSession.resetRecords(for: .updateConfiguration)
+
+        subject.didUpdateCallSettings(callSettings)
+
+        XCTAssertEqual(audioSession.timesCalled(.updateConfiguration), 0)
+    }
+
+    // MARK: - Audio Session Delegate Tests
+
+    // MARK: routeUpdate
+
+    func test_audioSessionDidChangeRoute_reasonUnknown_updatesCallSettingsForNewRoute() async {
+        await assertRouteUpdate(
+            initialSpeakerOn: true,
+            reason: .unknown,
+            expectedSpeakerOn: false
+        )
+    }
+
+    func test_audioSessionDidChangeRoute_reasonNewDeviceAvailable_updatesCallSettingsForNewRoute() async {
+        await assertRouteUpdate(
+            initialSpeakerOn: true,
+            reason: .newDeviceAvailable,
+            expectedSpeakerOn: false
+        )
+    }
+
+    func test_audioSessionDidChangeRoute_reasonOverride_updatesCallSettingsForNewRoute() async {
+        await assertRouteUpdate(
+            initialSpeakerOn: true,
+            reason: .override,
+            expectedSpeakerOn: false
+        )
+    }
+
+    func test_audioSessionDidChangeRoute_reasonNoSuitableRouteForCategory_updatesCallSettingsForNewRoute() async {
+        await assertRouteUpdate(
+            initialSpeakerOn: true,
+            reason: .noSuitableRouteForCategory,
+            expectedSpeakerOn: false
+        )
+    }
+
+    // MARK: respectCallSettings
+
+    func test_audioSessionDidChangeRoute_reasonOldDeviceUnavailable_respectsCallSettings() async {
+        await assertRespectCallSettings(
+            callSettingsSpeakerOn: true,
+            reason: .oldDeviceUnavailable,
+            isUsingSpeakerOutput: false,
+            expectedMode: .videoChat,
+            expectedCategoryOptions: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP],
+            expectedOverrideOutputAudioPort: .speaker
+        )
+    }
+
+    func test_audioSessionDidChangeRoute_reasonCategoryChange_respectsCallSettings() async {
+        await assertRespectCallSettings(
+            callSettingsSpeakerOn: true,
+            reason: .categoryChange,
+            isUsingSpeakerOutput: false,
+            expectedMode: .videoChat,
+            expectedCategoryOptions: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP],
+            expectedOverrideOutputAudioPort: .speaker
+        )
+    }
+
+    func test_audioSessionDidChangeRoute_reasonWakeFromSleep_respectsCallSettings() async {
+        await assertRespectCallSettings(
+            callSettingsSpeakerOn: true,
+            reason: .wakeFromSleep,
+            isUsingSpeakerOutput: false,
+            expectedMode: .videoChat,
+            expectedCategoryOptions: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP],
+            expectedOverrideOutputAudioPort: .speaker
+        )
+    }
+
+    func test_audioSessionDidChangeRoute_reasonRouteConfigurationChange_respectsCallSettings() async {
+        await assertRespectCallSettings(
+            callSettingsSpeakerOn: true,
+            reason: .routeConfigurationChange,
+            isUsingSpeakerOutput: false,
+            expectedMode: .videoChat,
+            expectedCategoryOptions: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP],
+            expectedOverrideOutputAudioPort: .speaker
+        )
+    }
+
+    // MARK: - Private Helpers
+
+    private func assertRouteUpdate(
+        initialSpeakerOn: Bool,
+        reason: AVAudioSession.RouteChangeReason,
+        expectedSpeakerOn: Bool,
+        file: StaticString = #file,
+        line: UInt = #line
+    )
async { + subject.didUpdateCallSettings(.init(speakerOn: initialSpeakerOn)) + audioSession.isUsingSpeakerOutput = expectedSpeakerOn + let delegate = MockStreamAudioSessionAdapterDelegate() + subject.delegate = delegate + + subject.audioSessionDidChangeRoute( + .sharedInstance(), + reason: reason, + previousRoute: .init() + ) + + await fulfillment( + file: file, + line: line + ) { delegate.audioSessionAdapterDidUpdateCallSettingsWithCallSettings != nil } + XCTAssertEqual( + delegate.audioSessionAdapterDidUpdateCallSettingsWithCallSettings?.speakerOn, + expectedSpeakerOn, + file: file, + line: line + ) + } + + private func assertRespectCallSettings( + callSettingsSpeakerOn: Bool, + reason: AVAudioSession.RouteChangeReason, + isUsingSpeakerOutput: Bool, + expectedMode: AVAudioSession.Mode, + expectedCategoryOptions: AVAudioSession.CategoryOptions, + expectedOverrideOutputAudioPort: AVAudioSession.PortOverride, + file: StaticString = #file, + line: UInt = #line + ) async { + audioSession.category = .unique + subject.didUpdateCallSettings(.init(speakerOn: callSettingsSpeakerOn)) + audioSession.isUsingSpeakerOutput = isUsingSpeakerOutput + let delegate = MockStreamAudioSessionAdapterDelegate() + subject.delegate = delegate + audioSession.resetRecords(for: .setMode) + audioSession.resetRecords(for: .setCategory) + audioSession.resetRecords(for: .overrideOutputAudioPort) + + subject.audioSessionDidChangeRoute( + .sharedInstance(), + reason: reason, + previousRoute: .init() + ) + + await fulfillment( + file: file, + line: line + ) { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 } + XCTAssertEqual( + audioSession.recordedInputPayload( + String.self, + for: .setMode + )?.first, + expectedMode.rawValue, + file: file, + line: line + ) + XCTAssertEqual( + audioSession.recordedInputPayload( + (String, AVAudioSession.CategoryOptions).self, + for: .setCategory + )?.first?.0, + audioSession.category, + file: file, + line: line + ) + XCTAssertEqual( + audioSession.recordedInputPayload( + (String, AVAudioSession.CategoryOptions).self, + for: .setCategory + )?.first?.1, + expectedCategoryOptions, + file: file, + line: line + ) + XCTAssertEqual( + audioSession.recordedInputPayload( + AVAudioSession.PortOverride.self, + for: .overrideOutputAudioPort + )?.first, + expectedOverrideOutputAudioPort, + file: file, + line: line + ) + } +} + +final class MockStreamAudioSessionAdapterDelegate: StreamAudioSessionAdapterDelegate, @unchecked Sendable { + private(set) var audioSessionAdapterDidUpdateCallSettingsWithCallSettings: CallSettings? + func audioSessionAdapterDidUpdateCallSettings( + _ adapter: StreamAudioSessionAdapter, + callSettings: CallSettings + ) { + audioSessionAdapterDidUpdateCallSettingsWithCallSettings = callSettings + } +} diff --git a/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift b/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift new file mode 100644 index 000000000..3d71c044e --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift @@ -0,0 +1,205 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class StreamRTCAudioSession_Tests: XCTestCase { + + // MARK: - Lazy Properties + + private var rtcAudioSession: RTCAudioSession! = .sharedInstance() + private lazy var subject: StreamRTCAudioSession! 
+final class StreamRTCAudioSession_Tests: XCTestCase {
+
+    // MARK: - Lazy Properties
+
+    private var rtcAudioSession: RTCAudioSession! = .sharedInstance()
+    private lazy var subject: StreamRTCAudioSession! = StreamRTCAudioSession()
+
+    // MARK: - Lifecycle
+
+    override func tearDown() {
+        subject = nil
+        rtcAudioSession = nil
+        super.tearDown()
+    }
+
+    // MARK: - isActive
+
+    func test_isActive_returnsCorrectState() throws {
+        // Given
+        XCTAssertEqual(subject.isActive, rtcAudioSession.isActive)
+
+        // When
+        rtcAudioSession.lockForConfiguration()
+        try rtcAudioSession.setActive(true)
+        rtcAudioSession.unlockForConfiguration()
+
+        // Then
+        XCTAssertTrue(rtcAudioSession.isActive)
+        XCTAssertEqual(subject.isActive, rtcAudioSession.isActive)
+    }
+
+    // MARK: - currentRoute
+
+    func test_currentRoute_returnsCorrectRoute() {
+        XCTAssertEqual(subject.currentRoute, rtcAudioSession.currentRoute)
+    }
+
+    // MARK: - category
+
+    func test_category_returnsCorrectCategory() throws {
+        // When
+        rtcAudioSession.lockForConfiguration()
+        try rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
+        rtcAudioSession.unlockForConfiguration()
+
+        // Then
+        XCTAssertEqual(subject.category, rtcAudioSession.category)
+    }
+
+    // MARK: - isUsingSpeakerOutput
+
+    func test_isUsingSpeakerOutput_returnsCorrectValue() throws {
+        // Given
+        rtcAudioSession.lockForConfiguration()
+        try rtcAudioSession.overrideOutputAudioPort(.speaker)
+        rtcAudioSession.unlockForConfiguration()
+
+        // When
+        let isUsingSpeakerOutput = subject.isUsingSpeakerOutput
+
+        // Then
+        XCTAssertTrue(isUsingSpeakerOutput)
+    }
+
+    // MARK: - useManualAudio
+
+    func test_useManualAudio_setAndGet() {
+        // When
+        subject.useManualAudio = true
+
+        // Then
+        XCTAssertTrue(rtcAudioSession.useManualAudio)
+        XCTAssertEqual(subject.useManualAudio, rtcAudioSession.useManualAudio)
+    }
+
+    // MARK: - isAudioEnabled
+
+    func test_isAudioEnabled_setAndGet() {
+        // When
+        subject.isAudioEnabled = true
+
+        // Then
+        XCTAssertTrue(rtcAudioSession.isAudioEnabled)
+        XCTAssertEqual(subject.isAudioEnabled, rtcAudioSession.isAudioEnabled)
+    }
+
+    // MARK: - addDelegate
+
+    func test_addDelegate() throws {
+        final class MockRTCAudioSessionDelegate: NSObject, RTCAudioSessionDelegate {
+            private(set) var didSetActiveWasCalled: Bool = false
+            func audioSession(_ audioSession: RTCAudioSession, didSetActive active: Bool) { didSetActiveWasCalled = true }
+        }
+
+        // Given
+        let delegate = MockRTCAudioSessionDelegate()
+        subject.add(delegate)
+
+        // When
+        rtcAudioSession.lockForConfiguration()
+        try rtcAudioSession.setActive(true)
+        rtcAudioSession.unlockForConfiguration()
+
+        // Then
+        XCTAssertTrue(delegate.didSetActiveWasCalled)
+    }
+
+    // MARK: - setMode
+
+    func test_setMode_modeUpdatedOnAudioSession() throws {
+        // When
+        rtcAudioSession.lockForConfiguration()
+        try subject.setMode(AVAudioSession.Mode.videoChat.rawValue)
+        rtcAudioSession.unlockForConfiguration()
+
+        // Then
+        XCTAssertEqual(rtcAudioSession.mode, AVAudioSession.Mode.videoChat.rawValue)
+    }
+
+    // MARK: - setCategory
+
+    func test_setCategory_categoryUpdatedOnAudioSession() throws {
+        // When
+        rtcAudioSession.lockForConfiguration()
+        try subject.setCategory(
+            AVAudioSession.Category.playAndRecord.rawValue,
+            with: [.allowBluetooth]
+        )
+        rtcAudioSession.unlockForConfiguration()
+
+        // Then
+        XCTAssertEqual(
+            rtcAudioSession.category,
+            AVAudioSession.Category.playAndRecord.rawValue
+        )
+        XCTAssertEqual(
+            rtcAudioSession.categoryOptions,
+            [.allowBluetooth]
+        )
+    }
+
+    // MARK: - setActive
+
+    func test_setActive_isActiveUpdatedOnAudioSession() throws {
+        // When
+        rtcAudioSession.lockForConfiguration()
+        try subject.setActive(true)
+        rtcAudioSession.unlockForConfiguration()
+
+        // Then
+        XCTAssertTrue(rtcAudioSession.isActive)
+    }
+
+    // MARK: - setConfiguration
+
+    func test_setConfiguration_configurationUpdatedOnAudioSession() throws {
+        // Given
+        rtcAudioSession.lockForConfiguration()
+        let configuration = RTCAudioSessionConfiguration()
+        configuration.category = AVAudioSession.Category.playAndRecord.rawValue
+        configuration.categoryOptions = [.allowBluetooth]
+        configuration.mode = AVAudioSession.Mode.videoChat.rawValue
+
+        // When
+        try subject.setConfiguration(configuration)
+        rtcAudioSession.unlockForConfiguration()
+
+        // Then
+        XCTAssertEqual(rtcAudioSession.mode, AVAudioSession.Mode.videoChat.rawValue)
+        XCTAssertEqual(
+            rtcAudioSession.category,
+            AVAudioSession.Category.playAndRecord.rawValue
+        )
+        XCTAssertEqual(
+            rtcAudioSession.categoryOptions,
+            [.allowBluetooth]
+        )
+    }
+
+    // MARK: - updateConfiguration
+
+    func test_updateConfiguration_executesBlockOnQueue() {
+        // Given
+        let expectation = self.expectation(description: "Configuration updated")
+
+        // When
+        subject.updateConfiguration(
+            functionName: #function,
+            file: #file,
+            line: #line
+        ) { session in
+            try session.setMode(AVAudioSession.Mode.videoChat.rawValue)
+            expectation.fulfill()
+        }
+
+        wait(for: [expectation], timeout: defaultTimeout)
+
+        XCTAssertEqual(rtcAudioSession.mode, AVAudioSession.Mode.videoChat.rawValue)
+    }
+}
diff --git a/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift b/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift
index 82be7688e..2476287fe 100644
--- a/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift
+++ b/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift
@@ -5,7 +5,7 @@
 import AVFoundation
 import Combine
 @testable import StreamVideo
-import XCTest
+@preconcurrency import XCTest
 
 final class StreamAudioRecorderTests: XCTestCase {
 
@@ -13,14 +13,12 @@ final class StreamAudioRecorderTests: XCTestCase {
     private lazy var mockAudioSession: MockAudioSession! = .init()
     private lazy var mockActiveCallProvider: MockStreamActiveCallProvider! = .init()
     private var mockAudioRecorder: MockAudioRecorder!
-    private lazy var subject: StreamCallAudioRecorder! = .init(
-        audioRecorderBuilder: builder,
-        audioSession: mockAudioSession
-    )
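+    /// The recorder now resolves its audio session via the injected
+    /// StreamActiveCallAudioSessionKey (see setUp) instead of an initializer argument.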
+    private lazy var subject: StreamCallAudioRecorder! = .init(audioRecorderBuilder: builder)
 
     override func setUp() async throws {
         try await super.setUp()
         StreamActiveCallProviderKey.currentValue = mockActiveCallProvider
+        StreamActiveCallAudioSessionKey.currentValue = mockAudioSession
         mockAudioRecorder = try .init(
             url: URL(string: "test.wav")!,
             settings: AVAudioRecorderBuilder.defaultRecordingSettings
@@ -44,12 +42,10 @@ final class StreamAudioRecorderTests: XCTestCase {
 
         let actualFileURL = await recorder.audioRecorderBuilder.fileURL.lastPathComponent
 
         XCTAssertTrue(actualFileURL == filename)
-        XCTAssertTrue(recorder.audioSession === AVAudioSession.sharedInstance())
     }
 
     func testInitWithBuilderAndSession_givenCustomBuilderAndSession_whenInitialized_thenUsesProvidedObjects() {
         XCTAssertTrue(subject.audioRecorderBuilder === builder)
-        XCTAssertTrue(subject.audioSession === mockAudioSession)
     }
 
     // MARK: - deinit
@@ -59,10 +55,7 @@ final class StreamAudioRecorderTests: XCTestCase {
         let filename = tempDirectory.appendingPathComponent("test_recording.m4a")
         let mockBuilder = AVAudioRecorderBuilder(cachedResult: try .init(url: filename, settings: [:]))
 
-        var recorder: StreamCallAudioRecorder! = StreamCallAudioRecorder(
-            audioRecorderBuilder: mockBuilder,
-            audioSession: MockAudioSession()
-        )
+        var recorder: StreamCallAudioRecorder! = StreamCallAudioRecorder(audioRecorderBuilder: mockBuilder)
 
         await recorder.startRecording() // Simulate recording
         recorder = nil
@@ -73,7 +66,7 @@ final class StreamAudioRecorderTests: XCTestCase {
     // MARK: - startRecording
 
     func testStartRecording_givenPermissionNotGranted_whenStarted_thenRecordsAndMetersAreNotUpdated() async throws {
-        mockAudioSession.recordPermission = false
+        mockAudioSession.stub(for: .requestRecordPermission, with: false)
         await setUpHasActiveCall(true)
 
         await subject.startRecording()
@@ -82,7 +75,7 @@ final class StreamAudioRecorderTests: XCTestCase {
     }
 
     func testStartRecording_givenPermissionGranted_whenStarted_thenRecordsAndMetersUpdates() async throws {
-        mockAudioSession.recordPermission = true
+        mockAudioSession.stub(for: .requestRecordPermission, with: true)
         await setUpHasActiveCall(true)
 
         await subject.startRecording()
@@ -91,7 +84,7 @@ final class StreamAudioRecorderTests: XCTestCase {
     }
 
     func testStartRecording_givenPermissionGrantedButNoActiveCall_whenStarted_thenRecordsAndMetersWontStart() async throws {
-        mockAudioSession.recordPermission = true
+        mockAudioSession.stub(for: .requestRecordPermission, with: true)
 
         await subject.startRecording()
 
@@ -100,7 +93,7 @@ final class StreamAudioRecorderTests: XCTestCase {
 
     func testStartRecording_givenPermissionGrantedButNoActiveCall_whenIgnoreActiveCallAndStarted_thenRecordsAndMetersUpdates(
     ) async throws {
-        mockAudioSession.recordPermission = true
+        mockAudioSession.stub(for: .requestRecordPermission, with: true)
 
         await subject.startRecording(ignoreActiveCall: true)
 
@@ -110,7 +103,7 @@ final class StreamAudioRecorderTests: XCTestCase {
     // MARK: - stopRecording
 
     func testStopRecording_givenRecording_whenStopped_thenStopsRecording() async throws {
-        mockAudioSession.recordPermission = true
+        mockAudioSession.stub(for: .requestRecordPermission, with: true)
         await setUpHasActiveCall(true)
         await subject.startRecording()
 
@@ -122,7 +115,7 @@ final class StreamAudioRecorderTests: XCTestCase {
     // MARK: - activeCall ended
 
     func test_activeCallEnded_givenAnActiveCallAndRecordingTrue_whenActiveCallEnds_thenStopsRecording() async throws {
-        mockAudioSession.recordPermission = true
+        mockAudioSession.stub(for: .requestRecordPermission, with: true)
         await setUpHasActiveCall(true)
         await subject.startRecording()
 
@@ -135,7 +128,7 @@ final class StreamAudioRecorderTests: XCTestCase {
 
     func test_activeCallEnded_givenAnActiveCallAndRecordingTrue_whenActiveCallEndsAndAnotherOneStarts_thenStartsRecording(
     ) async throws {
-        mockAudioSession.recordPermission = true
+        mockAudioSession.stub(for: .requestRecordPermission, with: true)
         await setUpHasActiveCall(true)
         await subject.startRecording()
         await setUpHasActiveCall(false)
@@ -209,25 +202,6 @@ final class StreamAudioRecorderTests: XCTestCase {
     }
 }
 
-private class MockAudioSession: AudioSessionProtocol {
-
-    var category: AVAudioSession.Category = .playback
-    var active = false
-    var recordPermission = false
-
-    func setCategory(_ category: AVAudioSession.Category) throws {
-        self.category = category
-    }
-
-    func setActive(_ active: Bool, options: AVAudioSession.SetActiveOptions = []) throws {
-        self.active = active
-    }
-
-    func requestRecordPermission() async -> Bool {
-        recordPermission
-    }
-}
-
 private class MockStreamActiveCallProvider: StreamActiveCallProviding {
 
     private var _activeCallSubject = PassthroughSubject()
diff --git a/StreamVideoTests/WebRTC/AudioSession_Tests.swift b/StreamVideoTests/WebRTC/AudioSession_Tests.swift
new file mode 100644
index 000000000..f38509805
--- /dev/null
+++ b/StreamVideoTests/WebRTC/AudioSession_Tests.swift
@@ -0,0 +1,159 @@
+//
+// Copyright © 2024 Stream.io Inc. All rights reserved.
+//
+
+@testable import StreamVideo
+import StreamWebRTC
+import XCTest
+
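+/// Placeholder suite for StreamAudioSessionAdapter; all cases below are currently
+/// commented out.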
+final class AudioSession_Tests: XCTestCase {
+
+//    private lazy var subject: StreamAudioSessionAdapter! = StreamAudioSessionAdapter()
+//    private lazy var rtcAudioSession: RTCAudioSession! = .sharedInstance()
+//
+//    private var updatedCallSettings: CallSettings?
+//    private var didReceiveUpdateCallSettings: Bool = false
+
+    // MARK: - Lifecycle
+
+//    override func setUp() {
+//        super.setUp()
+//        subject.delegate = self
+//    }
+
+//    override func tearDown() {
+//        subject = nil
+//        rtcAudioSession = nil
+//        updatedCallSettings = nil
+//        super.tearDown()
+//    }
+
+//    // MARK: - StreamAudioSessionAdapterDelegate
+//
+//    func audioSessionAdapterDidUpdateCallSettings(
+//        _ adapter: StreamAudioSessionAdapter,
+//        callSettings: CallSettings
+//    ) {
+//        didReceiveUpdateCallSettings = true
+//        updatedCallSettings = callSettings
+//    }
+
+    // MARK: - didUpdateCallSettings
+
+//    func test_didUpdateCallSettings_updatesActiveCallSettings() {
+//        // Given
+//        let callSettings = CallSettings(speakerOn: true, audioOutputOn: true)
+//
+//        // When
+//        subject.didUpdateCallSettings(callSettings)
+//
+//        // Then
+//        XCTAssertEqual(subject.activeCallSettings, callSettings)
+//    }
+
+//    func test_didUpdateCallSettings_respectsCallSettingsIfAlreadyActive() {
+//        // Given
+//        let initialSettings = CallSettings(speakerOn: true, audioOutputOn: true)
+//        subject.didUpdateCallSettings(initialSettings)
+//        let newSettings = initialSettings // No change
+//
+//        // When
+//        subject.didUpdateCallSettings(newSettings)
+//
+//        // Then
+//        XCTAssertEqual(subject.activeCallSettings, initialSettings)
+//        XCTAssertFalse(didReceiveUpdateCallSettings)
+//    }
+
+    // MARK: - audioSessionDidChangeRoute
+
+//    func test_audioSessionDidChangeRoute_updatesRouteOnNewDeviceAvailable() {
+//        // Given
+//        let previousRoute = AVAudioSessionRouteDescription()
+//        let callSettings = CallSettings(speakerOn: true, audioOutputOn: true)
+//        subject.didUpdateCallSettings(callSettings)
+//
+//        // When
+//        subject.audioSessionDidChangeRoute(
+//            rtcAudioSession,
+//            reason: .newDeviceAvailable,
+//            previousRoute: previousRoute
+//        )
+//
+//        // Then
+//        XCTAssertNotNil(updatedCallSettings)
+//    }
+
+//    func test_audioSessionDidChangeRoute_respectsCallSettingsOnOldDeviceUnavailable() {
+//        // Given
+//        let previousRoute = AVAudioSessionRouteDescription()
+//        let callSettings = CallSettings(speakerOn: true, audioOutputOn: true)
+//        subject.didUpdateCallSettings(callSettings)
+//
+//        // When
+//        subject.audioSessionDidChangeRoute(
+//            rtcAudioSession,
+//            reason: .oldDeviceUnavailable,
+//            previousRoute: previousRoute
+//        )
+//
+//        // Then
+//        XCTAssertEqual(updatedCallSettings?.speakerOn, callSettings.speakerOn)
+//    }
+
+    // MARK: - audioSession(didChangeCanPlayOrRecord:)
+
+//    func test_audioSession_didChangeCanPlayOrRecord_logsCorrectly() {
+//        // When
+//        subject.audioSession(
+//            mockAudioSession,
+//            didChangeCanPlayOrRecord: true
+//        )
+//
+//        // Then
+//        XCTAssertTrue(mockAudioSession.loggedInfo.contains("can playOrRecord:true"))
+//    }
+
+    // MARK: - audioSessionDidStopPlayOrRecord
+
+//    func test_audioSessionDidStopPlayOrRecord_logsCorrectly() {
+//        // When
+//        subject.audioSessionDidStopPlayOrRecord(mockAudioSession)
+//
+//        // Then
+//        XCTAssertTrue(mockAudioSession.loggedInfo.contains("cannot playOrRecord"))
+//    }
+
+    // MARK: - audioSession(didSetActive:)
+
+//    func test_audioSession_didSetActive_appliesCorrectCallSettings() {
+//        // Given
+//        let callSettings = CallSettings(speakerOn: true, audioOutputOn: true)
+//        subject.didUpdateCallSettings(callSettings)
+//
+//        // When
+//        subject.audioSession(
+//            rtcAudioSession,
+//            didSetActive: true
+//        )
+//
+//        // Then
+//        XCTAssertEqual(updatedCallSettings?.speakerOn, callSettings.speakerOn)
+//    }
+
+    // MARK: - Private Helpers
+
+//    func test_performAudioSessionOperation_executesOperationOnProcessingQueue() {
+//        // Given
+//        let expectation = self.expectation(description: "Operation executed")
+//
+//        // When
+//        subject.performAudioSessionOperation { _ in
+//            expectation.fulfill()
+//        }
+//
+//        // Then
+//        waitForExpectations(timeout: 1.0)
+//    }
+}
diff --git a/StreamVideoTests/WebRTC/WebRTCClient_Tests.swift b/StreamVideoTests/WebRTC/WebRTCClient_Tests.swift
deleted file mode 100644
index a9eca8c74..000000000
--- a/StreamVideoTests/WebRTC/WebRTCClient_Tests.swift
+++ /dev/null
@@ -1,836 +0,0 @@
-//
-// Copyright © 2024 Stream.io Inc. All rights reserved.
-//
-
-////
-//// Copyright © 2024 Stream.io Inc. All rights reserved.
-////
-//
-// @testable import StreamVideo
-// @preconcurrency import StreamWebRTC
-// import XCTest
-//
-// final class WebRTCClient_Tests: StreamVideoTestCase {
-//
-//    private let callCid = "default:123"
-//    private let sessionId = "123"
-//    private let userId = "martin"
-//    private let callParticipant = CallParticipant(
-//        id: "123",
-//        userId: "123",
-//        roles: [],
-//        name: "Test",
-//        profileImageURL: nil,
-//        trackLookupPrefix: nil,
-//        hasVideo: false,
-//        hasAudio: true,
-//        isScreenSharing: false,
-//        showTrack: false,
-//        isDominantSpeaker: false,
-//        sessionId: "123",
-//        connectionQuality: .excellent,
-//        joinedAt: Date(),
-//        audioLevel: 0,
-//        audioLevels: [],
-//        pin: nil
-//    )
-//
-//    let mockResponseBuilder = MockResponseBuilder()
-//
-//    private lazy var participant: Stream_Video_Sfu_Models_Participant = {
-//        var participant = Stream_Video_Sfu_Models_Participant()
-//        participant.userID = userId
-//        participant.sessionID = sessionId
-//        participant.name = "Test"
-//        return participant
-//    }()
-//
-//    private lazy var participantJoined: Stream_Video_Sfu_Event_ParticipantJoined = {
-//        var participantJoined = Stream_Video_Sfu_Event_ParticipantJoined()
-//        participantJoined.callCid = callCid
-//        participantJoined.participant = participant
-//        return participantJoined
-//    }()
-//
-//    private lazy var factory: PeerConnectionFactory! = PeerConnectionFactory(audioProcessingModule: MockAudioProcessingModule())
-//    private var webRTCClient: WebRTCClient!
-//    private var tracks: Set<RTCVideoTrack> = []
-//
-//    // MARK: - Lifecycle
-//
-//    override func tearDown() {
-//        tracks.forEach { $0.isEnabled = false }
-//        factory = nil
-//        webRTCClient = nil
-//        super.tearDown()
-//    }
-//
-//    // MARK: init
-//
-//    func test_webRTCClient_init_signalChannelIsUsingTheExpectedConnectURL() {
-//        // Given
-//        webRTCClient = makeWebRTCClient(ownCapabilities: [.sendAudio, .sendVideo])
-//
-//        // Then
-//        XCTAssertEqual(webRTCClient.sfuAdapter?.connectURL.absoluteString, "wss://test.com/ws")
-//    }
-//
-////    func test_webRTCClient_connectionFlow(
-////        ownCapabilities: [OwnCapability] = [.sendAudio, .sendVideo],
-////        migrating: Bool = false
-////    ) async throws {
-////        // Given
-////        webRTCClient = makeWebRTCClient(ownCapabilities: ownCapabilities)
-////        if migrating {
-////            webRTCClient.sfuAdapter?.connect()
-////            webRTCClient.prepareForMigration(
-////                url: "https://test.com",
-////                token: "123",
-////                webSocketURL: "ws://test/com",
-////                fromSfuName: "sfu-1"
-////            )
-////        }
-////
-////        // When
-////        try await webRTCClient.connect(
-////            callSettings: CallSettings(),
-////            videoOptions: VideoOptions(),
-////            connectOptions: ConnectOptions(iceServers: []),
-////            migrating: migrating
-////        )
-////
-////        // Then
-////        var state = await webRTCClient.state.connectionState
-////        XCTAssert(state == .connecting)
-////
-////        // When
-////        let engine = webRTCClient.signalChannel?.engine as! WebSocketEngine_Mock
-////        engine.simulateConnectionSuccess()
-////
-////        // Then
-////        // Connection flow is not finished until join response arrives.
-////        state = await webRTCClient.state.connectionState
-////        XCTAssert(state == .connecting)
-////
-////        // When
-////        let eventNotificationCenter = webRTCClient.eventNotificationCenter
-////        let event = Stream_Video_Sfu_Event_JoinResponse()
-////        eventNotificationCenter.process(.sfuEvent(.joinResponse(event)))
-////        try await waitForCallEvent()
-////
-////        // Then
-////        state = await webRTCClient.state.connectionState
-////        XCTAssert(state == .connected)
-////    }
-//
-////    func test_webRTCClient_migration() async throws {
-////        try await test_webRTCClient_connectionFlow(migrating: true)
-////    }
-////
-////    func test_webRTCClient_defaultCallCapabilities() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow()
-////
-////        // Then
-////        XCTAssert(webRTCClient.localAudioTrack != nil)
-////        XCTAssert(webRTCClient.localVideoTrack != nil)
-////    }
-////
-////    func test_webRTCClient_callCapabilitiesNoAudioAndVideo() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow(ownCapabilities: [])
-////
-////        // Then
-////        XCTAssert(webRTCClient.localAudioTrack == nil)
-////        XCTAssert(webRTCClient.localVideoTrack == nil)
-////    }
-////
-////    func test_webRTCClient_cleanup() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow()
-////
-////        // Then
-////        XCTAssert(webRTCClient.localAudioTrack != nil)
-////        XCTAssert(webRTCClient.localVideoTrack != nil)
-////
-////        // When
-////        await webRTCClient.cleanUp()
-////
-////        // Then
-////        XCTAssert(webRTCClient.localAudioTrack == nil)
-////        XCTAssert(webRTCClient.localVideoTrack == nil)
-////    }
-////
-////    func test_webRTCClient_assignTracksMatchingTrackLookupPrefix() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow()
-////        var participant = participant.toCallParticipant()
-////        participant.trackLookupPrefix = "test-track"
-////        let track = makeVideoTrack()
-////
-////        // When
-////        await webRTCClient.state.update(tracks: ["test-track": track])
-////        await webRTCClient.state.update(callParticipant: participant)
-////        try await waitForCallEvent()
-////
-////        // Then
-////        let callParticipant = await webRTCClient.state.callParticipants[participant.id]
-////        XCTAssert(callParticipant?.track != nil)
-////    }
-////
-////    func test_webRTCClient_assignTracksMatchingId() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow()
-////        let participant = participant.toCallParticipant()
-////        let track = makeVideoTrack()
-////
-////        // When
-////        await webRTCClient.state.update(tracks: ["123": track])
-////        await webRTCClient.state.update(callParticipant: participant)
-////        try await waitForCallEvent()
-////
-////        // Then
-////        let callParticipant = await webRTCClient.state.callParticipants[participant.id]
-////        XCTAssert(callParticipant?.track != nil)
-////    }
-////
-////    func test_webRTCClient_assignTracksNoMatch() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow()
-////        let participant = participant.toCallParticipant()
-////        let track = makeVideoTrack()
-////
-////        // When
-////        await webRTCClient.state.update(tracks: ["test-track": track])
-////        await webRTCClient.state.update(callParticipant: participant)
-////        try await waitForCallEvent()
-////
-////        // Then
-////        let callParticipant = await webRTCClient.state.callParticipants[participant.id]
-////        XCTAssert(callParticipant?.track == nil)
-////    }
-////
-////    func test_webRTCClient_assignScreenSharingMatchingTrackLookupPrefix() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow()
-////        var participant = participant.toCallParticipant()
-////        participant.trackLookupPrefix = "test-track"
-////        let screensharingTrack = makeVideoTrack()
-////
-////        // When
-////        await webRTCClient.state.update(screensharingTracks: ["test-track": screensharingTrack])
-////        await webRTCClient.state.update(callParticipant: participant)
-////        try await waitForCallEvent()
-////
-////        // Then
-////        let callParticipant = await webRTCClient.state.callParticipants[participant.id]
-////        XCTAssert(callParticipant?.screenshareTrack != nil)
-////    }
-////
-////    func test_webRTCClient_assignScreenSharingMatchingId() async throws {
-////        // Given
-////        try await test_webRTCClient_connectionFlow()
-////        let participant = participant.toCallParticipant()
-////        let screensharingTrack = makeVideoTrack()
-////
-////        // When
-////        await webRTCClient.state.update(screensharingTracks: ["123": screensharingTrack])
-////        await webRTCClient.state.update(callParticipant: participant)
-////        try await waitForCallEvent()
-////
-////        // Then
-////        let callParticipant = await webRTCClient.state.callParticipants[participant.id]
-////        XCTAssert(callParticipant?.screenshareTrack != nil)
-////    }
-//
-//    func test_webRTCClient_participantJoinedAndLeft() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//
-//        // Then
-//        try await waitForCallEvent()
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.userId == userId)
-//
-//        // When
-//        var participantLeft = Stream_Video_Sfu_Event_ParticipantLeft()
-//        participantLeft.callCid = callCid
-//        participantLeft.participant = participant
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantLeft(participantLeft)))
-//
-//        // Then
-//        try await waitForCallEvent()
-//        let left = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNil(left)
-//    }
-//
-//    func test_webRTCClient_participantJoinedAndUpdated() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//
-//        // Then
-//        try await waitForCallEvent()
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssertEqual(newParticipant?.userId, userId)
-//        XCTAssertEqual(newParticipant?.name, "Test")
-//
-//        // When
-//        var participantUpdated = Stream_Video_Sfu_Event_ParticipantUpdated()
-//        participantUpdated.callCid = callCid
-//        var updatedParticipant = participant
-//        updatedParticipant.name = "Test 1"
-//        participantUpdated.participant = updatedParticipant
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantUpdated(participantUpdated)))
-//
-//        // Then
-//        try await waitForCallEvent()
-//        let updated = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(updated)
-//        XCTAssertEqual(updated?.userId, userId)
-//        XCTAssertEqual(updated?.name, "Test 1")
-//    }
-//
-//    func test_webRTCClient_dominantSpeakerChanged() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var dominantSpeakerChanged = Stream_Video_Sfu_Event_DominantSpeakerChanged()
-//        dominantSpeakerChanged.sessionID = sessionId
-//        dominantSpeakerChanged.userID = userId
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.dominantSpeakerChanged(dominantSpeakerChanged)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.isDominantSpeaker == true)
-//    }
-//
-//    func test_webRTCClient_audioLevelsChanged() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var audioLevelsChanged = Stream_Video_Sfu_Event_AudioLevelChanged()
-//        var audioLevel = Stream_Video_Sfu_Event_AudioLevel()
-//        audioLevel.sessionID = sessionId
-//        audioLevel.userID = userId
-//        audioLevel.isSpeaking = true
-//        audioLevelsChanged.audioLevels = [audioLevel]
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.audioLevelChanged(audioLevelsChanged)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.isSpeaking == true)
-//    }
-//
-//    func test_webRTCClient_connectionQualityChanged() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var connectionQualityChanged = Stream_Video_Sfu_Event_ConnectionQualityChanged()
-//        var update = Stream_Video_Sfu_Event_ConnectionQualityInfo()
-//        update.sessionID = sessionId
-//        update.userID = userId
-//        update.connectionQuality = .good
-//        connectionQualityChanged.connectionQualityUpdates = [update]
-//
-//        // When
-//        webRTCClient
-//            .eventNotificationCenter
-//            .process(.sfuEvent(.participantJoined(participantJoined)))
-//        webRTCClient
-//            .eventNotificationCenter
-//            .process(.sfuEvent(.connectionQualityChanged(connectionQualityChanged)))
-//
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.connectionQuality == .good)
-//    }
-//
-//    func test_webRTCClient_joinResponse() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var joinResponse = Stream_Video_Sfu_Event_JoinResponse()
-//        joinResponse.callState.participants = [participant]
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.joinResponse(joinResponse)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.userId == userId)
-//    }
-//
-//    func test_webRTCClient_audioTrackPublished() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var trackPublished = Stream_Video_Sfu_Event_TrackPublished()
-//        trackPublished.sessionID = sessionId
-//        trackPublished.userID = userId
-//        trackPublished.type = .audio
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackPublished(trackPublished)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.hasAudio == true)
-//    }
-//
-//    func test_webRTCClient_videoTrackPublished() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var trackPublished = Stream_Video_Sfu_Event_TrackPublished()
-//        trackPublished.sessionID = sessionId
-//        trackPublished.userID = userId
-//        trackPublished.type = .video
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackPublished(trackPublished)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.hasVideo == true)
-//    }
-//
-//    func test_webRTCClient_screenshareTrackPublished() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var trackPublished = Stream_Video_Sfu_Event_TrackPublished()
-//        trackPublished.sessionID = sessionId
-//        trackPublished.userID = userId
-//        trackPublished.type = .screenShare
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackPublished(trackPublished)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.isScreensharing == true)
-//    }
-//
-//    func test_webRTCClient_audioTrackUnpublished() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var trackUnpublished = Stream_Video_Sfu_Event_TrackUnpublished()
-//        trackUnpublished.sessionID = sessionId
-//        trackUnpublished.userID = userId
-//        trackUnpublished.type = .audio
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackUnpublished(trackUnpublished)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.hasAudio == false)
-//    }
-//
-//    func test_webRTCClient_videoTrackUnpublished() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var trackUnpublished = Stream_Video_Sfu_Event_TrackUnpublished()
-//        trackUnpublished.sessionID = sessionId
-//        trackUnpublished.userID = userId
-//        trackUnpublished.type = .video
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackUnpublished(trackUnpublished)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.hasVideo == false)
-//    }
-//
-//    func test_webRTCClient_screenshareTrackUnpublished() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        var trackUnpublished = Stream_Video_Sfu_Event_TrackUnpublished()
-//        trackUnpublished.sessionID = sessionId
-//        trackUnpublished.userID = userId
-//        trackUnpublished.type = .screenShare
-//
-//        // When
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.participantJoined(participantJoined)))
-//        try await waitForCallEvent()
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackUnpublished(trackUnpublished)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        let newParticipant = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(newParticipant)
-//        XCTAssert(newParticipant?.isScreensharing == false)
-//    }
-//
-//    func test_webRTCClient_changeAudioState() async throws {
-//        // Given
-//        let httpClient = HTTPClient_Mock()
-//        let response = Stream_Video_Sfu_Signal_UpdateMuteStatesResponse()
-//        let data = try response.serializedData()
-//        httpClient.dataResponses = [data]
-//        webRTCClient = makeWebRTCClient(httpClient: httpClient)
-//
-//        // When
-//        try await webRTCClient.connect(
-//            callSettings: CallSettings(),
-//            videoOptions: VideoOptions(),
-//            connectOptions: ConnectOptions(iceServers: [])
-//        )
-//        try await webRTCClient.changeAudioState(isEnabled: false)
-//
-//        // Then
-//        XCTAssert(webRTCClient.callSettings.audioOn == false)
-//    }
-//
-//    func test_webRTCClient_changeVideoState() async throws {
-//        // Given
-//        let httpClient = HTTPClient_Mock()
-//        let response = Stream_Video_Sfu_Signal_UpdateMuteStatesResponse()
-//        let data = try response.serializedData()
-//        httpClient.dataResponses = [data]
-//        webRTCClient = makeWebRTCClient(httpClient: httpClient)
-//
-//        // When
-//        try await webRTCClient.connect(
-//            callSettings: CallSettings(),
-//            videoOptions: VideoOptions(),
-//            connectOptions: ConnectOptions(iceServers: [])
-//        )
-//        try await webRTCClient.changeVideoState(isEnabled: false)
-//
-//        // Then
-//        XCTAssert(webRTCClient.callSettings.videoOn == false)
-//    }
-//
-//    func test_webRTCClient_changeSoundState() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//
-//        // When
-//        try await webRTCClient.changeSoundState(isEnabled: false)
-//
-//        // Then
-//        XCTAssert(webRTCClient.callSettings.audioOutputOn == false)
-//    }
-//
-//    func test_webRTCClient_changeSpeakerState() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//
-//        // When
-//        try await webRTCClient.changeSpeakerState(isEnabled: false)
-//
-//        // Then
-//        XCTAssert(webRTCClient.callSettings.speakerOn == false)
-//    }
-//
-//    func test_webRTCClient_changeTrackVisibility() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        let participants = ["123": callParticipant]
-//        await webRTCClient.state.update(callParticipants: participants)
-//
-//        // When
-//        await webRTCClient.changeTrackVisibility(for: callParticipant, isVisible: true)
-//
-//        // Then
-//        let updated = await webRTCClient.state.callParticipants[callParticipant.sessionId]
-//        XCTAssert(updated?.showTrack == true)
-//    }
-//
-//    func test_webRTCClient_changeTrackVisibilityNonExisting() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//
-//        // When
-//        await webRTCClient.changeTrackVisibility(for: callParticipant, isVisible: true)
-//
-//        // Then
-//        let updated = await webRTCClient.state.callParticipants[callParticipant.sessionId]
-//        XCTAssertNil(updated)
-//    }
-//
-//    func test_webRTCClient_updateTrackSize() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        let participants = ["123": callParticipant]
-//        await webRTCClient.state.update(callParticipants: participants)
-//        let trackSize = CGSize(width: 100, height: 100)
-//
-//        // When
-//        await webRTCClient.updateTrackSize(trackSize, for: callParticipant)
-//
-//        // Then
-//        let updated = await webRTCClient.state.callParticipants[callParticipant.sessionId]
-//        XCTAssert(updated?.trackSize == trackSize)
-//    }
-//
-//    func test_webRTCClient_updateTrackSizeNonExisting() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        let trackSize = CGSize(width: 100, height: 100)
-//
-//        // When
-//        await webRTCClient.updateTrackSize(trackSize, for: callParticipant)
-//
-//        // Then
-//        let updated = await webRTCClient.state.callParticipants[callParticipant.sessionId]
-//        XCTAssertNil(updated)
-//    }
-//
-////    func test_webRTCClient_iceTrickleSubscriber() async throws {
-////        // Given
-////        webRTCClient = makeWebRTCClient()
-////        try await test_webRTCClient_connectionFlow()
-////        let trickleEvent = try makeIceTrickleEvent(peerType: .subscriber)
-////
-////        // When
-////        webRTCClient.eventNotificationCenter.process(.sfuEvent(.iceTrickle(trickleEvent)))
-////        try await waitForCallEvent()
-////
-////        // Then
-////        XCTAssert(webRTCClient.subscriber?.pendingIceCandidates.count == 1)
-////    }
-//
-////    func test_webRTCClient_iceTricklePublisher() async throws {
-////        // Given
-////        webRTCClient = makeWebRTCClient()
-////        try await test_webRTCClient_connectionFlow()
-////        let trickleEvent = try makeIceTrickleEvent(peerType: .publisherUnspecified)
-////
-////        // When
-////        webRTCClient.eventNotificationCenter.process(.sfuEvent(.iceTrickle(trickleEvent)))
-////        try await waitForCallEvent()
-////
-////        // Then
-////        XCTAssert(webRTCClient.publisher?.pendingIceCandidates.count == 1)
-////    }
-//
-//    func test_webRTCClient_changePublishQuality() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        try await test_webRTCClient_connectionFlow()
-//        var event = Stream_Video_Sfu_Event_ChangePublishQuality()
-//        var videoSender = Stream_Video_Sfu_Event_VideoSender()
-//        var layer = Stream_Video_Sfu_Event_VideoLayerSetting()
-//        layer.active = true
-//        layer.name = "test"
-//        videoSender.layers = [layer]
-//        event.videoSenders = [videoSender]
-//        let videoOptions = VideoOptions()
-//        var encodingParams = [RTCRtpEncodingParameters]()
-//        for codec in videoOptions.supportedCodecs {
-//            let encodingParam = RTCRtpEncodingParameters()
-//            encodingParam.rid = codec.quality
-//            encodingParam.maxBitrateBps = (codec.maxBitrate) as NSNumber
-//            if let scaleDownFactor = codec.scaleDownFactor {
-//                encodingParam.scaleResolutionDownBy = (scaleDownFactor) as NSNumber
-//            }
-//            encodingParams.append(encodingParam)
-//        }
-//
-//        // When
-//        let videoTrack = makeVideoTrack()
-//        webRTCClient
-//            .publisher?
-//            .addTransceiver(
-//                videoTrack,
-//                streamIds: ["some-id"],
-//                trackType: .video
-//            )
-//
-//        webRTCClient
-//            .eventNotificationCenter
-//            .process(.sfuEvent(.changePublishQuality(event)))
-//
-//        let expected = encodingParams.map(\.rid)
-//        await fulfillment { [weak webRTCClient] in
-//            let actual = webRTCClient?
-//                .publisher?
-//                .transceiver?
-//                .sender
-//                .parameters
-//                .encodings
-//                .map(\.rid)
-//
-//            return actual == expected
-//        }
-//    }
-//
-//    func test_webRTCClient_screensharingBroadcast() async throws {
-//        try await assert_webRTCClient_screensharing(type: .broadcast)
-//    }
-//
-//    func test_webRTCClient_screensharingInApp() async throws {
-//        try await assert_webRTCClient_screensharing(type: .inApp)
-//    }
-//
-//    func assert_webRTCClient_screensharing(type: ScreensharingType) async throws {
-//        // Given
-//        let httpClient = HTTPClient_Mock()
-//        let response = Stream_Video_Sfu_Signal_UpdateMuteStatesResponse()
-//        for _ in 0..<20 {
-//            let data = try response.serializedData()
-//            httpClient.dataResponses.append(data)
-//        }
-//        webRTCClient = makeWebRTCClient(
-//            ownCapabilities: [.screenshare],
-//            httpClient: httpClient
-//        )
-//        let sessionId = "123"
-//        let participants = [sessionId: callParticipant]
-//        await webRTCClient.state.update(callParticipants: participants)
-//
-//        // When
-//        try await webRTCClient.connect(
-//            callSettings: CallSettings(),
-//            videoOptions: VideoOptions(),
-//            connectOptions: ConnectOptions(iceServers: [])
-//        )
-//        try? await webRTCClient.startScreensharing(type: type)
-//        var event = Stream_Video_Sfu_Event_TrackPublished()
-//        event.sessionID = sessionId
-//        event.type = .screenShare
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackPublished(event)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        var current = await webRTCClient.state.callParticipants
-//        XCTAssert(current.values.first?.isScreensharing == true)
-//
-//        // When
-//        try await webRTCClient.stopScreensharing()
-//        var unpublished = Stream_Video_Sfu_Event_TrackUnpublished()
-//        unpublished.sessionID = sessionId
-//        unpublished.type = .screenShare
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.trackUnpublished(unpublished)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        current = await webRTCClient.state.callParticipants
-//        XCTAssert(current.values.first?.isScreensharing == false)
-//    }
-//
-//    func test_webRTCClient_pinEvents() async throws {
-//        // Given
-//        webRTCClient = makeWebRTCClient()
-//        try await test_webRTCClient_connectionFlow()
-//        let sessionId = "123"
-//        let participants = [sessionId: callParticipant]
-//        await webRTCClient.state.update(callParticipants: participants)
-//
-//        // When
-//        var event = Stream_Video_Sfu_Event_PinsChanged()
-//        var pin = Stream_Video_Sfu_Models_Pin()
-//        pin.sessionID = sessionId
-//        event.pins = [pin]
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.pinsUpdated(event)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        var current = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNotNil(current?.pin)
-//        XCTAssertEqual(current?.pin?.isLocal, false)
-//
-//        // When
-//        event = Stream_Video_Sfu_Event_PinsChanged()
-//        event.pins = []
-//        webRTCClient.eventNotificationCenter.process(.sfuEvent(.pinsUpdated(event)))
-//        try await waitForCallEvent()
-//
-//        // Then
-//        current = await webRTCClient.state.callParticipants[sessionId]
-//        XCTAssertNil(current?.pin)
-//    }
-//
-//    // MARK: - private
-//
-//    func makeWebRTCClient(
-//        ownCapabilities: [OwnCapability] = [],
-//        httpClient: HTTPClient? = nil
-//    ) -> WebRTCClient {
-//        var environment = WebSocketClient.Environment.mock
-//        environment.httpClientBuilder = {
-//            httpClient ?? HTTPClient_Mock()
-//        }
-//
-//        let webRTCClient = WebRTCClient(
-//            user: StreamVideo.mockUser,
-//            apiKey: StreamVideo.apiKey,
-//            hostname: "test.com",
-//            webSocketURLString: "wss://test.com/ws",
-//            token: StreamVideo.mockToken.rawValue,
-//            callCid: callCid,
-//            sessionID: nil,
-//            ownCapabilities: ownCapabilities,
-//            videoConfig: .dummy(),
-//            audioSettings: AudioSettings(
-//                accessRequestEnabled: true,
-//                defaultDevice: .speaker,
-//                micDefaultOn: true,
-//                opusDtxEnabled: true,
-//                redundantCodingEnabled: true,
-//                speakerDefaultOn: true
-//            ),
-//            environment: environment
-//        )
-//        return webRTCClient
-//    }
-//
-//    private func makeIceTrickleEvent(
-//        peerType: Stream_Video_Sfu_Models_PeerType
-//    ) throws -> Stream_Video_Sfu_Models_ICETrickle {
-//        let iceCandidate = try JSONSerialization.data(withJSONObject: ["candidate": "test-sdp"])
-//        let iceCandidateString = String(data: iceCandidate, encoding: .utf8)!
-//        var trickleEvent = Stream_Video_Sfu_Models_ICETrickle()
-//        trickleEvent.iceCandidate = iceCandidateString
-//        trickleEvent.peerType = peerType
-//        return trickleEvent
-//    }
-//
-//    private func makeVideoTrack() -> RTCVideoTrack {
-//        let videoSource = factory.makeVideoSource(forScreenShare: false)
-//        let track = factory.makeVideoTrack(source: videoSource)
-//        tracks.insert(track)
-//        return track
-//    }
-// }
diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift
index c2cd6e886..9407a4f9f 100644
--- a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift
@@ -14,14 +14,13 @@ final class AudioMediaAdapter_Tests: XCTestCase {
     private lazy var mockPeerConnection: MockRTCPeerConnection! = .init()
     private lazy var spySubject: PassthroughSubject! = .init()
     private lazy var mockMediaAdapter: MockLocalMediaAdapter! = .init()
-    private lazy var audioSession: AudioSession! = .init()
+    private lazy var audioSession: StreamAudioSessionAdapter! = .init()
     private lazy var subject: AudioMediaAdapter! = .init(
         sessionID: sessionId,
         peerConnection: mockPeerConnection,
         peerConnectionFactory: peerConnectionFactory,
         localMediaManager: mockMediaAdapter,
-        subject: spySubject,
-        audioSession: audioSession
+        subject: spySubject
     )
 
     override func tearDown() {
@@ -61,24 +60,4 @@ final class AudioMediaAdapter_Tests: XCTestCase {
         )
         XCTAssertEqual(actual, settings)
     }
-
-    // MARK: - didUpdateAudioSessionState(_:)
-
-    func test_didUpdateAudioSessionState_audioSessionWasConfiguredCorrectly() async throws {
-        await subject.didUpdateAudioSessionState(true)
-
-        let isActive = await audioSession.isAudioEnabled
-        XCTAssertTrue(isActive)
-    }
-
-    // MARK: - didUpdateAudioSessionSpeakerState(_:)
-
-    func test_didUpdateAudioSessionSpeakerState_audioSessionWasConfiguredCorrectly() async throws {
-        await subject.didUpdateAudioSessionSpeakerState(true, with: false)
-
-        let isActive = await audioSession.isActive
-        let isSpeakerOn = await audioSession.isSpeakerOn
-        XCTAssertFalse(isActive)
-        XCTAssertTrue(isSpeakerOn)
-    }
 }
diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift
index f5970e626..58a635c72 100644
--- a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift
@@ -16,14 +16,14 @@ final class LocalAudioMediaAdapter_Tests: XCTestCase {
     private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock()
     private lazy var mockPeerConnection: MockRTCPeerConnection! = .init()
     private lazy var mockSFUStack: MockSFUStack! = .init()
-    private lazy var audioSession: AudioSession! = .init()
+    private lazy var audioSession: MockAudioSession! = .init()
+    private lazy var audioSessionAdapter: StreamAudioSessionAdapter! = .init(audioSession)
     private lazy var spySubject: PassthroughSubject! = .init()
     private lazy var subject: LocalAudioMediaAdapter! = .init(
         sessionID: sessionId,
         peerConnection: mockPeerConnection,
         peerConnectionFactory: peerConnectionFactory,
         sfuAdapter: mockSFUStack.adapter,
-        audioSession: audioSession,
         subject: spySubject
     )
 
@@ -33,6 +33,7 @@ final class LocalAudioMediaAdapter_Tests: XCTestCase {
         subject = nil
         spySubject = nil
         audioSession = nil
+        audioSessionAdapter = nil
         mockSFUStack = nil
         mockPeerConnection = nil
         peerConnectionFactory = nil
@@ -153,44 +154,6 @@ final class LocalAudioMediaAdapter_Tests: XCTestCase {
         XCTAssertTrue(request.muteStates[0].muted)
     }
 
-    func test_didUpdateCallSettings_isEnabledFalseCallSettingsTrue_callSettingsUpdatedAudioSession() async throws {
-        try await subject.setUp(
-            with: .init(audioOn: true),
-            ownCapabilities: [.sendAudio]
-        )
-
-        try await subject.didUpdateCallSettings(.init(audioOn: true))
-
-        let isActive = await audioSession.isAudioEnabled
-        XCTAssertTrue(isActive)
-    }
-
-    func test_didUpdateCallSettings_isEnabledTrueCallSettingsFalse_callSettingsUpdatedAudioSession() async throws {
-        try await subject.setUp(
-            with: .init(audioOn: true),
-            ownCapabilities: [.sendAudio]
-        )
-        subject.localTrack?.isEnabled = true
-
-        try await subject.didUpdateCallSettings(.init(audioOn: false))
-
-        let isActive = await audioSession.isActive
-        XCTAssertFalse(isActive)
-    }
-
-    func test_didUpdateCallSettings_isEnabledFalseCallSettingsTrue_startsRecording() async throws {
-        try await subject.setUp(
-            with: .init(audioOn: true),
-            ownCapabilities: [.sendAudio]
-        )
-
-        try await subject.didUpdateCallSettings(.init(audioOn: true))
-
-        await fulfillment { [mockAudioRecorder] in
-            mockAudioRecorder?.stubbedFunctionInput[.startRecording]?.isEmpty == true
-        }
-    }
-
     // MARK: - publish
 
     func test_publish_disabledLocalTrack_enablesAndAddsTrackAndTransceiver() async throws {
@@ -283,6 +246,17 @@ final class LocalAudioMediaAdapter_Tests: XCTestCase {
         }
     }
 
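+    /// Awaits both async autoclosures and asserts the resulting values are equal.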
+    private func assertEqualAsync<T: Equatable>(
+        _ expression: @autoclosure () async throws -> T,
+        _ expected: @autoclosure () async throws -> T,
+        file: StaticString = #file,
+        line: UInt = #line
+    ) async rethrows {
+        let value = try await expression()
+        let expectedValue = try await expected()
+        XCTAssertEqual(value, expectedValue, file: file, line: line)
+    }
+
     private func makeTransceiver(
         of type: TrackType,
         direction: RTCRtpTransceiverDirection = .sendOnly,
diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift
index 69e08c2b1..ed57da81e 100644
--- a/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift
@@ -5,7 +5,7 @@
 import Combine
 @testable import StreamVideo
 import StreamWebRTC
-import XCTest
+@preconcurrency import XCTest
 
 final class RTCPeerConnectionCoordinator_Tests: XCTestCase {
 
@@ -14,7 +14,7 @@ final class RTCPeerConnectionCoordinator_Tests: XCTestCase {
     private lazy var mockPeerConnection: MockRTCPeerConnection! = .init()
     private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock()
     private lazy var mockSFUStack: MockSFUStack! = .init()
-    private lazy var audioSession: AudioSession! = .init()
+    private lazy var audioSession: StreamAudioSessionAdapter! = .init()
     private lazy var spySubject: PassthroughSubject! = .init()
     private lazy var mockLocalMediaAdapterA: MockLocalMediaAdapter! = .init()
     private lazy var mockLocalMediaAdapterB: MockLocalMediaAdapter! = .init()
@@ -24,8 +24,7 @@ final class RTCPeerConnectionCoordinator_Tests: XCTestCase {
         peerConnection: mockPeerConnection,
         peerConnectionFactory: peerConnectionFactory,
         localMediaManager: mockLocalMediaAdapterA,
-        subject: spySubject,
-        audioSession: audioSession
+        subject: spySubject
     )
     private lazy var videoMediaAdapter: VideoMediaAdapter! = .init(
         sessionID: sessionId,
@@ -55,7 +54,6 @@ final class RTCPeerConnectionCoordinator_Tests: XCTestCase {
         callSettings: .init(),
         audioSettings: .dummy(opusDtxEnabled: true, redundantCodingEnabled: true),
         sfuAdapter: mockSFUStack.adapter,
-        audioSession: audioSession,
         mediaAdapter: mediaAdapter
     )
 
diff --git a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift
index 404563991..1a0742239 100644
--- a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift
+++ b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift
@@ -494,16 +494,19 @@ final class WebRTCCoordinatorStateMachine_JoinedStageTests: XCTestCase, @uncheck
             .configurePeerConnections()
         let publisher = await mockCoordinatorStack?.coordinator.stateAdapter.publisher
         let mockPublisher = try XCTUnwrap(publisher as? MockRTCPeerConnectionCoordinator)
+        let updateCallSettings = CallSettings(audioOn: true, videoOn: true)
+        let audioSession = await mockCoordinatorStack.coordinator.stateAdapter.audioSession
 
         await assertResultAfterTrigger(
             trigger: { [mockCoordinatorStack] in
                 await mockCoordinatorStack?
                     .coordinator
                     .stateAdapter
-                    .set(callSettings: CallSettings(audioOn: true, videoOn: true))
+                    .set(callSettings: updateCallSettings)
             }
-        ) { [mockPublisher] expectation in
+        ) { [mockPublisher, audioSession] expectation in
             XCTAssertEqual(mockPublisher.timesCalled(.didUpdateCallSettings), 1)
+            XCTAssertEqual(audioSession.activeCallSettings, updateCallSettings)
             expectation.fulfill()
         }
     }
diff --git a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift
index 227a773d2..52ce4ade0 100644
--- a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift
@@ -36,6 +36,12 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable {
         super.tearDown()
     }
 
+    // MARK: - audioSession
+
+    func test_audioSession_delegateWasSetAsExpected() async throws {
+        await assertTrueAsync(await subject.audioSession.delegate === subject)
+    }
+
     // MARK: - setSessionID
 
     func test_sessionID_shouldNotBeEmptyOnInit() async throws {
@@ -634,6 +640,27 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable {
         }
     }
 
+    // MARK: - audioSessionDidUpdateCallSettings
+
+    func test_audioSessionDidUpdateCallSettings_updatesCallSettingsAsExpected() async {
+        let updatedCallSettings = CallSettings(
+            audioOn: false,
+            videoOn: false,
+            speakerOn: true,
+            audioOutputOn: false,
+            cameraPosition: .back
+        )
+
+        subject.audioSessionAdapterDidUpdateCallSettings(
+            await subject.audioSession,
+            callSettings: updatedCallSettings
+        )
+
+        await fulfillment { [subject] in
+            await subject?.callSettings == updatedCallSettings
+        }
+    }
+
     // MARK: - Private helpers
 
     private func assertNilAsync(