From 00b3ff5c5178e89b4c4ace58e35299e8f589a4ff Mon Sep 17 00:00:00 2001 From: ThibaultBee <37510686+ThibaultBee@users.noreply.github.com> Date: Tue, 18 Jun 2024 11:02:23 +0200 Subject: [PATCH] feat(*): add support for srt --- ApiVideoLiveStream.podspec | 3 +- ApiVideoLiveStream.xcodeproj/project.pbxproj | 56 ++- Cartfile | 1 - Examples/iOS/SettingsManager.swift | 6 +- .../ViewControllers/MainViewController.swift | 16 +- Package.swift | 5 +- .../ApiVideoLiveStream.swift | 383 ++++++------------ .../Configuration.swift | 0 .../Resolution.swift | 0 .../IOStream/IOLiveStream.swift | 294 ++++++++++++++ .../IOStream/LiveStreamProtocol.swift | 25 ++ .../IOStream/RtmpLiveStream.swift | 128 ++++++ .../IOStream/SrtLiveStream.swift | 93 +++++ .../ApiVideoLiveStream/Utils/Extensions.swift | 8 + .../Utils/LiveStreamError.swift | 6 + 15 files changed, 738 insertions(+), 286 deletions(-) delete mode 100644 Cartfile rename Sources/ApiVideoLiveStream/{models => Configuration}/Configuration.swift (100%) rename Sources/ApiVideoLiveStream/{models => Configuration}/Resolution.swift (100%) create mode 100644 Sources/ApiVideoLiveStream/IOStream/IOLiveStream.swift create mode 100644 Sources/ApiVideoLiveStream/IOStream/LiveStreamProtocol.swift create mode 100644 Sources/ApiVideoLiveStream/IOStream/RtmpLiveStream.swift create mode 100644 Sources/ApiVideoLiveStream/IOStream/SrtLiveStream.swift create mode 100644 Sources/ApiVideoLiveStream/Utils/Extensions.swift create mode 100644 Sources/ApiVideoLiveStream/Utils/LiveStreamError.swift diff --git a/ApiVideoLiveStream.podspec b/ApiVideoLiveStream.podspec index da821cb..3bc9a74 100644 --- a/ApiVideoLiveStream.podspec +++ b/ApiVideoLiveStream.podspec @@ -23,6 +23,7 @@ Pod::Spec.new do |spec| spec.source_files = "Sources/**/*.{h,m,swift}" spec.exclude_files = "Sources/Exclude" - spec.dependency "HaishinKit", "1.9.0" + spec.dependency "HaishinKit", "1.9.3" + spec.dependency "SRTHaishinKit", "1.9.3" end diff --git 
a/ApiVideoLiveStream.xcodeproj/project.pbxproj b/ApiVideoLiveStream.xcodeproj/project.pbxproj index dee3141..1e56bdb 100644 --- a/ApiVideoLiveStream.xcodeproj/project.pbxproj +++ b/ApiVideoLiveStream.xcodeproj/project.pbxproj @@ -8,6 +8,13 @@ /* Begin PBXBuildFile section */ 2102CD142BA9E0E700D0EBAD /* ApiVideoLiveStream.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2102CD092BA9E0E700D0EBAD /* ApiVideoLiveStream.framework */; }; + 2192709F2C202CD90028BFFF /* RtmpLiveStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2192709E2C202CD90028BFFF /* RtmpLiveStream.swift */; }; + 219270A12C202D020028BFFF /* LiveStreamProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 219270A02C202D020028BFFF /* LiveStreamProtocol.swift */; }; + 219270A32C202FA30028BFFF /* IOLiveStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 219270A22C202FA30028BFFF /* IOLiveStream.swift */; }; + 219270A52C2039910028BFFF /* SrtLiveStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 219270A42C2039910028BFFF /* SrtLiveStream.swift */; }; + 219270A72C203A360028BFFF /* SRTHaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 219270A62C203A360028BFFF /* SRTHaishinKit */; }; + 219270AA2C2064280028BFFF /* LiveStreamError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 219270A92C2064280028BFFF /* LiveStreamError.swift */; }; + 219270AC2C20644D0028BFFF /* Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 219270AB2C20644D0028BFFF /* Extensions.swift */; }; 21BF485B2C10B54300EAEB5B /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 21BF485A2C10B54300EAEB5B /* HaishinKit */; }; 21BF485E2C10BAAF00EAEB5B /* InAppSettingsKit in Frameworks */ = {isa = PBXBuildFile; productRef = 21BF485D2C10BAAF00EAEB5B /* InAppSettingsKit */; }; 21CCD6A02BBC44EE00E58F5D /* ApiVideoLiveStream.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2102CD092BA9E0E700D0EBAD /* ApiVideoLiveStream.framework */; }; @@ -86,7 +93,12 @@ 
2126C5152BCD4D6E006BEDF3 /* create-documentation-pr.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; path = "create-documentation-pr.yml"; sourceTree = ""; }; 2126C5162BCD4D6E006BEDF3 /* create-release-from-changelog.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; path = "create-release-from-changelog.yml"; sourceTree = ""; }; 2126C5172BCD4D6E006BEDF3 /* release.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; path = release.yml; sourceTree = ""; }; - 21E3585C2C10A27000D856D9 /* Cartfile */ = {isa = PBXFileReference; lastKnownFileType = text; path = Cartfile; sourceTree = ""; }; + 2192709E2C202CD90028BFFF /* RtmpLiveStream.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RtmpLiveStream.swift; sourceTree = ""; }; + 219270A02C202D020028BFFF /* LiveStreamProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LiveStreamProtocol.swift; sourceTree = ""; }; + 219270A22C202FA30028BFFF /* IOLiveStream.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOLiveStream.swift; sourceTree = ""; }; + 219270A42C2039910028BFFF /* SrtLiveStream.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SrtLiveStream.swift; sourceTree = ""; }; + 219270A92C2064280028BFFF /* LiveStreamError.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LiveStreamError.swift; sourceTree = ""; }; + 219270AB2C20644D0028BFFF /* Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Extensions.swift; sourceTree = ""; }; 21E3585D2C10A27000D856D9 /* ApiVideoLiveStream.podspec */ = {isa = PBXFileReference; lastKnownFileType = text; path = ApiVideoLiveStream.podspec; sourceTree = ""; }; 21E358802C10A35300D856D9 /* MainViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MainViewController.swift; sourceTree = ""; }; 
21E358812C10A35300D856D9 /* SettingsViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SettingsViewController.swift; sourceTree = ""; }; @@ -110,6 +122,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 219270A72C203A360028BFFF /* SRTHaishinKit in Frameworks */, 21BF485B2C10B54300EAEB5B /* HaishinKit in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -145,7 +158,6 @@ 2117CBD02BB1EBCA00B85BF0 /* CONTRIBUTING.md */, 2126C5132BCD47DA006BEDF3 /* LICENSE.md */, 2117CBD12BB1EBCA00B85BF0 /* README.md */, - 21E3585C2C10A27000D856D9 /* Cartfile */, 21E3585D2C10A27000D856D9 /* ApiVideoLiveStream.podspec */, 2117CC072BB1F1F900B85BF0 /* Package.swift */, 2102CD0A2BA9E0E700D0EBAD /* Products */, @@ -182,6 +194,26 @@ path = .github; sourceTree = ""; }; + 2192709D2C202C9B0028BFFF /* IOStream */ = { + isa = PBXGroup; + children = ( + 2192709E2C202CD90028BFFF /* RtmpLiveStream.swift */, + 219270A42C2039910028BFFF /* SrtLiveStream.swift */, + 219270A22C202FA30028BFFF /* IOLiveStream.swift */, + 219270A02C202D020028BFFF /* LiveStreamProtocol.swift */, + ); + path = IOStream; + sourceTree = ""; + }; + 219270A82C20640D0028BFFF /* Utils */ = { + isa = PBXGroup; + children = ( + 219270A92C2064280028BFFF /* LiveStreamError.swift */, + 219270AB2C20644D0028BFFF /* Extensions.swift */, + ); + path = Utils; + sourceTree = ""; + }; 21CCD69F2BBC44EE00E58F5D /* Frameworks */ = { isa = PBXGroup; children = ( @@ -239,19 +271,21 @@ path = Tests; sourceTree = ""; }; - 21E358A12C10A39400D856D9 /* models */ = { + 21E358A12C10A39400D856D9 /* Configuration */ = { isa = PBXGroup; children = ( 21E3589F2C10A39400D856D9 /* Configuration.swift */, 21E358A02C10A39400D856D9 /* Resolution.swift */, ); - path = models; + path = Configuration; sourceTree = ""; }; 21E358A32C10A39400D856D9 /* ApiVideoLiveStream */ = { isa = PBXGroup; children = ( - 21E358A12C10A39400D856D9 /* models */, + 219270A82C20640D0028BFFF 
/* Utils */, + 2192709D2C202C9B0028BFFF /* IOStream */, + 21E358A12C10A39400D856D9 /* Configuration */, 21E358A22C10A39400D856D9 /* ApiVideoLiveStream.swift */, ); path = ApiVideoLiveStream; @@ -294,6 +328,7 @@ name = ApiVideoLiveStream; packageProductDependencies = ( 21BF485A2C10B54300EAEB5B /* HaishinKit */, + 219270A62C203A360028BFFF /* SRTHaishinKit */, ); productName = Databus; productReference = 2102CD092BA9E0E700D0EBAD /* ApiVideoLiveStream.framework */; @@ -418,9 +453,15 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 219270A12C202D020028BFFF /* LiveStreamProtocol.swift in Sources */, + 219270AC2C20644D0028BFFF /* Extensions.swift in Sources */, + 219270A32C202FA30028BFFF /* IOLiveStream.swift in Sources */, 21E358A52C10A39400D856D9 /* Configuration.swift in Sources */, 21E358A62C10A39400D856D9 /* Resolution.swift in Sources */, + 219270A52C2039910028BFFF /* SrtLiveStream.swift in Sources */, 21E358A72C10A39400D856D9 /* ApiVideoLiveStream.swift in Sources */, + 219270AA2C2064280028BFFF /* LiveStreamError.swift in Sources */, + 2192709F2C202CD90028BFFF /* RtmpLiveStream.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -846,6 +887,11 @@ /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ + 219270A62C203A360028BFFF /* SRTHaishinKit */ = { + isa = XCSwiftPackageProductDependency; + package = 21E358A82C10A54D00D856D9 /* XCRemoteSwiftPackageReference "HaishinKit" */; + productName = SRTHaishinKit; + }; 21BF485A2C10B54300EAEB5B /* HaishinKit */ = { isa = XCSwiftPackageProductDependency; package = 21E358A82C10A54D00D856D9 /* XCRemoteSwiftPackageReference "HaishinKit" */; diff --git a/Cartfile b/Cartfile deleted file mode 100644 index 2fef846..0000000 --- a/Cartfile +++ /dev/null @@ -1 +0,0 @@ -github "shogo4405/HaishinKit.swift" ~> 1.9.0 diff --git a/Examples/iOS/SettingsManager.swift b/Examples/iOS/SettingsManager.swift index 4406430..c94a4fc 100644 --- 
a/Examples/iOS/SettingsManager.swift +++ b/Examples/iOS/SettingsManager.swift @@ -5,7 +5,11 @@ enum SettingsManager { // MARK: Endpoint static var rtmpUrl: String { - UserDefaults.standard.string(forKey: "RtmpUrl") ?? "rtmp://broadcast.api.video/s/" + UserDefaults.standard.string(forKey: "RtmpUrl") ?? "rtmp://192.168.1.12/s/" + } + + static var srtUrl: String { + UserDefaults.standard.string(forKey: "SrtUrl") ?? "srt://192.168.1.12:9998" } static var streamKey: String { diff --git a/Examples/iOS/ViewControllers/MainViewController.swift b/Examples/iOS/ViewControllers/MainViewController.swift index a18bb8c..83f1207 100644 --- a/Examples/iOS/ViewControllers/MainViewController.swift +++ b/Examples/iOS/ViewControllers/MainViewController.swift @@ -56,13 +56,14 @@ class MainViewController: UIViewController { }() private func callAlert(_ message: String, title: String = "Error", action: @escaping () -> Void = {}) { - let alert = UIAlertController(title: title, message: message, preferredStyle: .alert) - let okAction = UIAlertAction(title: "OK", style: .default) { _ in - action() - } - - alert.addAction(okAction) DispatchQueue.main.async { + let alert = UIAlertController(title: title, message: message, preferredStyle: .alert) + let okAction = UIAlertAction(title: "OK", style: .default) { _ in + action() + } + + alert.addAction(okAction) + self.present(alert, animated: true, completion: nil) } } @@ -149,7 +150,8 @@ class MainViewController: UIViewController { return } - try self.liveStream.startStreaming(streamKey: SettingsManager.streamKey, url: SettingsManager.rtmpUrl) + // try self.liveStream.startStreaming(streamKey: SettingsManager.streamKey, url: SettingsManager.rtmpUrl) + try self.liveStream.startStreaming(streamKey: SettingsManager.streamKey, url: SettingsManager.srtUrl) self.streamingButton.setTitle("Stop", for: []) self.streamingButton.isSelected = true diff --git a/Package.swift b/Package.swift index 0fa8762..4a3bb3d 100644 --- a/Package.swift +++ b/Package.swift 
@@ -18,7 +18,7 @@ let package = Package( dependencies: [ // Dependencies declare other packages that this package depends on. // .package(url: /* package url */, from: "1.0.0"), - .package(url: "https://github.com/shogo4405/HaishinKit.swift", exact: "1.9.0") + .package(url: "https://github.com/shogo4405/HaishinKit.swift", exact: "1.9.3") ], targets: [ // Targets are the basic building blocks of a package. A target can define a module or a test suite. @@ -26,7 +26,8 @@ let package = Package( .target( name: "ApiVideoLiveStream", dependencies: [ - .product(name: "HaishinKit", package: "HaishinKit.swift") + .product(name: "HaishinKit", package: "HaishinKit.swift"), + .product(name: "SRTHaishinKit", package: "HaishinKit.swift") ] ), .testTarget( diff --git a/Sources/ApiVideoLiveStream/ApiVideoLiveStream.swift b/Sources/ApiVideoLiveStream/ApiVideoLiveStream.swift index eef353b..a70e16c 100644 --- a/Sources/ApiVideoLiveStream/ApiVideoLiveStream.swift +++ b/Sources/ApiVideoLiveStream/ApiVideoLiveStream.swift @@ -11,53 +11,51 @@ import UIKit import VideoToolbox public class ApiVideoLiveStream { - private let rtmpStream: RTMPStream - private let rtmpConnection = RTMPConnection() + private let rtmpStream: RtmpLiveStream + private let srtStream: SrtLiveStream - private var streamKey: String = "" - private var url: String = "" + private var currentStream: IOLiveStream - private var isAudioConfigured = false - private var isVideoConfigured = false + private let preview: IOStreamView? /// The delegate of the ApiVideoLiveStream - public weak var delegate: ApiVideoLiveStreamDelegate? + public weak var delegate: ApiVideoLiveStreamDelegate? 
{ + didSet { + self.rtmpStream.delegate = self.delegate + self.srtStream.delegate = self.delegate + } + } /// Getter and Setter for an AudioConfig public var audioConfig: AudioConfig { get { - AudioConfig(bitrate: self.rtmpStream.audioSettings.bitRate) + self.currentStream.audioConfig } set { - self.prepareAudio(audioConfig: newValue) + self.rtmpStream.audioConfig = newValue + self.srtStream.audioConfig = newValue } } /// Getter and Setter for a VideoConfig public var videoConfig: VideoConfig { get { - VideoConfig( - bitrate: Int(self.rtmpStream.videoSettings.bitRate), - resolution: CGSize( - width: Int(self.rtmpStream.videoSettings.videoSize.width), - height: Int(self.rtmpStream.videoSettings.videoSize.height) - ), - fps: self.rtmpStream.frameRate, - gopDuration: TimeInterval(self.rtmpStream.videoSettings.maxKeyFrameIntervalDuration) - ) + self.currentStream.videoConfig } set { - self.prepareVideo(videoConfig: newValue) + self.rtmpStream.videoConfig = newValue + self.srtStream.videoConfig = newValue } } /// Getter and Setter for the Bitrate number for the video public var videoBitrate: Int { get { - self.rtmpStream.videoSettings.bitRate + self.currentStream.videoBitrate } set(newValue) { - self.rtmpStream.videoSettings.bitRate = newValue + self.rtmpStream.videoBitrate = newValue + self.srtStream.videoBitrate = newValue } } @@ -66,33 +64,31 @@ public class ApiVideoLiveStream { /// Camera position public var cameraPosition: AVCaptureDevice.Position { get { - guard let position = rtmpStream.videoCapture(for: 0)?.device?.position else { - return AVCaptureDevice.Position.unspecified - } - return position + self.currentStream.cameraPosition } set(newValue) { - self.attachCamera(newValue) + self.currentStream.cameraPosition = newValue } } /// Camera device public var camera: AVCaptureDevice? 
{ get { - self.rtmpStream.videoCapture(for: 0)?.device + self.currentStream.camera } set(newValue) { - self.attachCamera(newValue) + self.currentStream.camera = newValue } } /// Mutes or unmutes audio capture. public var isMuted: Bool { get { - !self.rtmpStream.audioMixerSettings.isMuted + self.currentStream.isMuted } set(newValue) { - self.rtmpStream.audioMixerSettings.isMuted = !newValue + self.rtmpStream.isMuted = newValue + self.srtStream.isMuted = newValue } } @@ -100,34 +96,23 @@ public class ApiVideoLiveStream { /// Zoom on the video capture public var zoomRatio: CGFloat { get { - guard let device = rtmpStream.videoCapture(for: 0)?.device else { - return 1.0 - } - return device.videoZoomFactor + self.currentStream.zoomRatio } set(newValue) { - guard let device = rtmpStream.videoCapture(for: 0)?.device, newValue >= 1, - newValue < device.activeFormat.videoMaxZoomFactor else - { - return - } - do { - try device.lockForConfiguration() - device.videoZoomFactor = newValue - device.unlockForConfiguration() - } catch let error as NSError { - print("Error while locking device for zoom ramp: \(error)") - } + self.rtmpStream.zoomRatio = newValue + self.srtStream.zoomRatio = newValue } } #endif - /// Creates a new ApiVideoLiveStream object without a preview + /// Creates a new ApiVideoLiveStream object with a IOStreamView /// - Parameters: - /// - initialAudioConfig: The ApiVideoLiveStream's initial AudioConfig - /// - initialVideoConfig: The ApiVideoLiveStream's initial VideoConfig + /// - preview: The IOStreamView where to display the preview of camera. Nil if you don + /// - initialAudioConfig: The ApiVideoLiveStream's new AudioConfig + /// - initialVideoConfig: The ApiVideoLiveStream's new VideoConfig /// - initialCamera: The ApiVideoLiveStream's initial camera device public init( + preview: IOStreamView?, initialAudioConfig: AudioConfig? = AudioConfig(), initialVideoConfig: VideoConfig? = VideoConfig(), initialCamera: AVCaptureDevice? 
= AVCaptureDevice.default( @@ -144,28 +129,27 @@ public class ApiVideoLiveStream { try session.setActive(true) #endif - self.rtmpStream = RTMPStream(connection: self.rtmpConnection) - - // Force default resolution because HK default resolution is not supported (480x272) - self.rtmpStream.videoSettings = VideoCodecSettings(videoSize: .init(width: 1_280, height: 720)) + self.rtmpStream = try RtmpLiveStream( + initialAudioConfig: initialAudioConfig, + initialVideoConfig: initialVideoConfig, + initialCamera: initialCamera + ) - #if os(iOS) - if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) { - self.rtmpStream.videoOrientation = orientation - } - #endif + // Default to RTMP + self.currentStream = self.rtmpStream - if let initialCamera = initialCamera { - self.attachCamera(initialCamera) - } - if let initialVideoConfig = initialVideoConfig { - self.prepareVideo(videoConfig: initialVideoConfig) + // Attach preview + self.preview = preview + if let preview { + self.currentStream.attachPreview(preview) } - self.attachAudio() - if let initialAudioConfig = initialAudioConfig { - self.prepareAudio(audioConfig: initialAudioConfig) - } + // Init SRT later to get the preview quickly + self.srtStream = try SrtLiveStream( + initialAudioConfig: initialAudioConfig, + initialVideoConfig: initialVideoConfig, + initialCamera: nil + ) #if !os(macOS) NotificationCenter.default.addObserver( @@ -176,9 +160,6 @@ public class ApiVideoLiveStream { ) #endif - self.rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(self.rtmpStatusHandler), observer: self) - self.rtmpConnection.addEventListener(.ioError, selector: #selector(self.rtmpErrorHandler), observer: self) - #if os(iOS) NotificationCenter.default.addObserver( self, @@ -187,17 +168,15 @@ public class ApiVideoLiveStream { object: nil ) #endif + } - #if !os(macOS) - /// Creates a new ApiVideoLiveStream object with a UIView as preview + /// Creates a new ApiVideoLiveStream object 
without a preview /// - Parameters: - /// - preview: The UIView where to display the preview of camera - /// - initialAudioConfig: The ApiVideoLiveStream's new AudioConfig - /// - initialVideoConfig: The ApiVideoLiveStream's new VideoConfig + /// - initialAudioConfig: The ApiVideoLiveStream's initial AudioConfig + /// - initialVideoConfig: The ApiVideoLiveStream's initial VideoConfig /// - initialCamera: The ApiVideoLiveStream's initial camera device public convenience init( - preview: UIView, initialAudioConfig: AudioConfig? = AudioConfig(), initialVideoConfig: VideoConfig? = VideoConfig(), initialCamera: AVCaptureDevice? = AVCaptureDevice.default( @@ -207,15 +186,33 @@ public class ApiVideoLiveStream { ) ) throws { try self.init( + preview: nil, initialAudioConfig: initialAudioConfig, initialVideoConfig: initialVideoConfig, initialCamera: initialCamera ) + } + #if !os(macOS) + /// Creates a new ApiVideoLiveStream object with a UIView as preview + /// - Parameters: + /// - preview: The UIView where to display the preview of camera + /// - initialAudioConfig: The ApiVideoLiveStream's new AudioConfig + /// - initialVideoConfig: The ApiVideoLiveStream's new VideoConfig + /// - initialCamera: The ApiVideoLiveStream's initial camera device + public convenience init( + preview: UIView, + initialAudioConfig: AudioConfig? = AudioConfig(), + initialVideoConfig: VideoConfig? = VideoConfig(), + initialCamera: AVCaptureDevice? 
= AVCaptureDevice.default( + .builtInWideAngleCamera, + for: .video, + position: .back + ) + ) throws { let mthkView = MTHKView(frame: preview.bounds) mthkView.translatesAutoresizingMaskIntoConstraints = false mthkView.videoGravity = AVLayerVideoGravity.resizeAspectFill - mthkView.attachStream(self.rtmpStream) preview.addSubview(mthkView) @@ -232,32 +229,15 @@ public class ApiVideoLiveStream { NSLayoutConstraint.activate([ maxWidth, maxHeight, width, height, centerX, centerY ]) - } - #endif - /// Creates a new ApiVideoLiveStream object with a NetStreamDrawable - /// - Parameters: - /// - preview: The NetStreamDrawable where to display the preview of camera - /// - initialAudioConfig: The ApiVideoLiveStream's new AudioConfig - /// - initialVideoConfig: The ApiVideoLiveStream's new VideoConfig - /// - initialCamera: The ApiVideoLiveStream's initial camera device - public convenience init( - preview: IOStreamView, - initialAudioConfig: AudioConfig? = AudioConfig(), - initialVideoConfig: VideoConfig? = VideoConfig(), - initialCamera: AVCaptureDevice? 
= AVCaptureDevice.default( - .builtInWideAngleCamera, - for: .video, - position: .back - ) - ) throws { try self.init( + preview: mthkView as IOStreamView, initialAudioConfig: initialAudioConfig, initialVideoConfig: initialVideoConfig, initialCamera: initialCamera ) - preview.attachStream(self.rtmpStream) } + #endif deinit { #if os(iOS) @@ -266,88 +246,6 @@ public class ApiVideoLiveStream { #if !os(macOS) NotificationCenter.default.removeObserver(self, name: UIApplication.didEnterBackgroundNotification, object: nil) #endif - rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self) - rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self) - } - - private func attachCamera(_ cameraPosition: AVCaptureDevice.Position) { - let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraPosition) - self.attachCamera(camera) - } - - private func attachCamera(_ camera: AVCaptureDevice?) 
{ - self.lastCamera = camera - - self.rtmpStream.attachCamera(camera) { videoCaptureUnit, error in - if let error { - print("======== Camera error ==========") - print(error) - self.delegate?.videoError(error) - return - } - - if let camera { - videoCaptureUnit?.isVideoMirrored = camera.position == .front - } - #if os(iOS) - // videoCaptureUnit.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode - // .auto // Add latency to video - #endif - - guard let device = videoCaptureUnit?.device else { - return - } - self.rtmpStream.lockQueue.async { - do { - try device.lockForConfiguration() - if device.isExposureModeSupported(.continuousAutoExposure) { - device.exposureMode = .continuousAutoExposure - } - if device.isFocusModeSupported(.continuousAutoFocus) { - device.focusMode = .continuousAutoFocus - } - device.unlockForConfiguration() - } catch { - print("Could not lock device for exposure and focus: \(error)") - } - } - } - } - - private func prepareVideo(videoConfig: VideoConfig) { - self.rtmpStream.frameRate = videoConfig.fps - self.rtmpStream.sessionPreset = AVCaptureSession.Preset.high - - let resolution = videoConfig.resolution - let width = self.rtmpStream.videoOrientation - .isLandscape ? max(resolution.width, resolution.height) : min(resolution.width, resolution.height) - let height = self.rtmpStream.videoOrientation - .isLandscape ? 
min(resolution.width, resolution.height) : max(resolution.width, resolution.height) - - self.rtmpStream.videoSettings = VideoCodecSettings( - videoSize: CGSize(width: width, height: height), - bitRate: videoConfig.bitrate, - profileLevel: kVTProfileLevel_H264_Baseline_5_2 as String, - maxKeyFrameIntervalDuration: Int32(videoConfig.gopDuration) - ) - - self.isVideoConfigured = true - } - - private func attachAudio() { - self.rtmpStream.attachAudio(AVCaptureDevice.default(for: AVMediaType.audio)) { _, error in - if let error { - print("======== Audio error ==========") - print(error) - self.delegate?.audioError(error) - } - } - } - - private func prepareAudio(audioConfig: AudioConfig) { - self.rtmpStream.audioSettings.bitRate = audioConfig.bitrate - - self.isAudioConfigured = true } /// Start your livestream @@ -356,108 +254,66 @@ public class ApiVideoLiveStream { /// - url: The url of your rtmp server, by default it's rtmp://broadcast.api.video/s /// - Returns: Void public func startStreaming(streamKey: String, url: String = "rtmp://broadcast.api.video/s") throws { - if streamKey.isEmpty { - throw LiveStreamError.IllegalArgumentError("Stream key must not be empty") - } - if url.isEmpty { - throw LiveStreamError.IllegalArgumentError("URL must not be empty") + if currentStream.isConnected { + throw LiveStreamError.IllegalOperationError("Already streaming") } - if !self.isAudioConfigured || !self.isVideoConfigured { - throw LiveStreamError.IllegalOperationError("Missing audio and/or video configuration") + + guard let parsedUrl = URL(string: url), + let scheme = parsedUrl.scheme else + { + throw LiveStreamError.IllegalArgumentError("Invalid URL: \(url)") } - self.streamKey = streamKey - self.url = url + let currentStream: IOLiveStream + switch scheme { + case "rtmp": + currentStream = self.rtmpStream + case "srt": + currentStream = self.srtStream + default: + throw LiveStreamError.IllegalArgumentError("Invalid scheme: \(scheme)") + } - self.rtmpStream.fcPublishName = 
streamKey - self.rtmpConnection.connect(url) + // Switch stream if necessary + if currentStream !== self.currentStream { + if let preview { + currentStream.camera = self.currentStream.camera + currentStream.attachPreview(preview) + } + self.currentStream = currentStream + } + // TODO: make startStream async + Task { + try await currentStream.startStreaming(streamKey: streamKey, url: url) + } } /// Stop your livestream /// - Returns: Void public func stopStreaming() { - let isConnected = self.rtmpConnection.connected - self.rtmpConnection.close() - if isConnected { - self.delegate?.disconnection() + let isConnected = self.currentStream.isConnected + Task { + await self.currentStream.stopStreaming() + if isConnected { + self.delegate?.disconnection() + } } } public func startPreview() { - guard let lastCamera = lastCamera else { - print("No camera has been set") - return - } - self.attachCamera(lastCamera) - self.attachAudio() + self.currentStream.startPreview() } public func stopPreview() { - self.rtmpStream.attachCamera(nil) - self.rtmpStream.attachAudio(nil) - } - - @objc - private func rtmpStatusHandler(_ notification: Notification) { - let e = Event.from(notification) - guard let data: ASObject = e.data as? ASObject, - let code: String = data["code"] as? String, - let level: String = data["level"] as? 
String else - { - print("rtmpStatusHandler: failed to parse event: \(e)") - return - } - switch code { - case RTMPConnection.Code.connectSuccess.rawValue: - self.rtmpStream.publish(self.streamKey) - - case RTMPStream.Code.publishStart.rawValue: - self.delegate?.connectionSuccess() - - case RTMPConnection.Code.connectClosed.rawValue: - self.delegate?.disconnection() - - default: - if level == "error" { - self.delegate?.connectionFailed(code) - } - } - } - - @objc - private func rtmpErrorHandler(_ notification: Notification) { - let e = Event.from(notification) - print("rtmpErrorHandler: \(e)") - DispatchQueue.main.async { - self.rtmpConnection.connect(self.url) - } + self.currentStream.stopPreview() } #if os(iOS) @objc private func orientationDidChange(_: Notification) { - guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else { - return - } + self.rtmpStream.orientationDidChange() + self.srtStream.orientationDidChange() - self.rtmpStream.lockQueue.async { - self.rtmpStream.videoOrientation = orientation - - let currentVideoSize = self.rtmpStream.videoSettings.videoSize - var newVideoSize: CGSize - if self.rtmpStream.videoOrientation.isLandscape { - newVideoSize = CGSize( - width: max(currentVideoSize.width, currentVideoSize.height), - height: min(currentVideoSize.width, currentVideoSize.height) - ) - } else { - newVideoSize = CGSize( - width: min(currentVideoSize.width, currentVideoSize.height), - height: max(currentVideoSize.width, currentVideoSize.height) - ) - } - self.rtmpStream.videoSettings.videoSize = newVideoSize - } } #endif @@ -485,14 +341,3 @@ public protocol ApiVideoLiveStreamDelegate: AnyObject { /// Called if an error happened during the video configuration func videoError(_ error: Error) } - -extension AVCaptureVideoOrientation { - var isLandscape: Bool { - self == .landscapeLeft || self == .landscapeRight - } -} - -public enum LiveStreamError: Error { - case IllegalArgumentError(String) - case 
import AVFoundation
import Foundation
import HaishinKit
import VideoToolbox
#if !os(macOS)
import UIKit
#endif

/// Base implementation of `LiveStreamProtocol` on top of HaishinKit's `IOStream`.
///
/// Owns the capture side of the pipeline: camera/audio attachment, encoder
/// configuration, zoom, mute and orientation handling. Transport members
/// (`isConnected`, `startStreaming`, `stopStreaming`) are abstract and must be
/// overridden by subclasses (e.g. `RtmpLiveStream`, `SrtLiveStream`).
class IOLiveStream: LiveStreamProtocol {
    private let ioStream: IOStream

    /// True once an `AudioConfig` has been applied via `prepareAudio(audioConfig:)`.
    private(set) var isAudioConfigured = false
    /// True once a `VideoConfig` has been applied via `prepareVideo(videoConfig:)`.
    private(set) var isVideoConfigured = false

    /// The delegate of the ApiVideoLiveStream
    weak var delegate: ApiVideoLiveStreamDelegate?

    /// Getter and Setter for an AudioConfig
    var audioConfig: AudioConfig {
        get {
            AudioConfig(bitrate: self.ioStream.audioSettings.bitRate)
        }
        set {
            self.prepareAudio(audioConfig: newValue)
        }
    }

    /// Getter and Setter for a VideoConfig
    var videoConfig: VideoConfig {
        get {
            VideoConfig(
                bitrate: Int(self.ioStream.videoSettings.bitRate),
                resolution: CGSize(
                    width: Int(self.ioStream.videoSettings.videoSize.width),
                    height: Int(self.ioStream.videoSettings.videoSize.height)
                ),
                fps: self.ioStream.frameRate,
                gopDuration: TimeInterval(self.ioStream.videoSettings.maxKeyFrameIntervalDuration)
            )
        }
        set {
            self.prepareVideo(videoConfig: newValue)
        }
    }

    /// Getter and Setter for the Bitrate number for the video
    var videoBitrate: Int {
        get {
            self.ioStream.videoSettings.bitRate
        }
        set(newValue) {
            self.ioStream.videoSettings.bitRate = newValue
        }
    }

    // Last camera attached; used by `startPreview` to re-attach after `stopPreview`.
    private var lastCamera: AVCaptureDevice?

    /// Camera position
    var cameraPosition: AVCaptureDevice.Position {
        get {
            guard let position = ioStream.videoCapture(for: 0)?.device?.position else {
                return AVCaptureDevice.Position.unspecified
            }
            return position
        }
        set(newValue) {
            self.attachCamera(newValue)
        }
    }

    /// Camera device
    var camera: AVCaptureDevice? {
        get {
            self.ioStream.videoCapture(for: 0)?.device
        }
        set(newValue) {
            self.attachCamera(newValue)
        }
    }

    /// Mutes or unmutes audio capture.
    var isMuted: Bool {
        get {
            !self.ioStream.audioMixerSettings.isMuted
        }
        set(newValue) {
            self.ioStream.audioMixerSettings.isMuted = !newValue
        }
    }

    #if os(iOS)
    /// Zoom on the video capture
    var zoomRatio: CGFloat {
        get {
            guard let device = ioStream.videoCapture(for: 0)?.device else {
                return 1.0
            }
            return device.videoZoomFactor
        }
        set(newValue) {
            // Fix: accept the device's maximum zoom factor itself (was a strict `<`,
            // which silently rejected zooming all the way to the supported maximum).
            guard let device = ioStream.videoCapture(for: 0)?.device, newValue >= 1,
                  newValue <= device.activeFormat.videoMaxZoomFactor else
            {
                return
            }
            do {
                try device.lockForConfiguration()
                device.videoZoomFactor = newValue
                device.unlockForConfiguration()
            } catch let error as NSError {
                print("Error while locking device for zoom ramp: \(error)")
            }
        }
    }
    #endif

    /// Abstract: subclasses report the transport connection state.
    var isConnected: Bool {
        fatalError("Not implemented")
    }

    /// Creates a new ApiVideoLiveStream object without a preview
    /// - Parameters:
    ///   - ioStream: The HaishinKit stream this object drives
    ///   - initialAudioConfig: The ApiVideoLiveStream's initial AudioConfig
    ///   - initialVideoConfig: The ApiVideoLiveStream's initial VideoConfig
    ///   - initialCamera: The ApiVideoLiveStream's initial camera device
    init(
        ioStream: IOStream,
        initialAudioConfig: AudioConfig? = AudioConfig(),
        initialVideoConfig: VideoConfig? = VideoConfig(),
        initialCamera: AVCaptureDevice? = AVCaptureDevice.default(
            .builtInWideAngleCamera,
            for: .video,
            position: .back
        )
    ) throws {
        #if os(iOS)
        let session = AVAudioSession.sharedInstance()

        // https://stackoverflow.com/questions/51010390/avaudiosession-setcategory-swift-4-2-ios-12-play-sound-on-silent
        try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
        try session.setActive(true)
        #endif

        self.ioStream = ioStream

        // Force default resolution because HK default resolution is not supported (480x272)
        self.ioStream.videoSettings = VideoCodecSettings(videoSize: .init(width: 1_280, height: 720))

        if let initialCamera = initialCamera {
            self.attachCamera(initialCamera)
        }
        if let initialVideoConfig = initialVideoConfig {
            self.prepareVideo(videoConfig: initialVideoConfig)
        }

        self.attachAudio()
        if let initialAudioConfig = initialAudioConfig {
            self.prepareAudio(audioConfig: initialAudioConfig)
        }
    }

    // Resolves the default wide-angle camera for `cameraPosition` and attaches it.
    private func attachCamera(_ cameraPosition: AVCaptureDevice.Position) {
        let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraPosition)
        self.attachCamera(camera)
    }

    private func attachCamera(_ camera: AVCaptureDevice?) {
        self.lastCamera = camera

        // Fix: capture self weakly — this completion handler escapes, and a strong
        // capture would keep the stream alive and message its delegate after the
        // owner has been released.
        self.ioStream.attachCamera(camera) { [weak self] videoCaptureUnit, error in
            guard let self else {
                return
            }
            if let error {
                print("======== Camera error ==========")
                print(error)
                self.delegate?.videoError(error)
                return
            }

            if let camera {
                // Mirror the preview for the selfie camera.
                videoCaptureUnit?.isVideoMirrored = camera.position == .front
            }
            #if os(iOS)
            // videoCaptureUnit.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode
            //     .auto // Add latency to video
            #endif

            guard let device = videoCaptureUnit?.device else {
                return
            }
            self.ioStream.lockQueue.async {
                do {
                    try device.lockForConfiguration()
                    if device.isExposureModeSupported(.continuousAutoExposure) {
                        device.exposureMode = .continuousAutoExposure
                    }
                    if device.isFocusModeSupported(.continuousAutoFocus) {
                        device.focusMode = .continuousAutoFocus
                    }
                    device.unlockForConfiguration()
                } catch {
                    print("Could not lock device for exposure and focus: \(error)")
                }
            }
        }
    }

    // Applies `videoConfig` to the encoder, swapping width/height so the encoded
    // size matches the current video orientation.
    private func prepareVideo(videoConfig: VideoConfig) {
        self.ioStream.frameRate = videoConfig.fps
        self.ioStream.sessionPreset = AVCaptureSession.Preset.high

        let resolution = videoConfig.resolution
        let width = self.ioStream.videoOrientation
            .isLandscape ? max(resolution.width, resolution.height) : min(resolution.width, resolution.height)
        let height = self.ioStream.videoOrientation
            .isLandscape ? min(resolution.width, resolution.height) : max(resolution.width, resolution.height)

        self.ioStream.videoSettings = VideoCodecSettings(
            videoSize: CGSize(width: width, height: height),
            bitRate: videoConfig.bitrate,
            profileLevel: kVTProfileLevel_H264_Baseline_5_2 as String,
            maxKeyFrameIntervalDuration: Int32(videoConfig.gopDuration)
        )

        self.isVideoConfigured = true
    }

    private func attachAudio() {
        // Fix: weak capture for the same reason as `attachCamera(_:)`.
        self.ioStream.attachAudio(AVCaptureDevice.default(for: AVMediaType.audio)) { [weak self] _, error in
            if let error {
                print("======== Audio error ==========")
                print(error)
                self?.delegate?.audioError(error)
            }
        }
    }

    // Applies `audioConfig` to the audio encoder.
    func prepareAudio(audioConfig: AudioConfig) {
        self.ioStream.audioSettings.bitRate = audioConfig.bitrate

        self.isAudioConfigured = true
    }

    // Binds the underlying stream to a preview view.
    func attachPreview(_ view: IOStreamView) {
        view.attachStream(self.ioStream)
    }

    // Re-attaches the last used camera and the default microphone.
    func startPreview() {
        guard let lastCamera = lastCamera else {
            print("No camera has been set")
            return
        }
        self.attachCamera(lastCamera)
        self.attachAudio()
    }

    // Detaches camera and microphone, stopping capture.
    func stopPreview() {
        self.ioStream.attachCamera(nil)
        self.ioStream.attachAudio(nil)
    }

    /// Abstract: subclasses implement the actual transport.
    func startStreaming(streamKey _: String, url _: String) throws {
        fatalError("Not implemented")
    }

    /// Abstract: subclasses implement the actual transport.
    func stopStreaming() {
        fatalError("Not implemented")
    }

    #if os(iOS)
    func orientationDidChange() {
        guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else {
            return
        }

        self.ioStream.lockQueue.async {
            self.ioStream.videoOrientation = orientation

            // Keep the encoded video size consistent with the new orientation.
            let currentVideoSize = self.ioStream.videoSettings.videoSize
            var newVideoSize: CGSize
            if self.ioStream.videoOrientation.isLandscape {
                newVideoSize = CGSize(
                    width: max(currentVideoSize.width, currentVideoSize.height),
                    height: min(currentVideoSize.width, currentVideoSize.height)
                )
            } else {
                newVideoSize = CGSize(
                    width: min(currentVideoSize.width, currentVideoSize.height),
                    height: max(currentVideoSize.width, currentVideoSize.height)
                )
            }
            self.ioStream.videoSettings.videoSize = newVideoSize
        }
    }
    #endif
}
import AVFoundation
import Foundation
import HaishinKit

/// Contract shared by every transport-specific live stream (e.g. RTMP, SRT).
protocol LiveStreamProtocol {
    /// Audio encoder configuration.
    var audioConfig: AudioConfig { get set }
    /// Video encoder configuration.
    var videoConfig: VideoConfig { get set }
    /// Video bitrate (see `VideoConfig.bitrate` for the unit).
    var videoBitrate: Int { get set }
    /// Position of the currently attached camera.
    var cameraPosition: AVCaptureDevice.Position { get set }
    /// Currently attached camera device, if any.
    var camera: AVCaptureDevice? { get set }
    /// Mutes or unmutes audio capture.
    var isMuted: Bool { get set }
    /// Whether the transport is currently connected to the remote server.
    var isConnected: Bool { get }
    #if os(iOS)
    /// Zoom factor applied to the video capture.
    var zoomRatio: CGFloat { get set }
    /// Notifies the stream that the device orientation changed.
    func orientationDidChange()
    #endif

    /// Binds the stream to a preview view.
    func attachPreview(_ view: IOStreamView)

    /// Starts streaming to `url` with `streamKey`.
    func startStreaming(streamKey: String, url: String) throws
    /// Stops streaming.
    func stopStreaming()

    /// Resumes capture after `stopPreview()`.
    func startPreview()
    /// Detaches camera and audio capture.
    func stopPreview()
}
import AVFoundation
import Foundation
import HaishinKit
#if !os(macOS)
import UIKit
#endif

/// RTMP transport implementation of the live stream.
class RtmpLiveStream: IOLiveStream {
    private let stream: RTMPStream
    private let connection = RTMPConnection()

    // Remembered so `rtmpStatusHandler` can publish after connect and
    // `rtmpErrorHandler` can retry the connection.
    private var streamKey: String = ""
    private var url: String = ""

    override var isConnected: Bool {
        self.connection.connected
    }

    /// Creates a new ApiVideoLiveStream object without a preview
    /// - Parameters:
    ///   - initialAudioConfig: The ApiVideoLiveStream's initial AudioConfig
    ///   - initialVideoConfig: The ApiVideoLiveStream's initial VideoConfig
    ///   - initialCamera: The ApiVideoLiveStream's initial camera device
    init(
        initialAudioConfig: AudioConfig? = AudioConfig(),
        initialVideoConfig: VideoConfig? = VideoConfig(),
        initialCamera: AVCaptureDevice? = AVCaptureDevice.default(
            .builtInWideAngleCamera,
            for: .video,
            position: .back
        )
    ) throws {
        self.stream = RTMPStream(connection: self.connection)

        try super.init(
            ioStream: self.stream,
            initialAudioConfig: initialAudioConfig,
            initialVideoConfig: initialVideoConfig,
            initialCamera: initialCamera
        )
    }

    deinit {
        connection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
        connection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
    }

    /// Start your livestream
    /// - Parameters:
    ///   - streamKey: The key of your live
    ///   - url: The url of your rtmp server, by default it's rtmp://broadcast.api.video/s
    /// - Returns: Void
    override func startStreaming(streamKey: String, url: String = "rtmp://broadcast.api.video/s") throws {
        if streamKey.isEmpty {
            throw LiveStreamError.IllegalArgumentError("Stream key must not be empty")
        }
        if url.isEmpty {
            throw LiveStreamError.IllegalArgumentError("URL must not be empty")
        }
        if !self.isAudioConfigured || !self.isVideoConfigured {
            throw LiveStreamError.IllegalOperationError("Missing audio and/or video configuration")
        }

        self.streamKey = streamKey
        self.url = url

        self.stream.fcPublishName = streamKey

        Task {
            // Fix: drop any previously registered listeners first, so that calling
            // `startStreaming` more than once does not stack duplicate handlers
            // (each duplicate would publish / notify the delegate again).
            self.connection.removeEventListener(
                .rtmpStatus, selector: #selector(self.rtmpStatusHandler), observer: self
            )
            self.connection.removeEventListener(
                .ioError, selector: #selector(self.rtmpErrorHandler), observer: self
            )
            self.connection.addEventListener(
                .rtmpStatus, selector: #selector(self.rtmpStatusHandler), observer: self
            )
            self.connection.addEventListener(
                .ioError, selector: #selector(self.rtmpErrorHandler), observer: self
            )

            self.connection.connect(url)
        }
    }

    /// Stop your livestream
    /// - Returns: Void
    override func stopStreaming() {
        // Snapshot before the async close so the delegate is only notified when
        // there actually was a live connection.
        let isConnected = isConnected

        Task {
            self.connection.close()
            connection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
            connection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)

            if isConnected {
                self.delegate?.disconnection()
            }
        }
    }

    // Dispatches RTMP status events: publishes on connect, forwards publish
    // success / disconnection / errors to the delegate.
    @objc
    private func rtmpStatusHandler(_ notification: Notification) {
        let e = Event.from(notification)
        guard let data: ASObject = e.data as? ASObject,
              let code: String = data["code"] as? String,
              let level: String = data["level"] as? String else
        {
            print("rtmpStatusHandler: failed to parse event: \(e)")
            return
        }
        switch code {
        case RTMPConnection.Code.connectSuccess.rawValue:
            self.stream.publish(self.streamKey)

        case RTMPStream.Code.publishStart.rawValue:
            self.delegate?.connectionSuccess()

        case RTMPConnection.Code.connectClosed.rawValue:
            self.delegate?.disconnection()

        default:
            if level == "error" {
                self.delegate?.connectionFailed(code)
            }
        }
    }

    // On an IO error, retries the connection with the last known URL.
    // NOTE(review): this retries unconditionally with no backoff or cap — confirm
    // whether a retry limit is wanted.
    @objc
    private func rtmpErrorHandler(_ notification: Notification) {
        let e = Event.from(notification)
        print("rtmpErrorHandler: \(e)")
        DispatchQueue.main.async {
            self.connection.connect(self.url)
        }
    }
}
import AVFoundation
import Foundation
import SRTHaishinKit

/// SRT transport implementation of the live stream.
class SrtLiveStream: IOLiveStream {
    private let stream: SRTStream
    private let connection = SRTConnection()
    // Retained so the `connected` KVO subscription stays alive for this object's lifetime.
    private var keyValueObservations: [NSKeyValueObservation] = []

    override var isConnected: Bool {
        self.connection.connected
    }

    /// Creates a new ApiVideoLiveStream object without a preview
    /// - Parameters:
    ///   - initialAudioConfig: The ApiVideoLiveStream's initial AudioConfig
    ///   - initialVideoConfig: The ApiVideoLiveStream's initial VideoConfig
    ///   - initialCamera: The ApiVideoLiveStream's initial camera device
    init(
        initialAudioConfig: AudioConfig? = AudioConfig(),
        initialVideoConfig: VideoConfig? = VideoConfig(),
        initialCamera: AVCaptureDevice? = AVCaptureDevice.default(
            .builtInWideAngleCamera,
            for: .video,
            position: .back
        )
    ) throws {
        self.stream = SRTStream(connection: self.connection)

        try super.init(
            ioStream: self.stream,
            initialAudioConfig: initialAudioConfig,
            initialVideoConfig: initialVideoConfig,
            initialCamera: initialCamera
        )

        // Observe the SRT connection state so the delegate learns about drops
        // that are not initiated by `stopStreaming`.
        let keyValueObservation = self.connection.observe(\.connected, options: [.new]) { [weak self] _, _ in
            guard let self = self else {
                return
            }
            if !self.connection.connected {
                self.delegate?.disconnection()
            }
        }
        self.keyValueObservations.append(keyValueObservation)
    }

    deinit {
        // Invalidates the KVO subscription.
        keyValueObservations.removeAll()
    }

    /// Start your livestream
    /// - Parameters:
    ///   - streamKey: The key of your live, appended as the `streamid` query parameter
    ///   - url: The url of your srt server (must use the `srt` scheme)
    override func startStreaming(streamKey: String, url: String) throws {
        if streamKey.isEmpty {
            throw LiveStreamError.IllegalArgumentError("Stream key must not be empty")
        }
        if url.isEmpty {
            throw LiveStreamError.IllegalArgumentError("URL must not be empty")
        }
        if !self.isAudioConfigured || !self.isVideoConfigured {
            throw LiveStreamError.IllegalOperationError("Missing audio and/or video configuration")
        }
        guard var urlComponents = URLComponents(string: url) else {
            throw LiveStreamError.IllegalArgumentError("Invalid URL: \(url)")
        }
        if urlComponents.scheme != "srt" {
            throw LiveStreamError.IllegalArgumentError("Invalid URL scheme: \(urlComponents.scheme ?? "unknown")")
        }
        var queryItems = urlComponents.queryItems ?? []
        queryItems.append(URLQueryItem(name: "streamid", value: streamKey))
        urlComponents.queryItems = queryItems

        // Fix: fail fast with a typed error instead of handing a nil URL to the
        // connection (`URLComponents.url` is optional and was passed through unchecked).
        guard let srtUrl = urlComponents.url else {
            throw LiveStreamError.IllegalArgumentError("Invalid URL: \(url)")
        }
        Task {
            do {
                try await self.connection.open(srtUrl)
                self.stream.publish()
                self.delegate?.connectionSuccess()
            } catch {
                self.delegate?.connectionFailed(error.localizedDescription)
            }
        }
    }

    /// Stop your livestream
    override func stopStreaming() {
        // Snapshot before the async close so the delegate is only notified when
        // there actually was a live connection.
        let isConnected = isConnected
        Task {
            await self.connection.close()
            if isConnected {
                self.delegate?.disconnection()
            }
        }
    }
}
import AVFoundation
import Foundation

extension AVCaptureVideoOrientation {
    /// Whether this orientation is one of the two landscape orientations.
    var isLandscape: Bool {
        switch self {
        case .landscapeLeft, .landscapeRight:
            return true
        default:
            return false
        }
    }
}

/// Errors thrown by the live-stream API.
public enum LiveStreamError: Error {
    /// A caller-provided argument was invalid (e.g. empty stream key, malformed URL).
    case IllegalArgumentError(String)
    /// The operation cannot be performed in the current state (e.g. missing audio/video configuration).
    case IllegalOperationError(String)
}