diff --git a/DemoApp/Sources/Components/MemoryLogDestination/OSLogDestination.swift b/DemoApp/Sources/Components/MemoryLogDestination/OSLogDestination.swift index bad781372..48427e956 100644 --- a/DemoApp/Sources/Components/MemoryLogDestination/OSLogDestination.swift +++ b/DemoApp/Sources/Components/MemoryLogDestination/OSLogDestination.swift @@ -40,7 +40,10 @@ final class OSLogDestination: BaseLogDestination { extendedDetails += "[\(logDetails.functionName)] " } - let extendedMessage = "\(extendedDetails)> \(logDetails.message)" + var extendedMessage = "\(extendedDetails)> \(logDetails.message)" + if let error = logDetails.error { + extendedMessage += "[Error: \(error)]" + } let formattedMessage = LogConfig .formatters .reduce(extendedMessage) { $1.format(logDetails: logDetails, message: $0) } diff --git a/Sources/StreamVideo/Models/AudioCodec.swift b/Sources/StreamVideo/Models/AudioCodec.swift new file mode 100644 index 000000000..0a43841b0 --- /dev/null +++ b/Sources/StreamVideo/Models/AudioCodec.swift @@ -0,0 +1,49 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// Represents supported audio codecs in the StreamVideo SDK. +/// +/// - `none`: Represents an unsupported or unknown codec. +/// - `opus`: Represents the Opus codec, widely used for audio streaming. +/// - `red`: Represents the RED codec, used for redundant audio streams. +public enum AudioCodec: String, Sendable, Hashable { + case none, opus, red + + public var description: String { + rawValue + } + + /// Initializes an `AudioCodec` from WebRTC codec parameters. + /// + /// - Parameter source: The `RTCRtpCodecParameters` containing codec details. + /// - Assigns `.opus` or `.red` based on the codec name, or `.none` by default. 
+ init(_ source: RTCRtpCodecParameters) { + switch source.name.lowercased() { + case AudioCodec.opus.rawValue: + self = .opus + case AudioCodec.red.rawValue: + self = .red + default: + self = .none + } + } + + /// Initializes an `AudioCodec` from SFU codec model parameters. + /// + /// - Parameter source: The `Stream_Video_Sfu_Models_Codec` containing codec details. + /// - Assigns `.opus` or `.red` based on the codec name, or `.none` by default. + init(_ source: Stream_Video_Sfu_Models_Codec) { + switch source.name.lowercased() { + case AudioCodec.opus.rawValue: + self = .opus + case AudioCodec.red.rawValue: + self = .red + default: + self = .none + } + } +} diff --git a/Sources/StreamVideo/Models/PublishOptions.swift b/Sources/StreamVideo/Models/PublishOptions.swift new file mode 100644 index 000000000..668fef03e --- /dev/null +++ b/Sources/StreamVideo/Models/PublishOptions.swift @@ -0,0 +1,241 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// Represents options for publishing audio and video tracks. +/// +/// This structure encapsulates configurations for audio, video, and screen-sharing +/// tracks, such as codec, bitrate, frame rate, and dimensions. +struct PublishOptions: Sendable, Hashable { + + /// Options for configuring audio publishing. + struct AudioPublishOptions: Sendable, Hashable, CustomStringConvertible { + /// Unique identifier for the audio stream. + var id: Int + /// Codec used for the audio stream. + var codec: AudioCodec + /// Bitrate allocated for the audio stream. + var bitrate: Int + + /// A string describing the audio publish options. + var description: String { + "AudioPublishOptions(id: \(id), codec: \(codec), bitrate: \(bitrate))" + } + + /// Initializes the audio options from a model. + /// + /// - Parameter publishOption: The audio publish option model. 
+ init(_ publishOption: Stream_Video_Sfu_Models_PublishOption) { + id = Int(publishOption.id) + codec = .init(publishOption.codec) + bitrate = Int(publishOption.bitrate) + } + + /// Initializes the audio options with given parameters. + /// + /// - Parameters: + /// - id: The unique identifier for the audio stream. + /// - codec: The codec for the audio stream. + /// - bitrate: The bitrate for the audio stream. Defaults to `0`. + init( + id: Int = 0, + codec: AudioCodec, + bitrate: Int = 0 + ) { + self.id = id + self.codec = codec + self.bitrate = bitrate + } + + /// Hashes the essential properties into the given hasher. + /// + /// - Parameter hasher: The hasher used to combine values. + func hash(into hasher: inout Hasher) { + hasher.combine(id) + hasher.combine(codec) + } + } + + /// Options for configuring video publishing. + struct VideoPublishOptions: Sendable, Hashable { + + /// Represents spatial and temporal layers for video capturing. + struct CapturingLayers: Sendable, Hashable, CustomStringConvertible { + /// Number of spatial layers for the video. + var spatialLayers: Int + /// Number of temporal layers for the video. + var temporalLayers: Int + + /// Scalability mode derived from spatial and temporal layers. + var scalabilityMode: String { + var components = [ + "L", + "\(spatialLayers)", + "T", + "\(temporalLayers)" + ] + if spatialLayers > 1 { + components.append("_KEY") + } + return components.joined() + } + + /// A string describing the capturing layers. + var description: String { + "CapturingLayers(spatial: \(spatialLayers), temporal: \(temporalLayers), " + + "scalabilityMode: \(scalabilityMode))" + } + } + + /// Unique identifier for the video stream. + var id: Int + /// Codec used for the video stream. + var codec: VideoCodec + /// Layers for video capturing. + var capturingLayers: CapturingLayers + /// Bitrate allocated for the video stream. + var bitrate: Int + /// Frame rate for the video stream. 
+ var frameRate: Int + /// Dimensions of the video stream. + var dimensions: CGSize + + /// Initializes the video options from a model. + /// + /// - Parameter publishOption: The video publish option model. + init(_ publishOption: Stream_Video_Sfu_Models_PublishOption) { + id = Int(publishOption.id) + codec = .init(publishOption.codec) + capturingLayers = .init( + spatialLayers: Int(publishOption.maxSpatialLayers), + temporalLayers: Int(publishOption.maxTemporalLayers) + ) + bitrate = Int(publishOption.bitrate) + frameRate = Int(publishOption.fps) + dimensions = .init( + width: Int(publishOption.videoDimension.width), + height: Int(publishOption.videoDimension.height) + ) + } + + /// Initializes the video options with given parameters. + /// + /// - Parameters: + /// - id: Unique identifier for the video stream. + /// - codec: Codec used for the video stream. + /// - capturingLayers: Video capturing layers. Defaults to 3 spatial and 1 temporal layer. + /// - bitrate: Bitrate for the video stream. Defaults to `.maxBitrate`. + /// - frameRate: Frame rate for the video stream. Defaults to `30`. + /// - dimensions: Video dimensions. Defaults to `.full`. + init( + id: Int = -1, + codec: VideoCodec, + capturingLayers: PublishOptions.VideoPublishOptions.CapturingLayers = .init(spatialLayers: 3, temporalLayers: 1), + bitrate: Int = .maxBitrate, + frameRate: Int = .defaultFrameRate, + dimensions: CGSize = .full + ) { + self.id = id + self.codec = codec + self.capturingLayers = capturingLayers + self.bitrate = bitrate + self.frameRate = frameRate + self.dimensions = dimensions + } + + /// Hashes the essential properties into the given hasher. + /// + /// - Parameter hasher: The hasher used to combine values. + func hash(into hasher: inout Hasher) { + hasher.combine(id) + hasher.combine(codec) + } + } + + /// Original publish option models from the server. + let source: [Stream_Video_Sfu_Models_PublishOption] + /// Configured audio publishing options. 
+ let audio: [AudioPublishOptions] + /// Configured video publishing options. + let video: [VideoPublishOptions] + /// Configured screen-sharing options. + let screenShare: [VideoPublishOptions] + + /// Initializes the publish options from server models. + /// + /// - Parameter publishOptions: List of server-provided publish options. + init(_ publishOptions: [Stream_Video_Sfu_Models_PublishOption]) { + var audio = [AudioPublishOptions]() + var video = [VideoPublishOptions]() + var screenShare = [VideoPublishOptions]() + + for publishOption in publishOptions { + switch publishOption.trackType { + case .audio: + audio.append(.init(publishOption)) + case .video: + video.append(.init(publishOption)) + case .screenShare: + screenShare.append(.init(publishOption)) + default: + break + } + } + + source = publishOptions + self.audio = audio + self.video = video + self.screenShare = screenShare + } + + /// Initializes the publish options with audio, video, and screen-sharing. + /// + /// - Parameters: + /// - audio: List of audio publish options. + /// - video: List of video publish options. + /// - screenShare: List of screen-sharing options. + init( + audio: [AudioPublishOptions] = [], + video: [VideoPublishOptions] = [], + screenShare: [VideoPublishOptions] = [] + ) { + var source: [Stream_Video_Sfu_Models_PublishOption] = [] + source.append( + contentsOf: audio.map(Stream_Video_Sfu_Models_PublishOption.init) + ) + source.append( + contentsOf: video + .map { Stream_Video_Sfu_Models_PublishOption($0, trackType: .video) } + ) + source.append( + contentsOf: screenShare + .map { Stream_Video_Sfu_Models_PublishOption($0, trackType: .screenShare) } + ) + + self.source = source + self.audio = audio + self.video = video + self.screenShare = screenShare + } + + /// Returns video layers for the given track type and codec. + /// + /// - Parameters: + /// - trackType: The type of track (e.g., video, screen share). + /// - codec: The video codec to use. 
+ /// - Returns: A list of video layers. + func videoLayers( + for trackType: TrackType, + codec: VideoCodec + ) -> [VideoLayer] { + [] + } + + /// Default publish options. + static let `default` = PublishOptions( + video: [.init(codec: .h264)], + screenShare: [.init(codec: .h264, frameRate: 20)] + ) +} diff --git a/Sources/StreamVideo/Models/VideoCodec.swift b/Sources/StreamVideo/Models/VideoCodec.swift index 4a990990f..538ee4f43 100644 --- a/Sources/StreamVideo/Models/VideoCodec.swift +++ b/Sources/StreamVideo/Models/VideoCodec.swift @@ -9,8 +9,12 @@ import StreamWebRTC /// /// Each codec is associated with a specific encoding and transmission /// standard for video data, such as H.264 or VP8. -public enum VideoCodec: String, Sendable { - case h264, vp8, vp9, av1 +public enum VideoCodec: String, Sendable, Hashable, CustomStringConvertible { + case none, h264, vp8, vp9, av1 + + public var description: String { + rawValue + } /// Determines if the codec supports Scalable Video Coding (SVC). /// @@ -23,6 +27,8 @@ public enum VideoCodec: String, Sendable { return true case .h264, .vp8: return false + default: + return false } } @@ -33,7 +39,22 @@ public enum VideoCodec: String, Sendable { /// initializer returns `nil`. /// /// - Parameter source: The codec parameters used to determine the codec. 
- init?(_ source: RTCRtpCodecParameters) { + init(_ source: RTCRtpCodecParameters) { + switch source.name.lowercased() { + case VideoCodec.h264.rawValue: + self = .h264 + case VideoCodec.vp8.rawValue: + self = .vp8 + case VideoCodec.vp9.rawValue: + self = .vp9 + case VideoCodec.av1.rawValue: + self = .av1 + default: + self = .none + } + } + + init(_ source: Stream_Video_Sfu_Models_Codec) { switch source.name.lowercased() { case VideoCodec.h264.rawValue: self = .h264 @@ -44,7 +65,7 @@ public enum VideoCodec: String, Sendable { case VideoCodec.av1.rawValue: self = .av1 default: - return nil + self = .none } } } diff --git a/Sources/StreamVideo/Models/VideoOptions.swift b/Sources/StreamVideo/Models/VideoOptions.swift index dd28dd983..e2bc2314a 100644 --- a/Sources/StreamVideo/Models/VideoOptions.swift +++ b/Sources/StreamVideo/Models/VideoOptions.swift @@ -9,102 +9,20 @@ import Foundation struct VideoOptions: Sendable { /// The preferred video format. var preferredFormat: AVCaptureDevice.Format? - /// The preferred video dimensions. - var preferredDimensions: CMVideoDimensions - /// The preferred frames per second. - var preferredFps: Int - var preferredVideoCodec: VideoCodec - var preferredBitrate: Int - var preferredTargetResolution: TargetResolution? var preferredCameraPosition: AVCaptureDevice.Position - /// The supported codecs. - var videoLayers: [VideoLayer] - init( - preferredTargetResolution: TargetResolution? = nil, preferredFormat: AVCaptureDevice.Format? 
= nil, - preferredFps: Int = 30, - preferredVideoCodec: VideoCodec = .h264, - preferredBitrate: Int = .maxBitrate, preferredCameraPosition: AVCaptureDevice.Position = .front ) { - self.preferredTargetResolution = preferredTargetResolution self.preferredFormat = preferredFormat - self.preferredFps = preferredFps - self.preferredVideoCodec = preferredVideoCodec - self.preferredBitrate = preferredBitrate self.preferredCameraPosition = preferredCameraPosition - - if let preferredTargetResolution { - preferredDimensions = CMVideoDimensions( - width: Int32(preferredTargetResolution.width), - height: Int32(preferredTargetResolution.height) - ) - do { - videoLayers = try VideoCapturingUtils.codecs( - preferredFormat: preferredFormat, - preferredDimensions: preferredDimensions, - preferredFps: preferredFps, - preferredBitrate: preferredTargetResolution.bitrate ?? preferredBitrate, - preferredCameraPosition: preferredCameraPosition - ) - } catch { - videoLayers = VideoLayer.default - } - } else { - preferredDimensions = .full - videoLayers = VideoLayer.default - } - - print("") - } - - func with(preferredTargetResolution: TargetResolution?) 
-> VideoOptions { - .init( - preferredTargetResolution: preferredTargetResolution, - preferredFormat: preferredFormat, - preferredFps: preferredFps, - preferredVideoCodec: preferredVideoCodec, - preferredBitrate: preferredBitrate, - preferredCameraPosition: preferredCameraPosition - ) - } - - func with(preferredVideoCodec: VideoCodec) -> VideoOptions { - .init( - preferredTargetResolution: preferredTargetResolution, - preferredFormat: preferredFormat, - preferredFps: preferredFps, - preferredVideoCodec: preferredVideoCodec, - preferredBitrate: preferredBitrate, - preferredCameraPosition: preferredCameraPosition - ) - } - - func with(preferredBitrate: Int) -> VideoOptions { - .init( - preferredTargetResolution: preferredTargetResolution, - preferredFormat: preferredFormat, - preferredFps: preferredFps, - preferredVideoCodec: preferredVideoCodec, - preferredBitrate: preferredBitrate, - preferredCameraPosition: preferredCameraPosition - ) } func with(preferredCameraPosition: AVCaptureDevice.Position) -> VideoOptions { .init( - preferredTargetResolution: preferredTargetResolution, preferredFormat: preferredFormat, - preferredFps: preferredFps, - preferredVideoCodec: preferredVideoCodec, - preferredBitrate: preferredBitrate, preferredCameraPosition: preferredCameraPosition ) } } - -extension Int { - public static let maxBitrate = 1_000_000 -} diff --git a/Sources/StreamVideo/Utils/BroadcastUtils.swift b/Sources/StreamVideo/Utils/BroadcastUtils.swift index f8bfb40bd..28c43b81f 100644 --- a/Sources/StreamVideo/Utils/BroadcastUtils.swift +++ b/Sources/StreamVideo/Utils/BroadcastUtils.swift @@ -18,7 +18,11 @@ enum BroadcastUtils { ) } - static func aspectFit(width: Int32, height: Int32, size: Int32) -> (width: Int32, height: Int32) { + static func aspectFit( + width: Int32, + height: Int32, + size: Int32 + ) -> (width: Int32, height: Int32) { let isWider = width >= height let ratio = isWider ? 
Double(height) / Double(width) : Double(width) / Double(height) return ( diff --git a/Sources/StreamVideo/Utils/Logger/Logger.swift b/Sources/StreamVideo/Utils/Logger/Logger.swift index 65d40dd8a..856446dd4 100644 --- a/Sources/StreamVideo/Utils/Logger/Logger.swift +++ b/Sources/StreamVideo/Utils/Logger/Logger.swift @@ -29,7 +29,8 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { .iceAdapter, .mediaAdapter, .thermalState, - .audioSession + .audioSession, + .videoCapturer ] /// All subsystems within the SDK. @@ -46,7 +47,8 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { .iceAdapter, .mediaAdapter, .thermalState, - .audioSession + .audioSession, + .videoCapturer ] /// The subsystem responsible for any other part of the SDK. @@ -76,6 +78,7 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { public static let thermalState = Self(rawValue: 1 << 11) /// The subsystem responsible for interacting with the AudioSession. public static let audioSession = Self(rawValue: 1 << 12) + public static let videoCapturer = Self(rawValue: 1 << 13) public var description: String { switch rawValue { @@ -105,6 +108,8 @@ public struct LogSubsystem: OptionSet, CustomStringConvertible { return "thermalState" case LogSubsystem.audioSession.rawValue: return "audioSession" + case LogSubsystem.videoCapturer.rawValue: + return "videoCapturer" default: return "unknown(rawValue:\(rawValue)" } diff --git a/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift b/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift index a2a8f9c67..031702180 100644 --- a/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift +++ b/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift @@ -43,14 +43,6 @@ final class PeerConnectionFactory: @unchecked Sendable { defaultDecoder.supportedCodecs() } - func setPreferredEncodingCodec(_ videoCodec: VideoCodec) { - if let preferredCodec = defaultEncoder.supportedCodecs().first(where: { $0.name.lowercased() == 
videoCodec.rawValue }) { - defaultEncoder.preferredCodec = preferredCodec - } else { - log.warning("Unable to set preferred encoding codec \(videoCodec.rawValue).") - } - } - /// Creates or retrieves a PeerConnectionFactory instance for a given /// audio processing module. /// - Parameter audioProcessingModule: The RTCAudioProcessingModule to use. @@ -79,6 +71,8 @@ final class PeerConnectionFactory: @unchecked Sendable { PeerConnectionFactoryStorage.shared.remove(for: audioProcessingModule) } + // MARK: - Builders + /// Creates a video source, optionally configured for screen sharing. /// - Parameter forScreenShare: Boolean indicating if the source is for screen sharing. /// - Returns: An RTCVideoSource instance. @@ -160,6 +154,26 @@ final class PeerConnectionFactory: @unchecked Sendable { return peerConnection } + + // MARK: - Capabilities + + func codecCapabilities( + for audioCodec: AudioCodec + ) -> RTCRtpCodecCapability? { + factory + .rtpSenderCapabilities(forKind: kRTCMediaStreamTrackKindAudio) + .codecs + .baseline(for: audioCodec) + } + + func codecCapabilities( + for videoCodec: VideoCodec + ) -> RTCRtpCodecCapability? { + factory + .rtpSenderCapabilities(forKind: kRTCMediaStreamTrackKindVideo) + .codecs + .baseline(for: videoCodec) + } } /// A thread-safe storage class for managing PeerConnectionFactory instances. 
diff --git a/Sources/StreamVideo/WebRTC/Screensharing/BroadcastScreenCapturer.swift b/Sources/StreamVideo/WebRTC/Screensharing/BroadcastScreenCapturer.swift index 7dd5d93c7..d8defbb5c 100644 --- a/Sources/StreamVideo/WebRTC/Screensharing/BroadcastScreenCapturer.swift +++ b/Sources/StreamVideo/WebRTC/Screensharing/BroadcastScreenCapturer.swift @@ -18,21 +18,20 @@ class BroadcastScreenCapturer: VideoCapturing { init( videoSource: RTCVideoSource, - videoOptions: VideoOptions, - videoFilters: [VideoFilter] + videoOptions: VideoOptions ) { self.videoOptions = videoOptions self.videoSource = videoSource #if targetEnvironment(simulator) videoCapturer = RTCFileVideoCapturer(delegate: videoSource) #else - let handler = StreamVideoCaptureHandler(source: videoSource, filters: videoFilters, handleRotation: false) + let handler = StreamVideoCaptureHandler(source: videoSource, handleRotation: false) videoCaptureHandler = handler videoCapturer = RTCVideoCapturer(delegate: handler) #endif } - func startCapture(device: AVCaptureDevice?) 
async throws { + func startCapture(with configuration: VideoCapturingConfiguration) async throws { guard self.bufferReader == nil else { return } @@ -69,15 +68,15 @@ class BroadcastScreenCapturer: VideoCapturing { height: Int32(CVPixelBufferGetHeight(pixelBuffer)) ) - bufferDimensions = BroadcastUtils.adjust( - width: bufferDimensions.width, - height: bufferDimensions.height, - size: max( - self.videoOptions.preferredDimensions.width, - self.videoOptions.preferredDimensions.height - ) - ) - +// bufferDimensions = BroadcastUtils.adjust( +// width: bufferDimensions.width, +// height: bufferDimensions.height, +// size: max( +// self.videoOptions.preferredDimensions.width, +// self.videoOptions.preferredDimensions.height +// ) +// ) +// self.videoCaptureHandler?.capturer(self.videoCapturer, didCapture: rtcFrame) if !self.adaptedOutputFormat { self.adaptedOutputFormat = true diff --git a/Sources/StreamVideo/WebRTC/Screensharing/ScreenshareCapturer.swift b/Sources/StreamVideo/WebRTC/Screensharing/ScreenshareCapturer.swift index 48f4f3635..3eb455a0d 100644 --- a/Sources/StreamVideo/WebRTC/Screensharing/ScreenshareCapturer.swift +++ b/Sources/StreamVideo/WebRTC/Screensharing/ScreenshareCapturer.swift @@ -14,21 +14,20 @@ class ScreenshareCapturer: VideoCapturing { init( videoSource: RTCVideoSource, - videoOptions: VideoOptions, - videoFilters: [VideoFilter] + videoOptions: VideoOptions ) { self.videoOptions = videoOptions self.videoSource = videoSource #if targetEnvironment(simulator) videoCapturer = RTCFileVideoCapturer(delegate: videoSource) #else - let handler = StreamVideoCaptureHandler(source: videoSource, filters: videoFilters, handleRotation: false) + let handler = StreamVideoCaptureHandler(source: videoSource, handleRotation: false) videoCaptureHandler = handler videoCapturer = RTCVideoCapturer(delegate: handler) #endif } - func startCapture(device: AVCaptureDevice?) 
async throws { + func startCapture(with configuration: VideoCapturingConfiguration) async throws { let devices = RTCCameraVideoCapturer.captureDevices() guard let device = devices.first else { @@ -61,43 +60,43 @@ class ScreenshareCapturer: VideoCapturing { } func handle(sampleBuffer: CMSampleBuffer, type: RPSampleBufferType, for device: AVCaptureDevice) { - let outputFormat = VideoCapturingUtils.outputFormat( - for: device, - preferredFormat: videoOptions.preferredFormat, - preferredDimensions: videoOptions.preferredDimensions, - preferredFps: videoOptions.preferredFps - ) - - if type == .video { - guard CMSampleBufferGetNumSamples(sampleBuffer) == 1, - CMSampleBufferIsValid(sampleBuffer), - CMSampleBufferDataIsReady(sampleBuffer) else { - return - } - - guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - return - } - - let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - let timeStampNs = Int64(CMTimeGetSeconds(timeStamp) * Double(NSEC_PER_SEC)) - - let rtcBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer) - let rtcFrame = RTCVideoFrame( - buffer: rtcBuffer, - rotation: ._0, - timeStampNs: timeStampNs - ) +// let outputFormat = VideoCapturingUtils.outputFormat( +// for: device, +// preferredFormat: videoOptions.preferredFormat, +// preferredDimensions: videoOptions.preferredDimensions, +// preferredFps: videoOptions.preferredFps +// ) +// +// if type == .video { +// guard CMSampleBufferGetNumSamples(sampleBuffer) == 1, +// CMSampleBufferIsValid(sampleBuffer), +// CMSampleBufferDataIsReady(sampleBuffer) else { +// return +// } +// +// guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { +// return +// } +// +// let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) +// let timeStampNs = Int64(CMTimeGetSeconds(timeStamp) * Double(NSEC_PER_SEC)) +// +// let rtcBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer) +// let rtcFrame = RTCVideoFrame( +// buffer: rtcBuffer, +// rotation: ._0, +// 
timeStampNs: timeStampNs +// ) - videoCaptureHandler?.capturer(videoCapturer, didCapture: rtcFrame) - if let dimensions = outputFormat.dimensions { - videoSource.adaptOutputFormat( - toWidth: dimensions.width, - height: dimensions.height, - fps: Int32(outputFormat.fps) - ) - } - } +// videoCaptureHandler?.capturer(videoCapturer, didCapture: rtcFrame) +// if let dimensions = outputFormat.dimensions { +// videoSource.adaptOutputFormat( +// toWidth: dimensions.width, +// height: dimensions.height, +// fps: Int32(outputFormat.fps) +// ) +// } +// } } private func stopScreensharing() async throws { diff --git a/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift b/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift index 0585704ac..73c6c4cb5 100644 --- a/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift +++ b/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift @@ -11,7 +11,6 @@ final class StreamVideoCaptureHandler: NSObject, RTCVideoCapturerDelegate { @Injected(\.orientationAdapter) private var orientationAdapter let source: RTCVideoCapturerDelegate - let filters: [VideoFilter] let context: CIContext let colorSpace: CGColorSpace var selectedFilter: VideoFilter? 
@@ -24,11 +23,9 @@ final class StreamVideoCaptureHandler: NSObject, RTCVideoCapturerDelegate { init( source: RTCVideoCapturerDelegate, - filters: [VideoFilter], handleRotation: Bool = true ) { self.source = source - self.filters = filters self.handleRotation = handleRotation context = CIContext(options: [CIContextOption.useSoftwareRenderer: false]) colorSpace = CGColorSpaceCreateDeviceRGB() diff --git a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturer.swift b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturer.swift index 78f879ee3..f9f002edc 100644 --- a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturer.swift +++ b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturer.swift @@ -5,32 +5,31 @@ import Foundation import StreamWebRTC -class VideoCapturer: CameraVideoCapturing { - - private var videoCapturer: RTCVideoCapturer? - private var videoOptions: VideoOptions +final class VideoCapturer: CameraVideoCapturing { + private let videoSource: RTCVideoSource + + private var videoCapturer: RTCVideoCapturer? private var videoCaptureHandler: StreamVideoCaptureHandler? - + private var simulatorStreamFile: URL? = InjectedValues[\.simulatorStreamFile] - - init( - videoSource: RTCVideoSource, - videoOptions: VideoOptions, - videoFilters: [VideoFilter] - ) { - self.videoOptions = videoOptions + + @Atomic private var configuration: VideoCapturingConfiguration? 
+ @Atomic private var isActive: Bool = false + + init(videoSource: RTCVideoSource) { self.videoSource = videoSource #if targetEnvironment(simulator) if let url = simulatorStreamFile { - let handler = StreamVideoCaptureHandler(source: videoSource, filters: videoFilters) + let handler = StreamVideoCaptureHandler(source: videoSource) videoCaptureHandler = handler videoCapturer = SimulatorScreenCapturer(delegate: handler, videoURL: url) + isActive = true } else { videoCapturer = RTCFileVideoCapturer(delegate: videoSource) } #else - let handler = StreamVideoCaptureHandler(source: videoSource, filters: videoFilters) + let handler = StreamVideoCaptureHandler(source: videoSource) videoCaptureHandler = handler videoCapturer = RTCCameraVideoCapturer(delegate: handler, captureSession: AVCaptureSession()) checkForBackgroundCameraAccess() @@ -41,64 +40,138 @@ class VideoCapturer: CameraVideoCapturing { VideoCapturingUtils.capturingDevice(for: cameraPosition) } - func setCameraPosition(_ cameraPosition: AVCaptureDevice.Position) async throws { - guard let device = VideoCapturingUtils.capturingDevice(for: cameraPosition) else { - throw ClientError.Unexpected() + func setCameraPosition(_ position: AVCaptureDevice.Position) async throws { + guard + let configuration + else { + return log.debug("No active video capturing session found.", subsystems: .webRTC) } - try await startCapture(device: device) + try await startCapture( + with: .init( + position: position, + dimensions: configuration.dimensions, + frameRate: configuration.frameRate + ) + ) } func setVideoFilter(_ videoFilter: VideoFilter?) { videoCaptureHandler?.selectedFilter = videoFilter } - - func startCapture(device: AVCaptureDevice?) async throws { - try await withCheckedThrowingContinuation { continuation in - guard let videoCapturer = videoCapturer as? 
RTCCameraVideoCapturer else { - continuation.resume() - return + + func startCapture(with configuration: VideoCapturingConfiguration) async throws { + try await withCheckedThrowingContinuation { [weak self, videoCapturer] continuation in + guard + let self, + let videoCapturer = videoCapturer as? RTCCameraVideoCapturer + else { + log.debug( + "Start video capturing isn't possible with videoCapturer of type:\(type(of: videoCapturer)).", + subsystems: .webRTC + ) + return continuation.resume() } - guard let device else { - continuation.resume(throwing: ClientError.Unexpected()) + guard configuration != self.configuration else { return } - let outputFormat = VideoCapturingUtils.outputFormat( - for: device, - preferredFormat: videoOptions.preferredFormat, - preferredDimensions: videoOptions.preferredDimensions, - preferredFps: videoOptions.preferredFps - ) - guard let selectedFormat = outputFormat.format, let dimensions = outputFormat.dimensions else { - continuation.resume(throwing: ClientError.Unexpected()) - return + + guard + let device = capturingDevice(for: configuration.position) + else { + return continuation.resume( + throwing: ClientError( + "Start video capturing for position:\(configuration.position) failed as no devices were found." + ) + ) } - - if dimensions.area != videoOptions.preferredDimensions.area { - log.debug("Adapting video source output format") + + guard + let outputFormat = self.outputFormat(for: device, configuration: configuration) + else { + return continuation.resume( + throwing: ClientError( + "Start video capturing for position:\(configuration.position) failed as no output format was found." 
+ ) + ) + } + + let outputFormatDimensions = CMVideoFormatDescriptionGetDimensions( + outputFormat.formatDescription + ) + + let preferredDimensions = CMVideoDimensions( + width: Int32(configuration.dimensions.width), + height: Int32(configuration.dimensions.height) + ) + if outputFormatDimensions.area != preferredDimensions.area { videoSource.adaptOutputFormat( - toWidth: dimensions.width, - height: dimensions.height, - fps: Int32(outputFormat.fps) + toWidth: outputFormatDimensions.width, + height: outputFormatDimensions.height, + fps: Int32(configuration.frameRate.clamped(to: outputFormat.frameRateRange)) + ) + log.debug( + "Start video capturing requested dimensions:[w:\(configuration.dimensions.width), h:\(configuration.dimensions.height)] but found dimensions:[w:\(outputFormatDimensions.width), h:\(outputFormatDimensions.height)]. Adapting video source now!", + subsystems: .webRTC ) } - + videoCapturer.startCapture( with: device, - format: selectedFormat, - fps: outputFormat.fps + format: outputFormat, + fps: configuration.frameRate.clamped(to: outputFormat.frameRateRange) ) { [weak self] error in if let error { continuation.resume(throwing: error) } else { self?.videoCaptureHandler?.currentCameraPosition = device.position - continuation.resume(returning: ()) + self?.isActive = true + continuation.resume() } } - } as Void + + self.configuration = configuration + log.debug( + """ + Start video capturing completed: + Configuration: + Position: \(configuration.position) + Dimensions: [w:\(configuration.dimensions.width), h:\(configuration.dimensions.height)] + FrameRate: \(configuration.frameRate) + + OutputFormat: + Device: \(device) + Dimensions: [w:\(outputFormatDimensions.width), h:\(outputFormatDimensions.height)] + FrameRate: \(configuration.frameRate.clamped(to: outputFormat.frameRateRange)) + """, + subsystems: .webRTC + ) + } + } + + private func outputFormat( + for device: AVCaptureDevice, + configuration: VideoCapturingConfiguration + ) -> 
AVCaptureDevice.Format? { + VideoCapturingUtils.outputFormat( + for: device, + preferredFormat: nil, + preferredDimensions: .init( + width: Int32(configuration.dimensions.width), + height: Int32(configuration.dimensions.height) + ), + preferredFps: configuration.frameRate + ).format } func stopCapture() async throws { + guard + isActive + else { + log.debug("Stop video capturing isn't possible while isActive:\(isActive).", subsystems: .webRTC) + return + } + try await withCheckedThrowingContinuation { continuation in if let capturer = videoCapturer as? RTCCameraVideoCapturer { capturer.stopCapture { @@ -111,66 +184,71 @@ class VideoCapturer: CameraVideoCapturing { continuation.resume(returning: ()) } } + + configuration = nil + isActive = false + + log.debug("Stop video capturing completed", subsystems: .webRTC) } func updateCaptureQuality( _ layers: [VideoLayer], on device: AVCaptureDevice? ) async throws { - guard - let videoCapturer = videoCapturer as? RTCCameraVideoCapturer, - let device - else { - return - } - - let preferredDimensions: CMVideoDimensions = { - if layers.first(where: { $0.quality == VideoLayer.full.quality }) != nil { - return .full - } else if layers.first(where: { $0.quality == VideoLayer.half.quality }) != nil { - return .half - } else { - return .quarter - } - }() - let outputFormat = VideoCapturingUtils.outputFormat( - for: device, - preferredFormat: videoOptions.preferredFormat, - preferredDimensions: preferredDimensions, - preferredFps: videoOptions.preferredFps - ) - guard - let selectedFormat = outputFormat.format, - let dimensions = outputFormat.dimensions - else { - return - } - - if dimensions.area != videoOptions.preferredDimensions.area { - log.debug( - "Adapting video source output format (\(dimensions.width)x\(dimensions.height))", - subsystems: .webRTC - ) - videoSource.adaptOutputFormat( - toWidth: dimensions.width, - height: dimensions.height, - fps: Int32(outputFormat.fps) - ) - } - - return try await 
withCheckedThrowingContinuation { continuation in - videoCapturer.startCapture( - with: device, - format: selectedFormat, - fps: outputFormat.fps - ) { error in - if let error { - continuation.resume(throwing: error) - } else { - continuation.resume(returning: ()) - } - } - } +// guard +// let videoCapturer = videoCapturer as? RTCCameraVideoCapturer, +// let device +// else { +// return +// } +// +// let preferredDimensions: CMVideoDimensions = { +// if layers.first(where: { $0.quality == VideoLayer.full.quality }) != nil { +// return .full +// } else if layers.first(where: { $0.quality == VideoLayer.half.quality }) != nil { +// return .half +// } else { +// return .quarter +// } +// }() +// let outputFormat = VideoCapturingUtils.outputFormat( +// for: device, +// preferredFormat: videoOptions.preferredFormat, +// preferredDimensions: preferredDimensions, +// preferredFps: videoOptions.preferredFps +// ) +// guard +// let selectedFormat = outputFormat.format, +// let dimensions = outputFormat.dimensions +// else { +// return +// } +// +// if dimensions.area != videoOptions.preferredDimensions.area { +// log.debug( +// "Adapting video source output format (\(dimensions.width)x\(dimensions.height))", +// subsystems: .webRTC +// ) +// videoSource.adaptOutputFormat( +// toWidth: dimensions.width, +// height: dimensions.height, +// fps: Int32(outputFormat.fps) +// ) +// } +// +// return try await withCheckedThrowingContinuation { continuation in +// videoCapturer.startCapture( +// with: device, +// format: selectedFormat, +// fps: outputFormat.fps +// ) { error in +// if let error { +// continuation.resume(throwing: error) +// } else { +// continuation.resume(returning: ()) +// } +// } +// } } /// Initiates a focus and exposure operation at the specified point on the camera's view. 
@@ -404,49 +482,3 @@ class VideoCapturer: CameraVideoCapturing { } } } - -extension CMVideoDimensions { - - public static var full = CMVideoDimensions(width: 1280, height: 720) - public static var half = CMVideoDimensions(width: 640, height: 480) - public static var quarter = CMVideoDimensions(width: 480, height: 360) - - var area: Int32 { - width * height - } -} - -extension AVCaptureDevice.Format { - - // computes a ClosedRange of supported FPSs for this format - func fpsRange() -> ClosedRange { - videoSupportedFrameRateRanges - .map { $0.toRange() } - .reduce(into: 0...0) { result, current in - result = merge(range: result, with: current) - } - } -} - -extension AVFrameRateRange { - - // convert to a ClosedRange - func toRange() -> ClosedRange { - Int(minFrameRate)...Int(maxFrameRate) - } -} - -internal func merge( - range range1: ClosedRange, - with range2: ClosedRange -) -> ClosedRange where T: Comparable { - min(range1.lowerBound, range2.lowerBound)...max(range1.upperBound, range2.upperBound) -} - -extension Comparable { - - // clamp a value within the range - func clamped(to limits: ClosedRange) -> Self { - min(max(self, limits.lowerBound), limits.upperBound) - } -} diff --git a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift index a701e9033..f6ef0313a 100644 --- a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift +++ b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift @@ -5,82 +5,68 @@ import StreamWebRTC /// A protocol defining methods for creating video capturing objects. +/// +/// `VideoCapturerProviding` allows the creation of capturers for both cameras and +/// screen sharing, with support for configuring options and applying filters. protocol VideoCapturerProviding { - /// Builds a camera capturer with the specified source, options, and filters. + /// Builds a camera capturer with the specified source and filters. 
+ /// /// - Parameters: - /// - source: The video source for the capturer. - /// - options: Configuration options for the video capture. - /// - filters: An array of video filters to apply. - /// - Returns: An object conforming to `CameraVideoCapturing` for camera capture. + /// - source: The video source for the capturer, providing the captured frames. + /// - Returns: An object conforming to `CameraVideoCapturing` for managing + /// camera capture operations. func buildCameraCapturer( - source: RTCVideoSource, - options: VideoOptions, - filters: [VideoFilter] - ) -> CameraVideoCapturing + source: RTCVideoSource + ) -> StreamVideoCapturer /// Builds a screen capturer with the specified type, source, options, and filters. + /// /// - Parameters: - /// - type: The type of screen sharing to perform. - /// - source: The video source for the capturer. - /// - options: Configuration options for the video capture. - /// - filters: An array of video filters to apply. - /// - Returns: An object conforming to `VideoCapturing` for screen capture. + /// - type: The type of screen sharing to perform (`.inApp` or `.broadcast`). + /// - source: The video source for the capturer, providing the captured frames. + /// - Returns: An object conforming to `VideoCapturing` for managing screen + /// capture operations. func buildScreenCapturer( - _ type: ScreensharingType, - source: RTCVideoSource, - options: VideoOptions, - filters: [VideoFilter] - ) -> VideoCapturing + _ type: ScreensharingType, + source: RTCVideoSource + ) -> StreamVideoCapturer } /// A concrete implementation of `VideoCapturerProviding` for creating video capturers. +/// +/// `StreamVideoCapturerFactory` generates video capturers for camera and screen sharing. +/// It supports applying video filters and configuring capture settings for each type. final class StreamVideoCapturerFactory: VideoCapturerProviding { /// Creates a camera capturer with the given parameters. 
+ /// /// - Parameters: - /// - source: The video source for the capturer. - /// - options: Configuration options for the video capture. - /// - filters: An array of video filters to apply. - /// - Returns: A `VideoCapturer` instance for camera capture. + /// - source: The video source for the capturer, providing the captured frames. + /// - Returns: A `CameraVideoCapturing` instance for managing camera capture. func buildCameraCapturer( - source: RTCVideoSource, - options: VideoOptions, - filters: [VideoFilter] - ) -> CameraVideoCapturing { - VideoCapturer( - videoSource: source, - videoOptions: options, - videoFilters: filters - ) + source: RTCVideoSource + ) -> StreamVideoCapturer { + .cameraCapturer(with: source) } /// Creates a screen capturer based on the specified type and parameters. + /// /// - Parameters: - /// - type: The type of screen sharing to perform. - /// - source: The video source for the capturer. - /// - options: Configuration options for the video capture. - /// - filters: An array of video filters to apply. - /// - Returns: A `VideoCapturing` instance for screen capture, either `ScreenshareCapturer` or `BroadcastScreenCapturer`. + /// - type: The type of screen sharing to perform (`.inApp` or `.broadcast`). + /// - source: The video source for the capturer, providing the captured frames. + /// - Returns: A `StreamVideoCapturer` instance for managing screen capture. + /// Depending on the type, it is created via `screenShareCapturer(with:)` + /// or `broadcastCapturer(with:)`. 
func buildScreenCapturer( _ type: ScreensharingType, - source: RTCVideoSource, - options: VideoOptions, - filters: [VideoFilter] - ) -> VideoCapturing { + source: RTCVideoSource + ) -> StreamVideoCapturer { switch type { case .inApp: - return ScreenshareCapturer( - videoSource: source, - videoOptions: options, - videoFilters: filters - ) + return .screenShareCapturer(with: source) case .broadcast: - return BroadcastScreenCapturer( - videoSource: source, - videoOptions: options, - videoFilters: filters - ) + return .broadcastCapturer(with: source) } } } diff --git a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturing.swift b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturing.swift index f3a0c8bef..d9c66ed9b 100644 --- a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturing.swift +++ b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturing.swift @@ -5,30 +5,111 @@ import Foundation import StreamWebRTC +/// Configuration for video capturing settings. +/// +/// This structure encapsulates the desired position, dimensions, and frame rate +/// for video capturing. It is used to initialize or update video capture sessions. +struct VideoCapturingConfiguration: Equatable { + /// The camera position to use for capturing (e.g., front or back camera). + var position: AVCaptureDevice.Position + /// The dimensions (width and height) for the captured video. + var dimensions: CGSize + /// The frame rate for video capturing in frames per second (fps). + var frameRate: Int +} + +/// Protocol defining the behavior of a video capturing system. +/// +/// This protocol outlines the essential operations for starting and stopping +/// video capture sessions. It is intended to be implemented by classes that +/// manage video capturing devices. protocol VideoCapturing { - func startCapture(device: AVCaptureDevice?) async throws + /// Starts capturing video with the specified configuration. + /// + /// - Parameter configuration: The desired capturing configuration. 
+ /// - Throws: An error if the capture session fails to start. + func startCapture(with configuration: VideoCapturingConfiguration) async throws + + /// Stops the video capturing session. + /// + /// - Throws: An error if the capture session fails to stop. func stopCapture() async throws } +/// Protocol extending `VideoCapturing` to add camera-specific functionality. +/// +/// This protocol provides additional methods for managing camera settings, +/// such as switching camera positions, applying video filters, and configuring +/// capture quality. protocol CameraVideoCapturing: VideoCapturing { + /// Updates the current camera position (e.g., front or back). + /// + /// - Parameter cameraPosition: The desired camera position. + /// - Throws: An error if the camera position cannot be set. func setCameraPosition(_ cameraPosition: AVCaptureDevice.Position) async throws + + /// Applies a video filter to the captured video. + /// + /// - Parameter videoFilter: An optional video filter to apply. func setVideoFilter(_ videoFilter: VideoFilter?) + + /// Updates the capture quality based on provided video layer configurations. + /// + /// - Parameters: + /// - codecs: The video layers specifying quality configurations. + /// - device: The optional capture device to apply these settings to. + /// - Throws: An error if the quality cannot be updated. func updateCaptureQuality( _ codecs: [VideoLayer], on device: AVCaptureDevice? ) async throws + + /// Retrieves the capture device for a given camera position. + /// + /// - Parameter cameraPosition: The desired camera position. + /// - Returns: The corresponding `AVCaptureDevice`, or `nil` if unavailable. func capturingDevice(for cameraPosition: AVCaptureDevice.Position) -> AVCaptureDevice? + + /// Zooms the camera by the specified factor. + /// + /// - Parameter factor: The zoom factor to apply. + /// - Throws: An error if the zoom cannot be applied. 
func zoom(by factor: CGFloat) throws + + /// Adjusts the camera focus to the specified point. + /// + /// - Parameter point: The focus point in normalized coordinates (0.0 to 1.0). + /// - Throws: An error if the focus cannot be adjusted. func focus(at point: CGPoint) throws + + /// Adds a video output for capturing video data. + /// + /// - Parameter videoOutput: The `AVCaptureVideoDataOutput` to add. + /// - Throws: An error if the video output cannot be added. func addVideoOutput( _ videoOutput: AVCaptureVideoDataOutput ) throws + + /// Removes a previously added video output. + /// + /// - Parameter videoOutput: The `AVCaptureVideoDataOutput` to remove. + /// - Throws: An error if the video output cannot be removed. func removeVideoOutput( _ videoOutput: AVCaptureVideoDataOutput ) throws + + /// Adds a photo output for capturing still images. + /// + /// - Parameter capturePhotoOutput: The `AVCapturePhotoOutput` to add. + /// - Throws: An error if the photo output cannot be added. func addCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput ) throws + + /// Removes a previously added photo output. + /// + /// - Parameter capturePhotoOutput: The `AVCapturePhotoOutput` to remove. + /// - Throws: An error if the photo output cannot be removed. 
func removeCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput ) throws diff --git a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturingUtils.swift b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturingUtils.swift index 01dc52702..033b69ab2 100644 --- a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturingUtils.swift +++ b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturingUtils.swift @@ -83,7 +83,7 @@ enum VideoCapturingUtils { selectedFormat = foundFormat } else { selectedFormat = sortedFormats.first(where: { $0.dimensions.area >= preferredDimensions.area - && $0.format.fpsRange().contains(preferredFps) + && $0.format.frameRateRange.contains(preferredFps) }) if selectedFormat == nil { @@ -110,7 +110,7 @@ enum VideoCapturingUtils { ) var selectedFps = preferredFps - let fpsRange = selectedFormat.format.fpsRange() + let fpsRange = selectedFormat.format.frameRateRange if !fpsRange.contains(selectedFps) { log.warning("requested fps: \(preferredFps) not available: \(fpsRange) and will be clamped") diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice+OutputFormat.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice+OutputFormat.swift new file mode 100644 index 000000000..a23093062 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice+OutputFormat.swift @@ -0,0 +1,37 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation +import StreamWebRTC + +extension AVCaptureDevice { + func outputFormat( + preferredDimensions: CMVideoDimensions, + preferredFrameRate: Int + ) -> AVCaptureDevice.Format? 
{ + let formats = RTCCameraVideoCapturer + .supportedFormats(for: self) + .sorted { $0.areaDiff(preferredDimensions) < $1.areaDiff(preferredDimensions) } + + if let result = formats.first( + with: [ + .area(preferredDimensions: preferredDimensions), + .frameRate(preferredFrameRate: preferredFrameRate) + ] + ) { + return result + } else if let result = formats.first( + with: [.area(preferredDimensions: preferredDimensions)] + ) { + return result + } else if let result = formats.first( + with: [.minimumAreaDifference(preferredDimensions: preferredDimensions)] + ) { + return result + } else { + return nil + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice.Format+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice.Format+Convenience.swift new file mode 100644 index 000000000..eecf7f816 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice.Format+Convenience.swift @@ -0,0 +1,76 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation + +/// Extension adding utility for retrieving frame rate range of an `AVCaptureDevice.Format`. +extension AVCaptureDevice.Format { + + var dimensions: CMVideoDimensions { + CMVideoFormatDescriptionGetDimensions(formatDescription) + } + + func areaDiff(_ target: CMVideoDimensions) -> Int32 { + abs(dimensions.area - target.area) + } + + /// The supported frame rate range for the capture device format. + /// + /// - Returns: A `ClosedRange` representing the minimum and maximum + /// frame rates supported by the format. + /// + /// - Note: + /// - This range is derived from the `videoSupportedFrameRateRanges` property. + /// - If no frame rate ranges are available, the range defaults to `0...0`. 
+ /// + /// - Example: + /// ```swift + /// if let format = captureDevice.activeFormat { + /// let range = format.frameRateRange + /// print("Supported frame rates: \(range)") + /// } + /// ``` + var frameRateRange: ClosedRange { + let minFrameRate = videoSupportedFrameRateRanges + .map(\.minFrameRate) + .min() ?? 0 + let maxFrameRate = videoSupportedFrameRateRanges + .map(\.maxFrameRate) + .max() ?? 0 + + return (Int(minFrameRate)...Int(maxFrameRate)) + } +} + +extension Array where Element == AVCaptureDevice.Format { + enum Requirement { + case area(preferredDimensions: CMVideoDimensions) + case frameRate(preferredFrameRate: Int) + case minimumAreaDifference(preferredDimensions: CMVideoDimensions) + } + + func first(with requirements: [Requirement]) -> AVCaptureDevice.Format? { + var possibleResults = self + + for requirement in requirements { + switch requirement { + case let .area(preferredDimensions: preferredDimensions): + possibleResults = possibleResults.filter { $0.dimensions.area >= preferredDimensions.area } + case let .frameRate(preferredFrameRate: preferredFrameRate): + possibleResults = possibleResults.filter { $0.frameRateRange.contains(preferredFrameRate) } + case let .minimumAreaDifference(preferredDimensions): + let result = possibleResults + .min { $0.areaDiff(preferredDimensions) < $1.areaDiff(preferredDimensions) } + if let result { + possibleResults = [result] + } else { + possibleResults = [] + } + } + } + + return possibleResults.first + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/CoreGraphics/CGSize+Adapt.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/CoreGraphics/CGSize+Adapt.swift new file mode 100644 index 000000000..747f5667b --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/CoreGraphics/CGSize+Adapt.swift @@ -0,0 +1,31 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import CoreGraphics +import Foundation + +extension CGSize { + /// Adjusts the size to fit within the specified maximum size while maintaining the aspect ratio + /// and ensuring dimensions are safe multiples. + /// + /// - Parameter maxSize: The target size to fit within. + /// - Returns: A new `CGSize` with adjusted dimensions. + func adjusted(toFit maxSize: CGFloat) -> CGSize { + guard width > 0 && height > 0 && maxSize > 0 else { + return CGSize(width: 16, height: 16) // Minimum safe size + } + + // Determine aspect-fit dimensions + let isWider = width >= height + let ratio = isWider ? height / width : width / height + let fitWidth = isWider ? maxSize : ratio * maxSize + let fitHeight = isWider ? ratio * maxSize : maxSize + + // Ensure dimensions are safe multiples of 2 and at least 16 + let safeWidth = max(16, ceil(fitWidth / 2) * 2) + let safeHeight = max(16, ceil(fitHeight / 2) * 2) + + return CGSize(width: safeWidth, height: safeHeight) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/CoreGraphics/CGSize+DefaultValues.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/CoreGraphics/CGSize+DefaultValues.swift new file mode 100644 index 000000000..1d263cb1d --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/CoreGraphics/CGSize+DefaultValues.swift @@ -0,0 +1,39 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import CoreGraphics +import CoreMedia +import Foundation + +/// Extension providing default video dimensions and utility methods for `CGSize`. +extension CGSize { + /// Default dimensions for full quality video. + /// + /// Used for video layers with ``VideoLayer.Quality.full``. + static let full = CGSize(width: 1280, height: 720) + + /// Default dimensions for half quality video. + /// + /// Used for video layers with ``VideoLayer.Quality.half``. + static let half = CGSize(width: 640, height: 480) + + /// Default dimensions for quarter quality video. 
+ /// + /// Used for video layers with ``VideoLayer.Quality.quarter``. + static let quarter = CGSize(width: 480, height: 360) + + /// The total area of the `CGSize`, calculated as `width * height`. + var area: CGFloat { width * height } + + /// Initializes a `CGSize` from a `CMVideoDimensions` source. + /// + /// - Parameter source: The `CMVideoDimensions` containing width and height values. + /// - Converts the `CMVideoDimensions` values to `CGFloat` and assigns them to `CGSize`. + init(_ source: CMVideoDimensions) { + self = .init( + width: CGFloat(source.width), + height: CGFloat(source.height) + ) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/CoreMedia/CMVideoDimensions+DefaultValues.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/CoreMedia/CMVideoDimensions+DefaultValues.swift new file mode 100644 index 000000000..3be59ef40 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/CoreMedia/CMVideoDimensions+DefaultValues.swift @@ -0,0 +1,41 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import CoreMedia +import Foundation + +/// Extension providing default video dimensions and utility methods for `CMVideoDimensions`. +extension CMVideoDimensions { + + /// Represents full quality video dimensions (1280x720). + /// + /// Used for video layers with ``VideoLayer.Quality.full``. + public static var full = CMVideoDimensions(.full) + + /// Represents half quality video dimensions (640x480). + /// + /// Used for video layers with ``VideoLayer.Quality.half``. + public static var half = CMVideoDimensions(.half) + + /// Represents quarter quality video dimensions (480x360). + /// + /// Used for video layers with ``VideoLayer.Quality.quarter``. + public static var quarter = CMVideoDimensions(.quarter) + + /// The total area of the video dimensions, calculated as `width * height`. + /// + /// - Returns: The area of the dimensions as an `Int32`. 
+ var area: Int32 { width * height } + + /// Initializes a `CMVideoDimensions` instance from a `CGSize`. + /// + /// - Parameter source: The `CGSize` containing width and height values. + /// - Converts the `CGSize` dimensions into the required `Int32` format. + init(_ source: CGSize) { + self = .init( + width: Int32(source.width), + height: Int32(source.height) + ) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Array+RTCRtpEncodingParameters.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Array+RTCRtpEncodingParameters.swift new file mode 100644 index 000000000..28b23db04 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Array+RTCRtpEncodingParameters.swift @@ -0,0 +1,32 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// Extension providing utilities for arrays of `RTCRtpEncodingParameters`. +extension Array where Element: RTCRtpEncodingParameters { + + /// Prepares the array of `RTCRtpEncodingParameters` for use with SVC codecs. + /// + /// This method adjusts the encoding parameters if a Scalable Video Codec (SVC) + /// is being used. It filters the encodings to retain only the highest-quality + /// layer (`.full`) and modifies its `rid` (Restriction Identifier) to use + /// the lower-quality layer (`.quarter`) as required by certain SVC scenarios. + /// + /// - Parameter isSVC: A Boolean indicating whether an SVC codec is in use. + /// - Returns: The modified array of `RTCRtpEncodingParameters` if SVC is used, + /// or the original array if SVC is not used. + func prepareIfRequired(usesSVCCodec isSVC: Bool) -> Self { + guard isSVC else { + return self + } + + // Filter for the highest-quality layer and adjust its `rid` if necessary. 
+ let rewriteResult = filter { $0.rid == VideoLayer.Quality.full.rawValue } + rewriteResult.first?.rid = VideoLayer.Quality.quarter.rawValue + + return rewriteResult + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Comparable+Clamped.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Comparable+Clamped.swift new file mode 100644 index 000000000..8b3437e80 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Comparable+Clamped.swift @@ -0,0 +1,29 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// Extension providing a utility for clamping values within a range. +extension Comparable { + + /// Clamps a value to ensure it lies within a specified range. + /// + /// - Parameter limits: A `ClosedRange` specifying the minimum and maximum + /// bounds for the value. + /// - Returns: The clamped value, constrained to the given range. + /// + /// - Example: + /// ```swift + /// let value = 15 + /// let clampedValue = value.clamped(to: 10...20) + /// print(clampedValue) // 15 + /// + /// let outOfBoundsValue = 25 + /// let clampedOutOfBounds = outOfBoundsValue.clamped(to: 10...20) + /// print(clampedOutOfBounds) // 20 + /// ``` + func clamped(to limits: ClosedRange) -> Self { + min(max(self, limits.lowerBound), limits.upperBound) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Int+DefaultValues.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Int+DefaultValues.swift new file mode 100644 index 000000000..b9351de2e --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Int+DefaultValues.swift @@ -0,0 +1,31 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// Provides default values for frame rate and bitrate. +extension Int { + /// The default frame rate for video streams. + /// + /// Typically used for video publishing when no specific frame rate is set. 
+ public static let defaultFrameRate: Int = 30 + + /// The maximum bitrate for video streams, in bits per second. + /// + /// Used to limit the data rate for video publishing to optimize quality + /// and bandwidth usage. + public static let maxBitrate = 1_000_000 + + /// The maximum number of spatial layers for video streams. + /// + /// Spatial layers are used in scalable video encoding to allow the receiver + /// to adapt to varying network conditions or device capabilities. + public static let maxSpatialLayers = 3 + + /// The maximum number of temporal layers for video streams. + /// + /// Temporal layers allow for frame rate scalability in video streams, enabling + /// smoother playback or reduced data usage on low-bandwidth connections. + public static let maxTemporalLayers = 1 +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_Codec+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_Codec+Convenience.swift new file mode 100644 index 000000000..917da0c5d --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_Codec+Convenience.swift @@ -0,0 +1,31 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// Extension adding a convenience initializer for `Stream_Video_Sfu_Models_Codec`. +extension Stream_Video_Sfu_Models_Codec { + + /// Initializes a `Stream_Video_Sfu_Models_Codec` from an `RTCRtpCodecCapability`. + /// + /// This initializer converts the codec capability information from the WebRTC + /// layer (`RTCRtpCodecCapability`) into a `Stream_Video_Sfu_Models_Codec` model. + /// + /// - Parameter source: The `RTCRtpCodecCapability` to convert. + /// + /// - Note: + /// - `name` is mapped directly from the codec's name. + /// - `fmtp` represents the codec parameters formatted as a string. 
+ /// - `clockRate` is converted from the codec's `clockRate` to `UInt32` or + /// defaults to `0` if absent. + /// - `payloadType` is derived from the codec's `preferredPayloadType` or + /// defaults to `0` if absent. + init(_ source: RTCRtpCodecCapability) { + name = source.name + fmtp = source.fmtp + clockRate = source.clockRate?.uint32Value ?? 0 + payloadType = source.preferredPayloadType?.uint32Value ?? 0 + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_PublishOption+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_PublishOption+Convenience.swift new file mode 100644 index 000000000..b010843aa --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_PublishOption+Convenience.swift @@ -0,0 +1,66 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// Convenience initializers for `Stream_Video_Sfu_Models_PublishOption`. +extension Stream_Video_Sfu_Models_PublishOption { + + /// Initializes a `Stream_Video_Sfu_Models_PublishOption` with basic parameters. + /// + /// - Parameters: + /// - trackType: The type of track (e.g., audio, video, screen share). + /// - codec: The codec to use for the track. + /// - bitrate: The bitrate for the track, in bits per second. + /// - maxSpatialLayer: The maximum spatial layer (default is `.maxSpatialLayers`). + init( + trackType: Stream_Video_Sfu_Models_TrackType, + codec: Stream_Video_Sfu_Models_Codec, + bitrate: Int, + maxSpatialLayer: Int = .maxSpatialLayers + ) { + self.trackType = trackType + self.codec = codec + self.bitrate = Int32(bitrate) + maxSpatialLayers = Int32(maxSpatialLayer) + } + + /// Initializes a `Stream_Video_Sfu_Models_PublishOption` from an audio model. + /// + /// Converts an instance of `PublishOptions.AudioPublishOptions` into a + /// `Stream_Video_Sfu_Models_PublishOption`. 
+ /// + /// - Parameter source: The `AudioPublishOptions` to convert. + init(_ source: PublishOptions.AudioPublishOptions) { + trackType = .audio + codec = .init() + codec.name = source.codec.rawValue + bitrate = Int32(source.bitrate) + } + + /// Initializes a `Stream_Video_Sfu_Models_PublishOption` from a video model. + /// + /// Converts an instance of `PublishOptions.VideoPublishOptions` into a + /// `Stream_Video_Sfu_Models_PublishOption`. + /// + /// - Parameters: + /// - source: The `VideoPublishOptions` to convert. + /// - trackType: The type of track (e.g., video, screen share). + init( + _ source: PublishOptions.VideoPublishOptions, + trackType: Stream_Video_Sfu_Models_TrackType + ) { + self.trackType = trackType + codec = .init() + codec.name = source.codec.rawValue + bitrate = Int32(source.bitrate) + fps = Int32(source.frameRate) + videoDimension = .init() + videoDimension.width = UInt32(source.dimensions.width) + videoDimension.height = UInt32(source.dimensions.height) + maxSpatialLayers = Int32(source.capturingLayers.spatialLayers) + maxTemporalLayers = Int32(source.capturingLayers.temporalLayers) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoDimension+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoDimension+Convenience.swift new file mode 100644 index 000000000..94538524c --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoDimension+Convenience.swift @@ -0,0 +1,18 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// Extension providing convenience initializers for `Stream_Video_Sfu_Models_VideoDimension`. +extension Stream_Video_Sfu_Models_VideoDimension { + + /// Initializes a `Stream_Video_Sfu_Models_VideoDimension` from a `CGSize`. + /// + /// - Parameter size: The `CGSize` representing width and height in points. 
+ /// - Converts the `CGSize` dimensions into the required `UInt32` format. + init(_ size: CGSize) { + height = UInt32(size.height) + width = UInt32(size.width) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoLayer+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoLayer+Convenience.swift new file mode 100644 index 000000000..3bdeefeda --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoLayer+Convenience.swift @@ -0,0 +1,86 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +extension Stream_Video_Sfu_Models_VideoLayer { + + /// Initializes a `Stream_Video_Sfu_Models_VideoLayer` from a `VideoLayer`. + /// + /// - Parameters: + /// - codec: The `VideoLayer` instance containing quality and dimension details. + /// - fps: The frames per second for the video layer. Defaults to ``Int.defaultFrameRate``. + /// + /// - Note: + /// - Sets the `bitrate`, `rid` (Restriction Identifier), video dimensions, and quality + /// based on the provided `VideoLayer` instance. + init( + _ codec: VideoLayer, + fps: Int = .defaultFrameRate + ) { + bitrate = UInt32(codec.maxBitrate) + rid = codec.quality.rawValue + var dimension = Stream_Video_Sfu_Models_VideoDimension() + dimension.height = UInt32(codec.dimensions.height) + dimension.width = UInt32(codec.dimensions.width) + videoDimension = dimension + quality = codec.sfuQuality + self.fps = UInt32(fps) + } + + /// Initializes a `Stream_Video_Sfu_Models_VideoLayer` from encoding parameters. + /// + /// - Parameters: + /// - layer: The `RTCRtpEncodingParameters` containing encoding details. + /// - publishOptions: The `VideoPublishOptions` providing fallback configurations. 
+ /// + /// - Note: + /// - Uses `rid`, `maxBitrateBps`, and `maxFramerate` from the `layer` if available; + /// otherwise, defaults to values from `publishOptions`. + /// - Logs warnings if any invalid configurations are detected: + /// - `rid` longer or shorter than 1 character. + /// - `bitrate` set to `0`. + /// - `fps` (frames per second) set to `0`. + /// - Missing video dimensions. + init( + _ layer: RTCRtpEncodingParameters, + publishOptions: PublishOptions.VideoPublishOptions + ) { + rid = layer.rid ?? (publishOptions.codec.isSVC ? "q" : "") + bitrate = layer.maxBitrateBps?.uint32Value ?? UInt32(publishOptions.bitrate) + fps = layer.maxFramerate?.uint32Value ?? UInt32(publishOptions.frameRate) + videoDimension = .init() + videoDimension.width = UInt32(publishOptions.dimensions.width) + videoDimension.height = UInt32(publishOptions.dimensions.height) + + if rid.count != 1 { + log.warning( + "Stream_Video_Sfu_Models_VideoLayer with rid longer/smaller than 1 character is invalid.", + subsystems: .webRTC + ) + } + + if bitrate == 0 { + log.warning( + "Stream_Video_Sfu_Models_VideoLayer with bitrate=0 is invalid.", + subsystems: .webRTC + ) + } + + if fps == 0 { + log.warning( + "Stream_Video_Sfu_Models_VideoLayer with fps=0 is invalid.", + subsystems: .webRTC + ) + } + + if !hasVideoDimension { + log.warning( + "Stream_Video_Sfu_Models_VideoLayer without videoDimension is invalid.", + subsystems: .webRTC + ) + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoQuality+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoQuality+Convenience.swift new file mode 100644 index 000000000..638aebe2f --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Models_VideoQuality+Convenience.swift @@ -0,0 +1,32 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import Foundation + +/// Extension adding a convenience initializer for `Stream_Video_Sfu_Models_VideoQuality`. +extension Stream_Video_Sfu_Models_VideoQuality { + + /// Initializes a `Stream_Video_Sfu_Models_VideoQuality` from a `VideoLayer.Quality`. + /// + /// This initializer maps the `VideoLayer.Quality` levels to their corresponding + /// `Stream_Video_Sfu_Models_VideoQuality` values, ensuring compatibility between + /// the two models. + /// + /// - Parameter source: The `VideoLayer.Quality` value to convert. + /// + /// - Mapping: + /// - `.full` maps to `.high`. + /// - `.half` maps to `.mid`. + /// - `.quarter` maps to `.lowUnspecified`. + init(_ source: VideoLayer.Quality) { + switch source { + case .full: + self = .high + case .half: + self = .mid + case .quarter: + self = .lowUnspecified + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift new file mode 100644 index 000000000..0d6105aa6 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift @@ -0,0 +1,30 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// Extension providing a convenience initializer for +/// `Stream_Video_Sfu_Signal_TrackSubscriptionDetails`. +extension Stream_Video_Sfu_Signal_TrackSubscriptionDetails { + + /// Initializes a `TrackSubscriptionDetails` instance for a specific user and session. + /// + /// - Parameters: + /// - userId: The ID of the user associated with the track subscription. + /// - sessionId: The session ID for the user's subscription. + /// - size: The optional video dimension (`CGSize`) for the track. Defaults to `nil`. + /// - type: The type of track (e.g., audio, video, screen share). 
+ init( + for userId: String, + sessionId: String, + size: CGSize? = nil, + type: Stream_Video_Sfu_Models_TrackType + ) { + userID = userId + dimension = size.map { Stream_Video_Sfu_Models_VideoDimension($0) } + ?? Stream_Video_Sfu_Models_VideoDimension() + sessionID = sessionId + trackType = type + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Models_VideoDimension+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Models_VideoDimension+Convenience.swift deleted file mode 100644 index fa7487c13..000000000 --- a/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Models_VideoDimension+Convenience.swift +++ /dev/null @@ -1,12 +0,0 @@ -// -// Copyright © 2024 Stream.io Inc. All rights reserved. -// - -import Foundation - -extension Stream_Video_Sfu_Models_VideoDimension { - init(_ size: CGSize) { - height = UInt32(size.height) - width = UInt32(size.width) - } -} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Models_VideoLayer+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Models_VideoLayer+Convenience.swift deleted file mode 100644 index 40794385d..000000000 --- a/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Models_VideoLayer+Convenience.swift +++ /dev/null @@ -1,22 +0,0 @@ -// -// Copyright © 2024 Stream.io Inc. All rights reserved. 
-// - -import Foundation - -extension Stream_Video_Sfu_Models_VideoLayer { - - init( - _ codec: VideoLayer, - fps: UInt32 = 30 - ) { - bitrate = UInt32(codec.maxBitrate) - rid = codec.quality.rawValue - var dimension = Stream_Video_Sfu_Models_VideoDimension() - dimension.height = UInt32(codec.dimensions.height) - dimension.width = UInt32(codec.dimensions.width) - videoDimension = dimension - quality = codec.sfuQuality - self.fps = fps - } -} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift deleted file mode 100644 index 5e2f7138d..000000000 --- a/Sources/StreamVideo/WebRTC/v2/Extensions/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift +++ /dev/null @@ -1,19 +0,0 @@ -// -// Copyright © 2024 Stream.io Inc. All rights reserved. -// - -import Foundation - -extension Stream_Video_Sfu_Signal_TrackSubscriptionDetails { - init( - for userId: String, - sessionId: String, - size: CGSize? = nil, - type: Stream_Video_Sfu_Models_TrackType - ) { - userID = userId - dimension = size.map { Stream_Video_Sfu_Models_VideoDimension($0) } ?? Stream_Video_Sfu_Models_VideoDimension() - sessionID = sessionId - trackType = type - } -} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/WebRTC/RTCRtpCodecCapability+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/WebRTC/RTCRtpCodecCapability+Convenience.swift new file mode 100644 index 000000000..49224cde5 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/WebRTC/RTCRtpCodecCapability+Convenience.swift @@ -0,0 +1,179 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// Extension providing utility properties for `RTCRtpCodecCapability`. +extension RTCRtpCodecCapability { + /// A formatted string representation of codec parameters (`fmtp`). 
+ /// + /// Converts the codec parameters dictionary into a single string where + /// each key-value pair is separated by `=` and entries are joined by `;`. + var fmtp: String { + parameters + .map { "\($0.key)=\($0.value)" } + .joined(separator: ";") + } +} + +/// Extension providing utility methods for sequences of `RTCRtpCodecCapability`. +extension Sequence where Element == RTCRtpCodecCapability { + + /// Retrieves the baseline codec for a given video codec. + /// + /// - Parameter videoCodec: The video codec to filter and prioritize. + /// - Returns: The most suitable `RTCRtpCodecCapability` for the given codec. + func baseline(for videoCodec: VideoCodec) -> RTCRtpCodecCapability? { + filter { $0.name.lowercased() == videoCodec.rawValue } + .sorted(by: videoCodec.baselineComparator) + .first + } + + /// Retrieves the baseline codec for a given audio codec. + /// + /// - Parameter audioCodec: The audio codec to filter and prioritize. + /// - Returns: The most suitable `RTCRtpCodecCapability` for the given codec. + func baseline(for audioCodec: AudioCodec) -> RTCRtpCodecCapability? { + filter { $0.name.lowercased() == audioCodec.rawValue } + .sorted(by: audioCodec.baselineComparator) + .first + } +} + +/// Extension providing prioritization rules for `VideoCodec`. +extension VideoCodec { + /// A comparator used to prioritize baseline codec capabilities. + /// + /// The comparator defines rules for ordering codecs, where codecs with + /// desirable properties (e.g., baseline profiles) are prioritized. + fileprivate var baselineComparator: (RTCRtpCodecCapability, RTCRtpCodecCapability) -> Bool { + switch self { + case .h264: + return h264Comparator + case .vp8: + return noOpComparator + case .vp9: + return vp9Comparator + case .av1: + return noOpComparator + case .none: + return noOpComparator + } + } +} + +/// Extension providing prioritization rules for `AudioCodec`. +extension AudioCodec { + /// A comparator used to prioritize baseline codec capabilities. 
+ /// + /// The comparator currently applies no specific ordering for audio codecs. + fileprivate var baselineComparator: (RTCRtpCodecCapability, RTCRtpCodecCapability) -> Bool { + switch self { + case .none: + return noOpComparator + case .opus: + return noOpComparator + case .red: + return noOpComparator + } + } +} + +// MARK: - Private + +/// A no-op comparator that maintains the original codec order. +/// +/// - Parameters: +/// - lhs: The first `RTCRtpCodecCapability`. +/// - rhs: The second `RTCRtpCodecCapability`. +/// - Returns: Always returns `true`, maintaining the original order. +private func noOpComparator( + lhs: RTCRtpCodecCapability, + rhs: RTCRtpCodecCapability +) -> Bool { true } + +/// A comparator for prioritizing H264 codec capabilities. +/// +/// - Parameters: +/// - lhs: The first `RTCRtpCodecCapability`. +/// - rhs: The second `RTCRtpCodecCapability`. +/// - Returns: `true` if `lhs` is prioritized over `rhs`, otherwise `false`. +private func h264Comparator( + lhs: RTCRtpCodecCapability, + rhs: RTCRtpCodecCapability +) -> Bool { + let aMimeType = lhs.mimeType.lowercased() + let bMimeType = rhs.mimeType.lowercased() + + // Ensure comparison only applies to H264 codecs. + guard aMimeType == "video/h264", bMimeType == "video/h264" else { + return false + } + + let aFmtpLine = lhs.fmtp + let bFmtpLine = rhs.fmtp + + // Prioritize codecs with baseline profile-level-id=42. + let aIsBaseline = aFmtpLine.contains("profile-level-id=42") + let bIsBaseline = bFmtpLine.contains("profile-level-id=42") + if aIsBaseline && !bIsBaseline { + return true + } + if !aIsBaseline && bIsBaseline { + return false + } + + // Prioritize codecs with packetization-mode=0 or none. 
+ let aPacketizationMode0 = aFmtpLine.contains("packetization-mode=0") || + !aFmtpLine.contains("packetization-mode") + let bPacketizationMode0 = bFmtpLine.contains("packetization-mode=0") || + !bFmtpLine.contains("packetization-mode") + if aPacketizationMode0 && !bPacketizationMode0 { + return true + } + if !aPacketizationMode0 && bPacketizationMode0 { + return false + } + + // Maintain original order if all conditions are equal. + return false +} + +/// A comparator for prioritizing VP9 codec capabilities. +/// +/// - Parameters: +/// - lhs: The first `RTCRtpCodecCapability`. +/// - rhs: The second `RTCRtpCodecCapability`. +/// - Returns: `true` if `lhs` is prioritized over `rhs`, otherwise `false`. +private func vp9Comparator( + lhs: RTCRtpCodecCapability, + rhs: RTCRtpCodecCapability +) -> Bool { + let aMimeType = lhs.mimeType.lowercased() + let bMimeType = rhs.mimeType.lowercased() + + // Ensure comparison only applies to VP9 codecs. + guard aMimeType == "video/vp9", bMimeType == "video/vp9" else { + return false + } + + let aFmtpLine = lhs.fmtp + let bFmtpLine = rhs.fmtp + + // Prioritize codecs with profile-id=0 or none. + let aIsProfile0 = aFmtpLine.contains("profile-id=0") || + !aFmtpLine.contains("profile-id") + let bIsProfile0 = bFmtpLine.contains("profile-id=0") || + !bFmtpLine.contains("profile-id") + if aIsProfile0 && !bIsProfile0 { + return true + } + if !aIsProfile0 && bIsProfile0 { + return false + } + + // Maintain original order if all conditions are equal. 
+ return false +} diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpEncodingParameters+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpEncodingParameters+Convenience.swift index c9f3ba2d5..f53b0255a 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpEncodingParameters+Convenience.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpEncodingParameters+Convenience.swift @@ -14,9 +14,11 @@ extension RTCRtpEncodingParameters { /// /// - Parameter codec: The `VideoCodec` instance to use for initializing the encoding parameters. /// - /// - Note: The `rid` (Restriction Identifier) is set to the codec's quality. - /// The `maxBitrateBps` is set to the codec's maximum bitrate. - /// If the codec has a `scaleDownFactor`, it's applied to `scaleResolutionDownBy`. + /// - Note: + /// - The `rid` (Restriction Identifier) is set to the codec's quality. + /// - The `maxBitrateBps` is set to the codec's maximum bitrate. + /// - If the codec has a `scaleDownFactor`, it's applied to `scaleResolutionDownBy`. + /// - For scalable codecs (SVC), the default `scalabilityMode` is set to `"L3T2_KEY"`. convenience init( _ layer: VideoLayer, preferredVideoCodec: VideoCodec? @@ -32,4 +34,41 @@ extension RTCRtpEncodingParameters { } } } + + /// Convenience initializer to create an `RTCRtpEncodingParameters` instance. + /// + /// This initializer configures the RTP encoding parameters using the properties + /// of a given `VideoLayer` and `VideoPublishOptions`. It is particularly useful + /// for setting up video track encoding with scalability and resolution settings. + /// + /// - Parameters: + /// - layer: The `VideoLayer` representing the quality, bitrate, and scaling + /// properties for the video encoding. + /// - videoPublishOptions: The `VideoPublishOptions` specifying codec, + /// frame rate, and capturing layers for the video track. 
+ /// + /// - Note: + /// - The `rid` (Restriction Identifier) is set to the layer's quality value. + /// - The `maxFramerate` is derived from the `frameRate` in `videoPublishOptions`. + /// - The `maxBitrateBps` is set to the layer's maximum bitrate. + /// - For scalable codecs (SVC), the `scalabilityMode` is derived from + /// `videoPublishOptions.capturingLayers.scalabilityMode`. + /// - If the codec is not SVC, the `scaleResolutionDownBy` is applied + /// based on the `scaleDownFactor` from the `layer`. + convenience init( + _ layer: VideoLayer, + videoPublishOptions: PublishOptions.VideoPublishOptions + ) { + self.init() + rid = layer.quality.rawValue + maxFramerate = (videoPublishOptions.frameRate) as NSNumber + maxBitrateBps = (layer.maxBitrate) as NSNumber + if videoPublishOptions.codec.isSVC { + scalabilityMode = videoPublishOptions.capturingLayers.scalabilityMode + } else { + if let scaleDownFactor = layer.scaleDownFactor { + scaleResolutionDownBy = (scaleDownFactor) as NSNumber + } + } + } } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpTransceiverInit+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpTransceiverInit+Convenience.swift index 9276a57e2..8760d2941 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpTransceiverInit+Convenience.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Extensions/RTCRtpTransceiverInit+Convenience.swift @@ -6,44 +6,149 @@ import Foundation import StreamWebRTC extension RTCRtpTransceiverInit { - /// Convenience initializer for creating an `RTCRtpTransceiverInit` with specific parameters. + + /// Creates a temporary `RTCRtpTransceiverInit` instance for a specific track type. + /// + /// This utility method is used to create a temporary configuration for a transceiver, + /// tailored to the given track type (audio, video, or screen share). 
It provides + /// a default setup suitable for testing or initialization scenarios where the + /// specific track details are not yet available. + /// + /// - Parameter trackType: The type of track (e.g., audio, video, or screen share). + /// - Returns: A configured `RTCRtpTransceiverInit` instance. + /// + /// - Note: + /// - For `.audio`, a temporary stream ID of `"temp-audio"` is used with a default + /// audio configuration (`AudioPublishOptions` with `.none` codec). + /// - For `.video` or `.screenshare`, a temporary stream ID is assigned based on + /// the track type (`"temp-video"` for video or `"temp-screenshare"` for screen share), + /// with default `VideoPublishOptions` using the H.264 codec. + /// - For other cases, an empty `RTCRtpTransceiverInit` instance is returned. + static func temporary( + trackType: TrackType + ) -> RTCRtpTransceiverInit { + switch trackType { + case .audio: + return .init( + direction: .sendOnly, + streamIds: ["temp-audio"], + audioOptions: .init(id: 0, codec: .none) + ) + case .video, .screenshare: + let streamId = trackType == .video ? "temp-video" : "temp-screenshare" + return .init( + trackType: trackType, + direction: .sendOnly, + streamIds: [streamId], + videoOptions: .init(codec: .h264) + ) + default: + return .init() + } + } + + /// Convenience initializer for creating an `RTCRtpTransceiverInit` for audio tracks. /// - /// This initializer provides a more Swift-friendly way to create an `RTCRtpTransceiverInit` object, - /// allowing you to specify the track type, direction, stream IDs, and optional video codecs. + /// This initializer provides a streamlined way to set up a transceiver specifically + /// for audio tracks, allowing configuration of the direction and associated stream IDs. /// /// - Parameters: - /// - trackType: The type of track (e.g., audio, video, or screenshare). /// - direction: The desired direction for the transceiver (e.g., sendRecv, sendOnly, recvOnly). 
/// - streamIds: An array of stream IDs associated with this transceiver. - /// - codecs: An optional array of video codecs to be used. If provided, these will be used to create RTP encoding parameters. + /// - audioOptions: The `AudioPublishOptions` defining codec and bitrate configurations + /// for the audio track. /// - /// - Note: If the track type is screenshare, all send encodings will be set to active. + /// - Note: + /// - The `direction` determines how the transceiver interacts with the track (e.g., sending, + /// receiving, or both). + /// - The `audioOptions` provide the necessary details for setting up the audio codec and + /// configuring the bitrate for optimal performance. + convenience init( + direction: RTCRtpTransceiverDirection, + streamIds: [String], + audioOptions: PublishOptions.AudioPublishOptions + ) { + self.init() + self.direction = direction + self.streamIds = streamIds + + log.debug( + """ + RTCRtpTransceiverInit from AudioPublishOptions: + AudioCodec: \(audioOptions.codec) + Bitrate: \(audioOptions.bitrate) + """ + ) + } + + /// Convenience initializer for creating an `RTCRtpTransceiverInit` for video tracks. + /// + /// This initializer simplifies the creation of a transceiver specifically for + /// video tracks, allowing configuration of the track type, direction, associated + /// stream IDs, and video publishing options. + /// + /// - Parameters: + /// - trackType: The type of track (e.g., video or screen share). + /// - direction: The desired direction for the transceiver (e.g., sendRecv, sendOnly, recvOnly). + /// - streamIds: An array of stream IDs associated with this transceiver. + /// - videoOptions: The `VideoPublishOptions` specifying codec, frame rate, bitrate, + /// capturing layers, and resolution for the video track. + /// + /// - Note: + /// - Video layers are generated using `VideoLayerFactory` based on the provided + /// `videoOptions` and track type. 
+ /// - If the codec supports SVC (Scalable Video Coding), the send encodings are + /// filtered to retain only the highest quality (`.full`) layer, and its `rid` + /// (Restriction Identifier) is adjusted to use the `.quarter` layer. + /// - For screen share tracks, all send encodings are set to active. convenience init( trackType: TrackType, direction: RTCRtpTransceiverDirection, streamIds: [String], - layers: [VideoLayer]? = nil, - preferredVideoCodec: VideoCodec? = nil + videoOptions: PublishOptions.VideoPublishOptions ) { self.init() self.direction = direction self.streamIds = streamIds - if let layers { - var sendEncodings = layers - .map { RTCRtpEncodingParameters($0, preferredVideoCodec: preferredVideoCodec) } - - if preferredVideoCodec?.isSVC == true { - sendEncodings = sendEncodings - .filter { $0.rid == "f" } - sendEncodings.first?.rid = "q" - self.sendEncodings = sendEncodings - } else { - self.sendEncodings = sendEncodings - } + + let publishOption = Stream_Video_Sfu_Models_PublishOption( + videoOptions, + trackType: trackType == .video ? 
.video : .screenShare + ) + let videoLayers = VideoLayerFactory() + .videoLayers(for: publishOption) + + var sendEncodings = videoLayers + .map { RTCRtpEncodingParameters($0, videoPublishOptions: videoOptions) } + + if videoOptions.codec.isSVC { + sendEncodings = sendEncodings + .filter { $0.rid == VideoLayer.full.quality.rawValue } + sendEncodings.first?.rid = VideoLayer.quarter.quality.rawValue } - + if trackType == .screenshare { sendEncodings.forEach { $0.isActive = true } } + + self.sendEncodings = sendEncodings + + log.debug( + """ + RTCRtpTransceiverInit from VideoPublishOptions: + VideoCodec: \(videoOptions.codec) + Bitrate: \(videoOptions.bitrate) + FrameRate: \(videoOptions.frameRate) + Dimensions: \(videoOptions.dimensions) + CapturingLayers + Spatial: \(videoOptions.capturingLayers.spatialLayers) + Temporal: \(videoOptions.capturingLayers.temporalLayers) + ScalabilityMode: \(videoOptions.capturingLayers.scalabilityMode) + + Created with: + VideoLayers: \(videoLayers.map(\.quality.rawValue).joined(separator: ",")) + SendEncodings: \(sendEncodings.compactMap(\.rid).joined(separator: ",")) + """ + ) } } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift index d7315f6a6..28cdc671f 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter.swift @@ -33,14 +33,6 @@ final class AudioMediaAdapter: MediaAdapting, @unchecked Sendable { /// A subject for publishing track events. let subject: PassthroughSubject - /// The local audio track, if available. - var localTrack: RTCMediaStreamTrack? { - (localMediaManager as? LocalAudioMediaAdapter)?.localTrack - } - - /// The mid (Media Stream Identification) of the local audio track, if available. - var mid: String? { (localMediaManager as? 
LocalAudioMediaAdapter)?.mid } - /// Convenience initializer for creating an AudioMediaAdapter with a LocalAudioMediaAdapter. /// /// - Parameters: @@ -55,6 +47,7 @@ final class AudioMediaAdapter: MediaAdapting, @unchecked Sendable { peerConnection: StreamRTCPeerConnectionProtocol, peerConnectionFactory: PeerConnectionFactory, sfuAdapter: SFUAdapter, + publishOptions: [PublishOptions.AudioPublishOptions], subject: PassthroughSubject ) { self.init( @@ -66,6 +59,7 @@ final class AudioMediaAdapter: MediaAdapting, @unchecked Sendable { peerConnection: peerConnection, peerConnectionFactory: peerConnectionFactory, sfuAdapter: sfuAdapter, + publishOptions: publishOptions, subject: subject ), subject: subject @@ -124,6 +118,10 @@ final class AudioMediaAdapter: MediaAdapting, @unchecked Sendable { ) } + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] { + localMediaManager.trackInfo() + } + /// Updates the audio media based on new call settings. /// /// - Parameter settings: The updated call settings. @@ -131,6 +129,19 @@ final class AudioMediaAdapter: MediaAdapting, @unchecked Sendable { try await localMediaManager.didUpdateCallSettings(settings) } + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) async throws { + try await localMediaManager.didUpdatePublishOptions(publishOptions) + } + + func changePublishQuality( + with layerSettings: [Stream_Video_Sfu_Event_AudioSender] + ) { + (localMediaManager as? LocalAudioMediaAdapter)? + .changePublishQuality(with: layerSettings) + } + // MARK: - Observers /// Adds a new audio stream and notifies observers. 
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift index 68b0462a6..845e1533c 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter.swift @@ -24,20 +24,19 @@ final class LocalAudioMediaAdapter: LocalMediaAdapting { /// The adapter for communicating with the Selective Forwarding Unit (SFU). private var sfuAdapter: SFUAdapter + private var publishOptions: [PublishOptions.AudioPublishOptions] + /// The stream identifiers for this audio adapter. private let streamIds: [String] - /// The local audio track. - private(set) var localTrack: RTCAudioTrack? + private let transceiverStorage = MediaTransceiverStorage(for: .audio) + + private let primaryTrack: RTCAudioTrack /// The RTP transceiver for sending audio. - private var sender: RTCRtpTransceiver? private var lastUpdatedCallSettings: CallSettings.Audio? - /// The mid (Media Stream Identification) of the sender. - var mid: String? { sender?.mid } - /// A publisher that emits track events. 
let subject: PassthroughSubject @@ -55,32 +54,36 @@ final class LocalAudioMediaAdapter: LocalMediaAdapting { peerConnection: StreamRTCPeerConnectionProtocol, peerConnectionFactory: PeerConnectionFactory, sfuAdapter: SFUAdapter, + publishOptions: [PublishOptions.AudioPublishOptions], subject: PassthroughSubject ) { self.sessionID = sessionID self.peerConnection = peerConnection self.peerConnectionFactory = peerConnectionFactory self.sfuAdapter = sfuAdapter + self.publishOptions = publishOptions self.subject = subject + let source = peerConnectionFactory.makeAudioSource(.defaultConstraints) + let track = peerConnectionFactory.makeAudioTrack(source: source) + primaryTrack = track streamIds = ["\(sessionID):audio"] + + track.isEnabled = false } /// Cleans up resources when the instance is deallocated. deinit { - Task { @MainActor [sender, localTrack] in - sender?.sender.track = nil - localTrack?.isEnabled = false - } - if let localTrack { - log.debug( - """ - Local audioTrack will be deallocated - trackId:\(localTrack.trackId) - isEnabled:\(localTrack.isEnabled) - """, - subsystems: .webRTC - ) + Task { @MainActor [transceiverStorage] in + transceiverStorage.removeAll() } + log.debug( + """ + Local audioTracks will be deallocated + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) } // MARK: - LocalMediaManaging @@ -94,91 +97,70 @@ final class LocalAudioMediaAdapter: LocalMediaAdapting { with settings: CallSettings, ownCapabilities: [OwnCapability] ) async throws { - let hasAudio = ownCapabilities.contains(.sendAudio) - - if - hasAudio, - localTrack == nil || localTrack?.isEnabled == false - { - let audioConstrains = RTCMediaConstraints( - mandatoryConstraints: nil, - optionalConstraints: nil - ) - let audioSource = peerConnectionFactory - .makeAudioSource(audioConstrains) - let audioTrack = peerConnectionFactory - 
.makeAudioTrack(source: audioSource) - - if sender == nil, settings.audioOn { - sender = peerConnection.addTransceiver( - with: audioTrack, - init: RTCRtpTransceiverInit( - trackType: .audio, - direction: .sendOnly, - streamIds: streamIds - ) - ) - } - /// This is important to be false once we setUp as the activation will happen once - /// publish is called (in order also to inform the SFU via the didUpdateCallSettings). - audioTrack.isEnabled = false - - log.debug( - """ - AudioTrack generated - address:\(Unmanaged.passUnretained(audioTrack).toOpaque()) - trackId:\(audioTrack.trackId) - mid: \(sender?.mid ?? "-") - """ - ) - - subject.send( - .added( - id: sessionID, - trackType: .audio, - track: audioTrack - ) + subject.send( + .added( + id: sessionID, + trackType: .audio, + track: primaryTrack ) - - localTrack = audioTrack - } else if !hasAudio { - localTrack?.isEnabled = false - } + ) } /// Starts publishing the local audio track. func publish() { Task { @MainActor in guard - let localTrack, - localTrack.isEnabled == false || sender?.sender.track == nil + !primaryTrack.isEnabled else { return } - if sender == nil { - sender = peerConnection.addTransceiver( - with: localTrack, - init: RTCRtpTransceiverInit( - trackType: .audio, - direction: .sendOnly, - streamIds: streamIds + primaryTrack.isEnabled = true + + publishOptions + .forEach { + addOrUpdateTransceiver( + for: $0, + with: primaryTrack.clone(from: peerConnectionFactory) ) - ) - } else { - sender?.sender.track = localTrack - } - localTrack.isEnabled = true + } + + await audioRecorder.startRecording() + + log.debug( + """ + Local audioTracks are now published + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) } } /// Stops publishing the local audio track. 
func unpublish() { - Task { @MainActor in - guard let sender, let localTrack else { return } - localTrack.isEnabled = false - sender.sender.track = nil - log.debug("Local audioTrack trackId:\(localTrack.trackId) is now unpublished.") + Task { @MainActor [weak self] in + guard + let self, + primaryTrack.isEnabled + else { + return + } + + primaryTrack.isEnabled = false + + transceiverStorage + .forEach { $0.value.sender.track?.isEnabled = false } + + log.debug( + """ + Local audioTracks are now unpublished: + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) } } @@ -188,15 +170,13 @@ final class LocalAudioMediaAdapter: LocalMediaAdapting { func didUpdateCallSettings( _ settings: CallSettings ) async throws { - guard let localTrack else { return } - guard lastUpdatedCallSettings != settings.audio else { return } let isMuted = !settings.audioOn - let isLocalMuted = localTrack.isEnabled == false - + let isLocalMuted = primaryTrack.isEnabled == false + if isMuted != isLocalMuted { try await sfuAdapter.updateTrackMuteState( .audio, @@ -205,15 +185,103 @@ final class LocalAudioMediaAdapter: LocalMediaAdapting { ) } - if isMuted, localTrack.isEnabled == true { + if isMuted, primaryTrack.isEnabled == true { unpublish() } else if !isMuted { publish() - await audioRecorder.startRecording() } lastUpdatedCallSettings = settings.audio } + + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) async throws { + self.publishOptions = publishOptions.audio + + guard primaryTrack.isEnabled else { return } + + for publishOption in self.publishOptions { + addOrUpdateTransceiver( + for: publishOption, + with: primaryTrack.clone(from: peerConnectionFactory) + ) + } + + let activePublishOptions = Set(self.publishOptions) + + transceiverStorage + .filter { !activePublishOptions.contains($0.key) } + .forEach { 
$0.value.sender.track = nil } + + log.debug( + """ + Local audioTracks updated with: + PublishOptions: + \(self.publishOptions) + + TransceiverStorage: + \(transceiverStorage) + """, + subsystems: .webRTC + ) + } + + func changePublishQuality( + with layerSettings: [Stream_Video_Sfu_Event_AudioSender] + ) { /* No-op */ } + + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] { + transceiverStorage + .filter { $0.value.sender.track != nil } + .map { _, transceiver in + var trackInfo = Stream_Video_Sfu_Models_TrackInfo() + trackInfo.trackType = .audio + trackInfo.trackID = transceiver.sender.track?.trackId ?? "" + trackInfo.mid = transceiver.mid + trackInfo.muted = transceiver.sender.track?.isEnabled ?? true + return trackInfo + } + } + + // MARK: - Private Helpers + + private func addOrUpdateTransceiver( + for options: PublishOptions.AudioPublishOptions, + with track: RTCAudioTrack + ) { + if let transceiver = transceiverStorage.get(for: options) { + transceiver.sender.track = track + } else { + let transceiver = peerConnection.addTransceiver( + with: track, + init: .init( + direction: .sendOnly, + streamIds: streamIds, + audioOptions: options + ) + ) + transceiverStorage.set(transceiver, for: options) + } + } +} + +extension RTCAudioTrack { + + func clone(from factory: PeerConnectionFactory) -> RTCAudioTrack { + let result = factory.makeAudioTrack(source: source) + result.isEnabled = isEnabled + return result + } +} + +extension RTCVideoTrack { + + func clone(from factory: PeerConnectionFactory) -> RTCVideoTrack { + let result = factory.makeVideoTrack(source: source) + result.isEnabled = isEnabled + return result + } } extension CallSettings { diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalNoOpMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalNoOpMediaAdapter.swift index 6e5017438..91977fa30 100644 --- 
a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalNoOpMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalNoOpMediaAdapter.swift @@ -50,10 +50,16 @@ final class LocalNoOpMediaAdapter: LocalMediaAdapting { /* No-op */ } + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] { [] } + /// A no-op implementation of the method to handle updated call settings. /// /// - Parameter settings: Ignored in this implementation. func didUpdateCallSettings(_ settings: CallSettings) async throws { /* No-op */ } + + func didUpdatePublishOptions(_ publishOptions: PublishOptions) async throws { + /* No-op */ + } } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift index 81223d5a3..20d7f8125 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift @@ -20,26 +20,20 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl private let peerConnectionFactory: PeerConnectionFactory /// Adapter for communicating with the Selective Forwarding Unit (SFU). private var sfuAdapter: SFUAdapter - /// Options for configuring video behavior. - private let videoOptions: VideoOptions - /// Configuration settings for video. - private let videoConfig: VideoConfig + + private var publishOptions: [PublishOptions.VideoPublishOptions] /// The factory for creating the capturer. private let capturerFactory: VideoCapturerProviding /// Provider for screen sharing session information. private let screenShareSessionProvider: ScreenShareSessionProvider - - /// The local video track used for screen sharing. 
- private(set) var localTrack: RTCVideoTrack? /// The type of screen sharing currently active. - private var screenSharingType: ScreensharingType? - /// The video capturer used to capture screen content. - private var capturer: VideoCapturing? - /// The RTP transceiver used to send the screen sharing track. - private var sender: RTCRtpTransceiver? - /// The media stream identifier (mid) of the sender. - var mid: String? { sender?.mid } + private let primaryTrack: RTCVideoTrack + + /// The screenshare capturer. + private var capturer: StreamVideoCapturer? + + private let transceiverStorage = MediaTransceiverStorage(for: .screenshare) /// A subject for publishing track-related events. let subject: PassthroughSubject @@ -51,8 +45,6 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl /// - peerConnection: The peer connection used for WebRTC communication. /// - peerConnectionFactory: Factory for creating peer connection related objects. /// - sfuAdapter: Adapter for communicating with the Selective Forwarding Unit (SFU). - /// - videoOptions: Options for configuring video behavior. - /// - videoConfig: Configuration settings for video. /// - subject: A subject for publishing track-related events. /// - screenShareSessionProvider: Provider for screen sharing session information. 
init( @@ -60,8 +52,7 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl peerConnection: StreamRTCPeerConnectionProtocol, peerConnectionFactory: PeerConnectionFactory, sfuAdapter: SFUAdapter, - videoOptions: VideoOptions, - videoConfig: VideoConfig, + publishOptions: [PublishOptions.VideoPublishOptions], subject: PassthroughSubject, screenShareSessionProvider: ScreenShareSessionProvider, capturerFactory: VideoCapturerProviding = StreamVideoCapturerFactory() @@ -70,20 +61,36 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl self.peerConnection = peerConnection self.peerConnectionFactory = peerConnectionFactory self.sfuAdapter = sfuAdapter - self.videoOptions = videoOptions - self.videoConfig = videoConfig + self.publishOptions = publishOptions self.subject = subject self.screenShareSessionProvider = screenShareSessionProvider self.capturerFactory = capturerFactory - - localTrack = screenShareSessionProvider.activeSession?.localTrack - capturer = screenShareSessionProvider.activeSession?.capturer - screenSharingType = screenShareSessionProvider.activeSession?.screenSharingType + primaryTrack = { + if let activeSession = screenShareSessionProvider.activeSession { + return activeSession.localTrack + } else { + return peerConnectionFactory.makeVideoTrack( + source: peerConnectionFactory.makeVideoSource(forScreenShare: true) + ) + } + }() + primaryTrack.isEnabled = false } /// Cleans up resources when the instance is being deallocated. 
deinit { - sender?.sender.track = nil + Task { @MainActor [transceiverStorage] in + transceiverStorage.removeAll() + } + + log.debug( + """ + Local screenShareTracks will be deallocated + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) } // MARK: - LocalMediaManaging @@ -97,64 +104,83 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl with settings: CallSettings, ownCapabilities: [OwnCapability] ) async throws { - /* No-op */ + subject.send( + .added( + id: sessionID, + trackType: .screenshare, + track: primaryTrack + ) + ) } /// Publishes the local screen sharing track to the peer connection. func publish() { - guard - let localTrack, - let screenSharingType, - let capturer, - localTrack.isEnabled == false || sender == nil - else { - return - } + Task { @MainActor in + guard + !primaryTrack.isEnabled, + let activeSession = screenShareSessionProvider.activeSession + else { + return + } - if sender == nil { - sender = peerConnection.addTransceiver( - with: localTrack, - init: RTCRtpTransceiverInit( - trackType: .screenshare, - direction: .sendOnly, - streamIds: ["\(sessionID)-screenshare-\(screenSharingType)"], - layers: [VideoLayer.screenshare] - ) - ) - } else { - sender?.sender.track = localTrack - } - Task { do { - try await capturer.startCapture(device: nil) + try await startScreenShareCapturingSession() + primaryTrack.isEnabled = true + + publishOptions + .forEach { + addOrUpdateTransceiver( + for: $0, + with: primaryTrack.clone(from: peerConnectionFactory), + screenSharingType: activeSession.screenSharingType + ) + } + + log.debug( + """ + Local screenShareTracks are now published + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) 
} catch { - log.error(error, subsystems: .webRTC) + log.error(error) } } - localTrack.isEnabled = true - log.debug("Local screenShareTrack trackId:\(localTrack.trackId) is now published.") } /// Unpublishes the local screen sharing track from the peer connection. func unpublish() { - guard let sender, let localTrack else { return } - Task { + Task { @MainActor [weak self] in do { - try await capturer?.stopCapture() + guard + let self, + primaryTrack.isEnabled, + screenShareSessionProvider.activeSession != nil + else { + return + } + + primaryTrack.isEnabled = false + + transceiverStorage + .forEach { $0.value.sender.track?.isEnabled = false } + + try await stopScreenShareCapturingSession() + + log.debug( + """ + Local screenShareTracks are now unpublished: + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) } catch { log.error(error, subsystems: .webRTC) } } - sender.sender.track = nil - localTrack.isEnabled = false - log.debug( - """ - Local screenShareTrack is now unpublished - trackId: \(localTrack.trackId) - screenSharingType: \(String(describing: screenSharingType)) - """, - subsystems: .webRTC - ) } /// Updates the call settings. 
@@ -166,6 +192,67 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl /* No-op */ } + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) async throws { + self.publishOptions = publishOptions.screenShare + + guard + primaryTrack.isEnabled, + let activeSession = screenShareSessionProvider.activeSession + else { return } + + for publishOption in self.publishOptions { + addOrUpdateTransceiver( + for: publishOption, + with: primaryTrack.clone(from: peerConnectionFactory), + screenSharingType: activeSession.screenSharingType + ) + } + + let activePublishOptions = Set(self.publishOptions) + + transceiverStorage + .filter { !activePublishOptions.contains($0.key) } + .forEach { $0.value.sender.track = nil } + + log.debug( + """ + Local screenShareTracks updated with: + PublishOptions: + \(self.publishOptions.map { "\($0)" }.joined(separator: "\n")) + + TransceiverStorage: + \(transceiverStorage) + """, + subsystems: .webRTC + ) + } + + func changePublishQuality( + with layerSettings: [Stream_Video_Sfu_Event_VideoSender] + ) { + /* No-op */ + } + + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] { + transceiverStorage + .filter { $0.value.sender.track != nil } + .compactMap { publishOptions, transceiver in + var trackInfo = Stream_Video_Sfu_Models_TrackInfo() + trackInfo.trackType = .screenShare + trackInfo.trackID = transceiver.sender.track?.trackId ?? "" + trackInfo.layers = transceiver + .sender + .parameters + .encodings + .map { Stream_Video_Sfu_Models_VideoLayer($0, publishOptions: publishOptions) } + trackInfo.mid = transceiver.mid + trackInfo.muted = transceiver.sender.track?.isEnabled ?? true + return trackInfo + } + } + // MARK: - Screensharing /// Begins screen sharing of the specified type. 
@@ -177,21 +264,20 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl of type: ScreensharingType, ownCapabilities: [OwnCapability] ) async throws { - let hasScreenShare = ownCapabilities.contains(.screenshare) - - guard hasScreenShare else { return } - - if type != screenSharingType { - localTrack = nil - sender?.sender.track = nil - sender?.stopInternal() - sender = nil + guard ownCapabilities.contains(.screenshare) else { + try await stopScreenShareCapturingSession() + return } - if localTrack == nil { - try await makeVideoTrack(type) + guard screenShareSessionProvider.activeSession == nil || !primaryTrack.isEnabled else { + return } + try await configureActiveScreenShareSession( + screenSharingType: type, + track: primaryTrack + ) + try await sfuAdapter.updateTrackMuteState( .screenShare, isMuted: false, @@ -199,14 +285,6 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl ) publish() - - if let localTrack, let screenSharingType, let capturer { - screenShareSessionProvider.activeSession = .init( - localTrack: localTrack, - screenSharingType: screenSharingType, - capturer: capturer - ) - } } /// Stops the current screen sharing session. @@ -218,48 +296,129 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl ) unpublish() - - screenShareSessionProvider.activeSession = nil } // MARK: - Private helpers - /// Creates a new video track for screen sharing. - /// - /// - Parameter screenSharingType: The type of screen sharing to set up. 
- private func makeVideoTrack( - _ screenSharingType: ScreensharingType + private func addOrUpdateTransceiver( + for options: PublishOptions.VideoPublishOptions, + with track: RTCVideoTrack, + screenSharingType: ScreensharingType + ) { + if let transceiver = transceiverStorage.get(for: options) { + transceiver.sender.track = track + } else { + let transceiver = peerConnection.addTransceiver( + with: track, + init: .init( + trackType: .screenshare, + direction: .sendOnly, + streamIds: ["\(sessionID)-screenshare-\(screenSharingType)"], + videoOptions: options + ) + ) + transceiverStorage.set(transceiver, for: options) + } + } + + private func configureActiveScreenShareSession( + screenSharingType: ScreensharingType, + track: RTCVideoTrack ) async throws { - let videoSource = peerConnectionFactory - .makeVideoSource(forScreenShare: true) - let videoTrack = peerConnectionFactory.makeVideoTrack(source: videoSource) - self.screenSharingType = screenSharingType - localTrack = videoTrack - videoTrack.isEnabled = false + if screenShareSessionProvider.activeSession == nil { + let videoCapturer = capturerFactory.buildScreenCapturer( + screenSharingType, + source: track.source + ) + capturer = videoCapturer + + screenShareSessionProvider.activeSession = .init( + localTrack: track, + screenSharingType: screenSharingType, + capturer: videoCapturer + ) + } else if + let activeSession = screenShareSessionProvider.activeSession, + activeSession.screenSharingType != screenSharingType { + try await stopScreenShareCapturingSession() + + let videoCapturer = capturerFactory.buildScreenCapturer( + screenSharingType, + source: track.source + ) + capturer = videoCapturer + + screenShareSessionProvider.activeSession = .init( + localTrack: track, + screenSharingType: screenSharingType, + capturer: videoCapturer + ) + } + } + + private func startScreenShareCapturingSession() async throws { + let capturingDimension = publishOptions + .map(\.dimensions) + .max(by: { $0.width < $1.width && 
$0.height < $1.height }) + let frameRate = publishOptions.map(\.frameRate).max() + + guard + let activeSession = screenShareSessionProvider.activeSession, + let capturingDimension, + let frameRate + else { + log.debug( + """ + Active screenShare capture session hasn't been configured for capturing. + isActiveSessionAlive: \(screenShareSessionProvider.activeSession != nil) + CapturingDimensions: \(capturingDimension ?? .zero) + FrameRate: \(frameRate ?? 0) + """, + subsystems: .webRTC + ) + return + } + + try await activeSession.capturer.startCapture( + dimensions: capturingDimension, + frameRate: frameRate + ) log.debug( """ - ScreenShareTrack generated - address:\(Unmanaged.passUnretained(videoTrack).toOpaque()) - trackId:\(videoTrack.trackId) - mid: \(sender?.mid ?? "-") - screenSharingType: \(screenSharingType) - """ + Active screenShare capture session started + capturingDimension: \(capturingDimension) + frameRate: \(frameRate) + track: \(activeSession.localTrack.trackId) + capturer: \(activeSession.capturer) + """, + subsystems: .webRTC ) + } - subject.send( - .added( - id: sessionID, - trackType: .screenshare, - track: videoTrack + private func stopScreenShareCapturingSession() async throws { + guard + let activeSession = screenShareSessionProvider.activeSession + else { + log.debug( + """ + Active screenShare capture session hasn't been configured for capturing. 
+ isActiveSessionAlive: \(screenShareSessionProvider.activeSession != nil) + """, + subsystems: .webRTC ) - ) + return + } + try await activeSession.capturer.stopCapture() + screenShareSessionProvider.activeSession = nil - capturer = capturerFactory.buildScreenCapturer( - screenSharingType, - source: videoSource, - options: videoOptions, - filters: videoConfig.videoFilters + log.debug( + """ + Active screenShare capture session stopped + track: \(activeSession.localTrack.trackId) + capturer: \(activeSession.capturer) + """, + subsystems: .webRTC ) } } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift index 87bdca1ba..9868e2e5b 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift @@ -11,6 +11,7 @@ import StreamWebRTC final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { @Injected(\.videoCapturePolicy) private var videoCapturePolicy + @Injected(\.captureDeviceProvider) private var captureDeviceProvider /// The unique identifier for the current session. private let sessionID: String @@ -30,29 +31,28 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { /// The video configuration for the call. private let videoConfig: VideoConfig + private var publishOptions: [PublishOptions.VideoPublishOptions] + /// The factory for creating the capturer. private let capturerFactory: VideoCapturerProviding /// The stream identifiers for this video adapter. private let streamIds: [String] - private let videoCaptureSessionProvider: VideoCaptureSessionProvider - - /// The local video track. - private(set) var localTrack: RTCVideoTrack? 
+ private let transceiverStorage = MediaTransceiverStorage(for: .video) - /// The video capturer. - private var capturer: CameraVideoCapturing? + private let primaryTrack: RTCVideoTrack - /// The RTP transceiver for sending video. - private var sender: RTCRtpTransceiver? + private let videoCaptureSessionProvider: VideoCaptureSessionProvider - /// The mid (Media Stream Identification) of the sender. - var mid: String? { sender?.mid } + /// The video capturer. + private var capturer: StreamVideoCapturer? /// A publisher that emits track events. let subject: PassthroughSubject + private let disposableBag = DisposableBag() + /// Initializes a new instance of the local video media adapter. /// /// - Parameters: @@ -62,6 +62,7 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { /// - sfuAdapter: The adapter for communicating with the SFU. /// - videoOptions: The video options for the call. /// - videoConfig: The video configuration for the call. + /// - publishOptions: The video publish options used to create/update the sending transceivers. /// - subject: A publisher that emits track events.
init( sessionID: String, @@ -70,6 +71,7 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { sfuAdapter: SFUAdapter, videoOptions: VideoOptions, videoConfig: VideoConfig, + publishOptions: [PublishOptions.VideoPublishOptions], subject: PassthroughSubject, capturerFactory: VideoCapturerProviding = StreamVideoCapturerFactory(), videoCaptureSessionProvider: VideoCaptureSessionProvider @@ -80,15 +82,37 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { self.sfuAdapter = sfuAdapter self.videoOptions = videoOptions self.videoConfig = videoConfig + self.publishOptions = publishOptions self.subject = subject self.capturerFactory = capturerFactory self.videoCaptureSessionProvider = videoCaptureSessionProvider + primaryTrack = { + if let activeSession = videoCaptureSessionProvider.activeSession { + return activeSession.localTrack + } else { + return peerConnectionFactory.makeVideoTrack( + source: peerConnectionFactory.makeVideoSource(forScreenShare: false) + ) + } + }() streamIds = ["\(sessionID):video"] + primaryTrack.isEnabled = false } /// Cleans up resources when the instance is deallocated. deinit { - sender?.sender.track = nil + Task { @MainActor [transceiverStorage] in + transceiverStorage.removeAll() + } + + log.debug( + """ + Local videoTracks will be deallocated + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) } // MARK: - LocalMediaManaging @@ -102,46 +126,56 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { with settings: CallSettings, ownCapabilities: [OwnCapability] ) async throws { - let hasVideo = ownCapabilities.contains(.sendVideo) - - if hasVideo, localTrack == nil { - try await makeVideoTrack( - settings.cameraPosition == .front ? 
.front : .back + subject.send( + .added( + id: sessionID, + trackType: .video, + track: primaryTrack ) - if sender == nil, settings.videoOn, let localTrack { - setUpTransceiverIfRequired(localTrack) - } - } else if !hasVideo { - Task { [weak self] in - do { - try await self?.capturer?.stopCapture() - } catch { - log.error(error) - } - } + ) + + guard ownCapabilities.contains(.sendVideo) else { + try await videoCaptureSessionProvider.activeSession?.capturer.stopCapture() + videoCaptureSessionProvider.activeSession = nil + log.debug("Active video capture session stopped because user has no capabilities for video.") + return } + + try await configureActiveVideoCaptureSession( + position: settings.cameraPosition == .back ? .back : .front, + track: primaryTrack + ) } /// Starts publishing the local video track. func publish() { - Task { @MainActor [weak self] in + Task { @MainActor in guard - let self, - let localTrack, - localTrack.isEnabled == false || sender == nil, - let activeSession = videoCaptureSessionProvider.activeSession + !primaryTrack.isEnabled else { return } do { - try await activeSession.capturer.startCapture( - device: activeSession.device + try await startVideoCapturingSession() + primaryTrack.isEnabled = true + + publishOptions + .forEach { + addOrUpdateTransceiver( + for: $0, + with: primaryTrack.clone(from: peerConnectionFactory) + ) + } + + log.debug( + """ + Local videoTracks are now published + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC ) - - setUpTransceiverIfRequired(localTrack) - localTrack.isEnabled = true - log.debug("Local videoTrack trackId:\(localTrack.trackId) is now published.") } catch { log.error(error) } @@ -151,15 +185,32 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { /// Stops publishing the local video track. 
func unpublish() { Task { @MainActor [weak self] in - guard - let self, - let sender, - let localTrack - else { return } - sender.sender.track = nil - localTrack.isEnabled = false - try? await capturer?.stopCapture() - log.debug("Local videoTrack trackId:\(localTrack.trackId) is now unpublished.") + do { + guard + let self, + primaryTrack.isEnabled + else { + return + } + + primaryTrack.isEnabled = false + + transceiverStorage + .forEach { $0.value.sender.track?.isEnabled = false } + + try await stopVideoCapturingSession() + + log.debug( + """ + Local videoTracks are now unpublished: + primary: \(primaryTrack.trackId) isEnabled:\(primaryTrack.isEnabled) + clones: \(transceiverStorage.compactMap(\.value.sender.track?.trackId).joined(separator: ",")) + """, + subsystems: .webRTC + ) + } catch { + log.error(error, subsystems: .webRTC) + } } } @@ -169,26 +220,208 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { func didUpdateCallSettings( _ settings: CallSettings ) async throws { - guard let localTrack else { return } let isMuted = !settings.videoOn - let isLocalMuted = localTrack.isEnabled == false - guard isMuted != isLocalMuted || sender == nil else { - return - } + let isLocalMuted = primaryTrack.isEnabled == false - try await sfuAdapter.updateTrackMuteState( - .video, - isMuted: isMuted, - for: sessionID - ) + if isMuted != isLocalMuted { + try await sfuAdapter.updateTrackMuteState( + .video, + isMuted: isMuted, + for: sessionID + ) + } - if isMuted, localTrack.isEnabled { + if isMuted, primaryTrack.isEnabled { unpublish() } else if !isMuted { publish() } } + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) async throws { + self.publishOptions = publishOptions.video + + guard primaryTrack.isEnabled else { return } + + for publishOption in self.publishOptions { + addOrUpdateTransceiver( + for: publishOption, + with: primaryTrack.clone(from: peerConnectionFactory) + ) + } + + let activePublishOptions = 
Set(self.publishOptions) + + transceiverStorage + .filter { !activePublishOptions.contains($0.key) } + .forEach { $0.value.sender.track = nil } + + log.debug( + """ + Local videoTracks updated with: + PublishOptions: + \(self.publishOptions.map { "\($0)" }.joined(separator: "\n")) + + TransceiverStorage: + \(transceiverStorage) + """, + subsystems: .webRTC + ) + } + + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] { + transceiverStorage + .filter { $0.value.sender.track != nil } + .compactMap { publishOptions, transceiver in + var trackInfo = Stream_Video_Sfu_Models_TrackInfo() + trackInfo.trackType = .video + trackInfo.trackID = transceiver.sender.track?.trackId ?? "" + trackInfo.layers = transceiver + .sender + .parameters + .encodings + .map { Stream_Video_Sfu_Models_VideoLayer($0, publishOptions: publishOptions) } + trackInfo.mid = transceiver.mid + trackInfo.muted = transceiver.sender.track?.isEnabled ?? true + return trackInfo + } + } + + /// Changes the publishing quality based on active encodings. + /// + /// - Parameter activeEncodings: The set of active encoding identifiers. 
+ func changePublishQuality( + with layerSettings: [Stream_Video_Sfu_Event_VideoSender] + ) { + for videoSender in layerSettings { + guard + let codec = VideoCodec(rawValue: videoSender.codec.name), + let transceiver = transceiverStorage.get(for: PublishOptions.VideoPublishOptions( + id: Int(videoSender.publishOptionID), + codec: codec + )) + else { + continue + } + + var hasChanges = false + let params = transceiver + .sender + .parameters + + guard + !params.encodings.isEmpty + else { + log.warning("Update publish quality, No suitable video encoding quality found", subsystems: .webRTC) + return + } + + let isUsingSVCCodec = { + if let preferredCodec = params.codecs.first { + return VideoCodec(preferredCodec).isSVC + } else { + return false + } + }() + var updatedEncodings = [RTCRtpEncodingParameters]() + + for encoding in params.encodings { + let layerSettings = isUsingSVCCodec + // for SVC, we only have one layer (q) and often rid is omitted + ? videoSender.layers.first + // for non-SVC, we need to find the layer by rid (simulcast) + : videoSender.layers.first(where: { $0.name == encoding.rid }) + + // flip 'active' flag only when necessary + if layerSettings?.active != encoding.isActive { + encoding.isActive = layerSettings?.active ?? false + hasChanges = true + } + + // skip the rest of the settings if the layer is disabled or not found + guard let layerSettings else { + updatedEncodings.append(encoding) + continue + } + + if + layerSettings.scaleResolutionDownBy >= 1, + layerSettings.scaleResolutionDownBy != Float(truncating: encoding.scaleResolutionDownBy ?? 0) + { + encoding.scaleResolutionDownBy = .init(value: layerSettings.scaleResolutionDownBy) + hasChanges = true + } + + if + layerSettings.maxBitrate > 0, + layerSettings.maxBitrate != Int32(truncating: encoding.maxBitrateBps ?? 
0) + { + encoding.maxBitrateBps = .init(value: layerSettings.maxBitrate) + hasChanges = true + } + + if + layerSettings.maxFramerate > 0, + layerSettings.maxFramerate != Int32(truncating: encoding.maxFramerate ?? 0) + { + encoding.maxFramerate = .init(value: layerSettings.maxFramerate) + hasChanges = true + } + + if + !layerSettings.scalabilityMode.isEmpty, + layerSettings.scalabilityMode != encoding.scalabilityMode + { + encoding.scalabilityMode = layerSettings.scalabilityMode + hasChanges = true + } + + updatedEncodings.append(encoding) + } + + let activeLayers = videoSender + .layers + .filter { $0.active } + .map { + let value = [ + "name:\($0.name)", + "scaleResolutionDownBy:\($0.scaleResolutionDownBy)", + "maxBitrate:\($0.maxBitrate)", + "maxFramerate:\($0.maxFramerate)", + "scalabilityMode:\($0.scalabilityMode)" + ] + return "[\(value.joined(separator: ","))]" + } + + guard hasChanges else { + log.info( + "Update publish quality, no change: \(activeLayers.joined(separator: ","))", + subsystems: .webRTC + ) + return + } + log.info( + "Update publish quality, enabled rids: \(activeLayers.joined(separator: ","))", + subsystems: .webRTC + ) + params.encodings = updatedEncodings + transceiver.sender.parameters = params + } + + Task { @MainActor in + do { + try await videoCapturePolicy.updateCaptureQuality( + with: .init(layerSettings.map(\.name)), + for: videoCaptureSessionProvider.activeSession + ) + } catch { + log.error(error) + } + } + } + // MARK: - Camera Video /// Updates the camera position. @@ -197,28 +430,33 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { func didUpdateCameraPosition( _ position: AVCaptureDevice.Position ) async throws { - try await capturer?.setCameraPosition(position) + try await configureActiveVideoCaptureSession( + position: position, + track: primaryTrack + ) } /// Sets a video filter. /// /// - Parameter videoFilter: The video filter to apply. func setVideoFilter(_ videoFilter: VideoFilter?) 
{ - capturer?.setVideoFilter(videoFilter) + Task { [weak self] in + await self?.capturer?.setVideoFilter(videoFilter) + }.store(in: disposableBag, key: "\(#function)") } /// Zooms the camera by a given factor. /// /// - Parameter factor: The zoom factor. - func zoom(by factor: CGFloat) throws { - try capturer?.zoom(by: factor) + func zoom(by factor: CGFloat) async throws { + try await capturer?.zoom(by: factor) } /// Focuses the camera at a given point. /// /// - Parameter point: The point to focus on. - func focus(at point: CGPoint) throws { - try capturer?.focus(at: point) + func focus(at point: CGPoint) async throws { + try await capturer?.focus(at: point) } /// Adds a video output to the capture session. @@ -226,8 +464,8 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { /// - Parameter videoOutput: The video output to add. func addVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try capturer?.addVideoOutput(videoOutput) + ) async throws { + try await capturer?.addVideoOutput(videoOutput) } /// Removes a video output from the capture session. @@ -235,8 +473,8 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { /// - Parameter videoOutput: The video output to remove. func removeVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try capturer?.removeVideoOutput(videoOutput) + ) async throws { + try await capturer?.removeVideoOutput(videoOutput) } /// Adds a photo output to the capture session. @@ -244,8 +482,8 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { /// - Parameter capturePhotoOutput: The photo output to add. func addCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try capturer? + ) async throws { + try await capturer? 
.addCapturePhotoOutput(capturePhotoOutput) } @@ -254,221 +492,155 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable { /// - Parameter capturePhotoOutput: The photo output to remove. func removeCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try capturer? + ) async throws { + try await capturer? .removeCapturePhotoOutput(capturePhotoOutput) } - /// Changes the publishing quality based on active encodings. - /// - /// - Parameter activeEncodings: The set of active encoding identifiers. - func changePublishQuality( - with layerSettings: [Stream_Video_Sfu_Event_VideoLayerSetting] - ) { - guard - let sender, - !layerSettings.isEmpty - else { - return - } - - var hasChanges = false - let params = sender - .sender - .parameters - - guard - !params.encodings.isEmpty - else { - log.warning("Update publish quality, No suitable video encoding quality found", subsystems: .webRTC) - return - } - - let isUsingSVCCodec = { - if - let preferredCodec = params.codecs.first, - let videoCodec = VideoCodec(preferredCodec) { - return videoCodec.isSVC - } else { - return false - } - }() - var updatedEncodings = [RTCRtpEncodingParameters]() - - for encoding in params.encodings { - let layerSettings = isUsingSVCCodec - // for SVC, we only have one layer (q) and often rid is omitted - ? layerSettings.first - // for non-SVC, we need to find the layer by rid (simulcast) - : layerSettings.first(where: { $0.name == encoding.rid }) - - // flip 'active' flag only when necessary - if layerSettings?.active != encoding.isActive { - encoding.isActive = layerSettings?.active ?? false - hasChanges = true - } - - // skip the rest of the settings if the layer is disabled or not found - guard let layerSettings else { - updatedEncodings.append(encoding) - continue - } - - if - layerSettings.scaleResolutionDownBy >= 1, - layerSettings.scaleResolutionDownBy != Float(truncating: encoding.scaleResolutionDownBy ?? 
0) - { - encoding.scaleResolutionDownBy = .init(value: layerSettings.scaleResolutionDownBy) - hasChanges = true - } - - if - layerSettings.maxBitrate > 0, - layerSettings.maxBitrate != Int32(truncating: encoding.maxBitrateBps ?? 0) - { - encoding.maxBitrateBps = .init(value: layerSettings.maxBitrate) - hasChanges = true - } - - if - layerSettings.maxFramerate > 0, - layerSettings.maxFramerate != Int32(truncating: encoding.maxFramerate ?? 0) - { - encoding.maxFramerate = .init(value: layerSettings.maxFramerate) - hasChanges = true - } + // MARK: - Private helpers - if - !layerSettings.scalabilityMode.isEmpty, - layerSettings.scalabilityMode != encoding.scalabilityMode - { - encoding.scalabilityMode = layerSettings.scalabilityMode - hasChanges = true - } + private func configureActiveVideoCaptureSession( + position: AVCaptureDevice.Position, + track: RTCVideoTrack + ) async throws { + if videoCaptureSessionProvider.activeSession == nil { + let cameraCapturer = capturerFactory.buildCameraCapturer( + source: track.source + ) + capturer = cameraCapturer - updatedEncodings.append(encoding) + videoCaptureSessionProvider.activeSession = .init( + position: position, + device: nil, + localTrack: track, + capturer: cameraCapturer + ) + } else if + let activeSession = videoCaptureSessionProvider.activeSession, + activeSession.device == nil, + activeSession.position != position { + videoCaptureSessionProvider.activeSession = .init( + position: position, + device: nil, + localTrack: activeSession.localTrack, + capturer: activeSession.capturer + ) + } else if + let activeSession = videoCaptureSessionProvider.activeSession, + activeSession.position != position { + // We are currently capturing + let device = captureDeviceProvider.device(for: position) + videoCaptureSessionProvider.activeSession = .init( + position: position, + device: device, + localTrack: activeSession.localTrack, + capturer: activeSession.capturer + ) + try await 
activeSession.capturer.setCameraPosition(position) } + } - let activeLayers = layerSettings - .filter { $0.active } - .map { - let value = [ - "name:\($0.name)", - "scaleResolutionDownBy:\($0.scaleResolutionDownBy)", - "maxBitrate:\($0.maxBitrate)", - "maxFramerate:\($0.maxFramerate)", - "scalabilityMode:\($0.scalabilityMode)" - ] - return "[\(value.joined(separator: ","))]" - } + private func startVideoCapturingSession() async throws { + let capturingDimension = publishOptions + .map(\.dimensions) + .max(by: { $0.width < $1.width && $0.height < $1.height }) + let frameRate = publishOptions.map(\.frameRate).max() - guard hasChanges else { - log.info( - "Update publish quality, no change: \(activeLayers.joined(separator: ","))", + guard + let activeSession = videoCaptureSessionProvider.activeSession, + activeSession.device == nil, + let device = captureDeviceProvider.device(for: activeSession.position), + let capturingDimension, + let frameRate + else { + log.debug( + """ + Active video capture session hasn't been configured for capturing. + isActiveSessionAlive: \(videoCaptureSessionProvider.activeSession != nil) + isCapturingDeviceAlive: \(videoCaptureSessionProvider.activeSession?.device != nil) + CapturingDimensions: \(capturingDimension ?? .zero) + FrameRate: \(frameRate ?? 
0) + """, subsystems: .webRTC ) return } - log.info( - "Update publish quality, enabled rids: \(activeLayers.joined(separator: ","))", - subsystems: .webRTC + + try await activeSession.capturer.startCapture( + position: activeSession.position, + dimensions: capturingDimension, + frameRate: frameRate + ) + videoCaptureSessionProvider.activeSession = .init( + position: activeSession.position, + device: device, + localTrack: activeSession.localTrack, + capturer: activeSession.capturer ) - params.encodings = updatedEncodings - sender.sender.parameters = params - Task { @MainActor in - do { - try await videoCapturePolicy.updateCaptureQuality( - with: .init(layerSettings.map(\.name)), - for: videoCaptureSessionProvider.activeSession - ) - } catch { - log.error(error) - } - } + log.debug( + """ + Active video capture session started + position: \(activeSession.position) + device: \(device) + track: \(activeSession.localTrack.trackId) + capturer: \(activeSession.capturer) + """, + subsystems: .webRTC + ) } - // MARK: - Private helpers - - /// Creates a new video track with the specified camera position. - /// - /// - Parameter position: The camera position to use. 
- private func makeVideoTrack( - _ position: AVCaptureDevice.Position - ) async throws { - if + private func stopVideoCapturingSession() async throws { + guard let activeSession = videoCaptureSessionProvider.activeSession, - activeSession.position == position { - capturer = activeSession.capturer - localTrack = activeSession.localTrack - localTrack?.isEnabled = false - - subject.send( - .added( - id: sessionID, - trackType: .video, - track: activeSession.localTrack - ) - ) - } else { - let videoSource = peerConnectionFactory - .makeVideoSource(forScreenShare: false) - let videoTrack = peerConnectionFactory.makeVideoTrack(source: videoSource) - localTrack = videoTrack - /// This is important to be false once we setUp as the activation will happen once - /// publish is called (in order also to inform the SFU via the didUpdateCallSettings). - videoTrack.isEnabled = false - + activeSession.device != nil + else { log.debug( """ - VideoTrack generated - address:\(Unmanaged.passUnretained(videoTrack).toOpaque()) - trackId:\(videoTrack.trackId) - mid: \(sender?.mid ?? "-") - """ - ) - - subject.send( - .added( - id: sessionID, - trackType: .video, - track: videoTrack - ) - ) - - let cameraCapturer = capturerFactory.buildCameraCapturer( - source: videoSource, - options: videoOptions, - filters: videoConfig.videoFilters - ) - capturer = cameraCapturer - - let device = cameraCapturer.capturingDevice(for: position) - try await cameraCapturer.startCapture(device: device) - - videoCaptureSessionProvider.activeSession = .init( - position: position, - device: device, - localTrack: videoTrack, - capturer: cameraCapturer + Active video capture session hasn't been configured for capturing. 
+ isActiveSessionAlive: \(videoCaptureSessionProvider.activeSession != nil) + isCapturingDeviceAlive: \(videoCaptureSessionProvider.activeSession?.device != nil) + """, + subsystems: .webRTC ) + return } + try await activeSession.capturer.stopCapture() + videoCaptureSessionProvider.activeSession = .init( + position: activeSession.position, + device: nil, + localTrack: activeSession.localTrack, + capturer: activeSession.capturer + ) + + log.debug( + """ + Active video capture session stopped + position: \(activeSession.position) + track: \(activeSession.localTrack.trackId) + capturer: \(activeSession.capturer) + """, + subsystems: .webRTC + ) } - private func setUpTransceiverIfRequired(_ localTrack: RTCVideoTrack) { - if sender == nil { - sender = peerConnection.addTransceiver( - with: localTrack, - init: RTCRtpTransceiverInit( + private func addOrUpdateTransceiver( + for options: PublishOptions.VideoPublishOptions, + with track: RTCVideoTrack + ) { + if let transceiver = transceiverStorage.get(for: options) { + transceiver.sender.track = track + } else { + let transceiver = peerConnection.addTransceiver( + with: track, + init: .init( trackType: .video, direction: .sendOnly, streamIds: streamIds, - layers: videoOptions.videoLayers, - preferredVideoCodec: videoOptions.preferredVideoCodec + videoOptions: options ) ) - } else { - sender?.sender.track = localTrack + transceiverStorage.set(transceiver, for: options) } } } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift index 1d85afdd8..04f7b9f6d 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift @@ -39,6 +39,7 @@ final class MediaAdapter { /// - sfuAdapter: The adapter for communicating with the SFU. /// - videoOptions: The video options for the call. 
/// - videoConfig: The video configuration for the call. + /// - publishOptions: The options describing the audio, video and screen-share tracks to publish. /// - audioSession: The audio session manager. /// - videoCaptureSessionProvider: Provides access to the active video capturing session. /// - screenShareSessionProvider: Provides access to the active screen sharing session. @@ -50,6 +51,7 @@ final class MediaAdapter { sfuAdapter: SFUAdapter, videoOptions: VideoOptions, videoConfig: VideoConfig, + publishOptions: PublishOptions, videoCaptureSessionProvider: VideoCaptureSessionProvider, screenShareSessionProvider: ScreenShareSessionProvider ) { @@ -90,6 +92,7 @@ final class MediaAdapter { peerConnection: peerConnection, peerConnectionFactory: peerConnectionFactory, sfuAdapter: sfuAdapter, + publishOptions: publishOptions.audio, subject: subject ), videoMediaAdapter: .init( @@ -99,6 +102,7 @@ final class MediaAdapter { sfuAdapter: sfuAdapter, videoOptions: videoOptions, videoConfig: videoConfig, + publishOptions: publishOptions.video, subject: subject, videoCaptureSessionProvider: videoCaptureSessionProvider ), @@ -107,8 +111,7 @@ final class MediaAdapter { peerConnection: peerConnection, peerConnectionFactory: peerConnectionFactory, sfuAdapter: sfuAdapter, - videoOptions: videoOptions, - videoConfig: videoConfig, + publishOptions: publishOptions.screenShare, subject: subject, screenShareSessionProvider: screenShareSessionProvider ) @@ -186,37 +189,65 @@ final class MediaAdapter { } } - /// Returns the local track for the specified track type. - /// - /// - Parameter type: The type of track to retrieve. - /// - Returns: The local media track, if available. - func localTrack(of type: TrackType) -> RTCMediaStreamTrack? 
{ + func trackInfo(for type: TrackType) -> [Stream_Video_Sfu_Models_TrackInfo] { switch type { case .audio: - return audioMediaAdapter.localTrack + return audioMediaAdapter.trackInfo() case .video: - return videoMediaAdapter.localTrack + return videoMediaAdapter.trackInfo() case .screenshare: - return screenShareMediaAdapter.localTrack + return screenShareMediaAdapter.trackInfo() default: - return nil + return [] } } - /// Returns the mid (Media Stream Identification) for the specified track type. + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) async throws { + try await withThrowingTaskGroup(of: Void.self) { [audioMediaAdapter, videoMediaAdapter, screenShareMediaAdapter] group in + group.addTask { + try await audioMediaAdapter.didUpdatePublishOptions(publishOptions) + } + + group.addTask { + try await videoMediaAdapter.didUpdatePublishOptions(publishOptions) + } + + group.addTask { + try await screenShareMediaAdapter.didUpdatePublishOptions(publishOptions) + } + + while try await group.next() != nil {} + } + } + + /// Changes the publishing quality based on active encodings. /// - /// - Parameter type: The type of track to retrieve the mid for. - /// - Returns: The mid of the track, if available. - func mid(for type: TrackType) -> String? { - switch type { - case .audio: - return audioMediaAdapter.mid - case .video: - return videoMediaAdapter.mid - case .screenshare: - return screenShareMediaAdapter.mid - default: - return nil + /// - Parameter activeEncodings: The set of active encoding identifiers. 
+ func changePublishQuality( + with event: Stream_Video_Sfu_Event_ChangePublishQuality + ) async { + await withTaskGroup(of: Void.self) { [audioMediaAdapter, videoMediaAdapter, screenShareMediaAdapter] group in + group.addTask { + audioMediaAdapter.changePublishQuality( + with: event.audioSenders.filter { $0.trackType == .audio } + ) + } + + group.addTask { + videoMediaAdapter.changePublishQuality( + with: event.videoSenders.filter { $0.trackType == .video } + ) + } + + group.addTask { + screenShareMediaAdapter.changePublishQuality( + with: event.videoSenders.filter { $0.trackType == .screenShare } + ) + } + + while await group.next() != nil {} } } @@ -241,15 +272,15 @@ final class MediaAdapter { /// Zooms the camera by a given factor. /// /// - Parameter factor: The zoom factor. - func zoom(by factor: CGFloat) throws { - try videoMediaAdapter.zoom(by: factor) + func zoom(by factor: CGFloat) async throws { + try await videoMediaAdapter.zoom(by: factor) } /// Focuses the camera at a given point. /// /// - Parameter point: The point to focus on. - func focus(at point: CGPoint) throws { - try videoMediaAdapter.focus(at: point) + func focus(at point: CGPoint) async throws { + try await videoMediaAdapter.focus(at: point) } /// Adds a video output to the capture session. @@ -257,8 +288,8 @@ final class MediaAdapter { /// - Parameter videoOutput: The video output to add. func addVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try videoMediaAdapter.addVideoOutput(videoOutput) + ) async throws { + try await videoMediaAdapter.addVideoOutput(videoOutput) } /// Removes a video output from the capture session. @@ -266,8 +297,8 @@ final class MediaAdapter { /// - Parameter videoOutput: The video output to remove. 
func removeVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try videoMediaAdapter.removeVideoOutput(videoOutput) + ) async throws { + try await videoMediaAdapter.removeVideoOutput(videoOutput) } /// Adds a photo output to the capture session. @@ -275,8 +306,8 @@ final class MediaAdapter { /// - Parameter capturePhotoOutput: The photo output to add. func addCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try videoMediaAdapter.addCapturePhotoOutput(capturePhotoOutput) + ) async throws { + try await videoMediaAdapter.addCapturePhotoOutput(capturePhotoOutput) } /// Removes a photo output from the capture session. @@ -284,17 +315,8 @@ final class MediaAdapter { /// - Parameter capturePhotoOutput: The photo output to remove. func removeCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try videoMediaAdapter.removeCapturePhotoOutput(capturePhotoOutput) - } - - /// Changes the publishing quality based on active encodings. - /// - /// - Parameter activeEncodings: The set of active encoding identifiers. - func changePublishQuality( - with layerSettings: [Stream_Video_Sfu_Event_VideoLayerSetting] - ) { - videoMediaAdapter.changePublishQuality(with: layerSettings) + ) async throws { + try await videoMediaAdapter.removeCapturePhotoOutput(capturePhotoOutput) } // MARK: - ScreenSharing diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift index cc05b10ab..6728597f1 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift @@ -33,16 +33,6 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable { /// A subject for publishing track events. 
let subject: PassthroughSubject - /// The local screen share track, if available. - var localTrack: RTCMediaStreamTrack? { - (localMediaManager as? LocalScreenShareMediaAdapter)?.localTrack - } - - /// The mid (Media Stream Identification) of the local screen share track, if available. - var mid: String? { - (localMediaManager as? LocalScreenShareMediaAdapter)?.mid - } - /// Convenience initializer for creating a ScreenShareMediaAdapter with a LocalScreenShareMediaAdapter. /// /// - Parameters: @@ -52,6 +42,7 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable { /// - sfuAdapter: The adapter for communicating with the SFU. /// - videoOptions: The video options for the call. /// - videoConfig: The video configuration for the call. + /// - publishOptions: The video publish options used for the screen-share track. /// - subject: A subject for publishing track events. /// - screenShareSessionProvider: Provides access to the active screen sharing session. convenience init( @@ -59,8 +50,7 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable { peerConnection: StreamRTCPeerConnectionProtocol, peerConnectionFactory: PeerConnectionFactory, sfuAdapter: SFUAdapter, - videoOptions: VideoOptions, - videoConfig: VideoConfig, + publishOptions: [PublishOptions.VideoPublishOptions], subject: PassthroughSubject, screenShareSessionProvider: ScreenShareSessionProvider ) { @@ -73,8 +63,7 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable { peerConnection: peerConnection, peerConnectionFactory: peerConnectionFactory, sfuAdapter: sfuAdapter, - videoOptions: videoOptions, - videoConfig: videoConfig, + publishOptions: publishOptions, subject: subject, screenShareSessionProvider: screenShareSessionProvider ), @@ -143,6 +132,23 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable { try await localMediaManager.didUpdateCallSettings(settings) } + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) async throws { + try await 
localMediaManager.didUpdatePublishOptions(publishOptions) + } + + func changePublishQuality( + with layerSettings: [Stream_Video_Sfu_Event_VideoSender] + ) { + (localMediaManager as? LocalScreenShareMediaAdapter)? + .changePublishQuality(with: layerSettings) + } + + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] { + localMediaManager.trackInfo() + } + // MARK: - ScreenSharing /// Begins screen sharing of the specified type. diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift index dd92f4225..27bedce68 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift @@ -14,7 +14,7 @@ struct ScreenShareSession { var screenSharingType: ScreensharingType /// The video capturer for the screen share. - var capturer: VideoCapturing + var capturer: StreamVideoCapturer } /// A class that provides and manages the active screen sharing session. diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/Utilities/MediaTransceiverStorage.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/Utilities/MediaTransceiverStorage.swift new file mode 100644 index 000000000..0b304191e --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/Utilities/MediaTransceiverStorage.swift @@ -0,0 +1,186 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// A thread-safe storage for managing `RTCRtpTransceiver` instances by key. +/// +/// `MediaTransceiverStorage` provides a synchronized way to store and retrieve +/// transceivers associated with a specific track type (e.g., audio or video). +/// It uses a nested `Key` structure to hash and identify items, ensuring +/// safe access and modifications across multiple threads. 
+final class MediaTransceiverStorage: Sequence, CustomStringConvertible { + + /// Represents a key for identifying transceivers in the storage. + /// + /// The `Key` is generated by hashing relevant properties of the associated object (as have been + /// agreed with the SFU backend), ensuring uniqueness and consistency. It supports multiple types of + /// inputs, including `AudioPublishOptions` and `VideoPublishOptions`. + struct Key: Hashable { + fileprivate var sourceHashValue: Int + fileprivate var source: KeyNestedType + + /// Initializes a key for audio publish options. + /// + /// - Parameter source: The `AudioPublishOptions` used to generate the key. + init(_ source: KeyNestedType) where KeyNestedType == PublishOptions.AudioPublishOptions { + var hasher = Hasher() + hasher.combine(source.id) + hasher.combine(source.codec) + self.init(hasher: hasher, source: source) + } + + /// Initializes a key for video publish options. + /// + /// - Parameter source: The `VideoPublishOptions` used to generate the key. + init(_ source: KeyNestedType) where KeyNestedType == PublishOptions.VideoPublishOptions { + var hasher = Hasher() + hasher.combine(source.id) + hasher.combine(source.codec) + self.init(hasher: hasher, source: source) + } + + /// Initializes a key for general hashable types. + /// + /// - Parameter source: The `KeyNestedType` used to generate the key. + init(_ source: KeyNestedType) { + var hasher = Hasher() + hasher.combine(source) + self.init(hasher: hasher, source: source) + } + + /// Private initializer used to finalize the key creation. + /// + /// - Parameters: + /// - hasher: The hasher used to generate the hash value. + /// - source: The original source object used to create the key. + private init(hasher: Hasher, source: KeyNestedType) { + sourceHashValue = hasher.finalize() + self.source = source + } + } + + /// The type of track (e.g., audio, video, screen share) that this storage manages. 
+ private let trackType: TrackType + + /// Internal dictionary storing `RTCRtpTransceiver` instances by a hashable key. + private var storage: [Key: RTCRtpTransceiver] = [:] + + /// A serial queue used to synchronize access to the storage. + private let storageQueue = UnfairQueue() + + /// A textual representation of the active and inactive transceivers in the storage. + /// + /// Lists all active and inactive transceivers, showing their associated keys + /// and track IDs for debugging and informational purposes. + var description: String { + let storageSnapshot = storageQueue.sync { self.storage } + let active = storageSnapshot + .filter { $1.sender.track?.isEnabled == true } + .map { "\($0.key.source) → \($0.value.sender.track?.trackId ?? "unavailable trackId")" } + .joined(separator: "\n") + let inactive = storageSnapshot + .filter { $1.sender.track?.isEnabled != true } + .map { "\($0.key.source)" } + .joined(separator: "\n") + + return """ + MediaTransceiverStorage for type: \(trackType) + Active: + \(active) + Inactive: + \(inactive) + """ + } + + /// Initializes a new `MediaTransceiverStorage` for a specific track type. + /// + /// - Parameter trackType: The type of track (e.g., audio, video, screen share). + init(for trackType: TrackType) { + self.trackType = trackType + } + + /// Deinitializes the storage, ensuring all transceivers are stopped and cleared. + deinit { + storageQueue.sync { + storage.forEach { + $0.value.sender.track = nil + $0.value.stopInternal() + } + storage.removeAll() + } + } + + /// Retrieves a transceiver associated with a specific key. + /// + /// - Parameter key: The key used to look up the transceiver. + /// - Returns: The `RTCRtpTransceiver` associated with the key, or `nil` if not found. + func get(for key: KeyNestedType) -> RTCRtpTransceiver? { + storageQueue.sync { + storage[.init(key)] + } + } + + /// Associates a transceiver with a specific key, replacing any existing entry. 
+ /// + /// - Parameters: + /// - value: The transceiver to store, or `nil` to remove the key from storage. + /// - key: The key used to associate with the transceiver. + func set(_ value: RTCRtpTransceiver?, for key: KeyNestedType) { + if contains(key: key) { + log.warning("TransceiverStorage for trackType: \(trackType) will overwrite existing value for key: \(key).") + } + storageQueue.sync { + storage[.init(key)] = value + } + } + + /// Checks whether the storage contains a transceiver for a specific key. + /// + /// - Parameter key: The key to check for existence. + /// - Returns: `true` if the key exists in the storage, `false` otherwise. + func contains(key: KeyNestedType) -> Bool { + storageQueue.sync { storage[.init(key)] != nil } + } + + /// Removes all transceivers from the storage. + /// + /// Ensures that all transceivers are stopped and their associated tracks are cleared + /// before removing them from the storage. + func removeAll() { + storageQueue.sync { + storage.forEach { + $0.value.sender.track = nil + $0.value.stopInternal() + } + storage.removeAll() + } + } + + /// Retrieves the key associated with a specific transceiver's receiver. + /// + /// - Parameter value: The `RTCRtpReceiver` whose associated key is requested. + /// - Returns: The key associated with the receiver, or `nil` if not found. + func key(for value: RTCRtpReceiver) -> KeyNestedType? { + storageQueue.sync { storage.first(where: { $0.value.receiver === value })?.key.source } + } + + // MARK: Sequence + + /// Makes an iterator for iterating over the storage. + /// + /// - Returns: An iterator for `(key: Key, value: RTCRtpTransceiver)` pairs. 
+ func makeIterator() -> AnyIterator<( + key: KeyNestedType, + value: RTCRtpTransceiver + )> { + let elements = storageQueue + .sync { storage } + .map { (key: MediaTransceiverStorage.Key, value: RTCRtpTransceiver) in + (key: key.source, value: value) + } + return AnyIterator(elements.makeIterator()) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCapturePolicy/VideoCapturePolicy.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCapturePolicy/VideoCapturePolicy.swift index 51e78da1d..e0d0e9e52 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCapturePolicy/VideoCapturePolicy.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCapturePolicy/VideoCapturePolicy.swift @@ -2,6 +2,7 @@ // Copyright © 2024 Stream.io Inc. All rights reserved. // +import CoreMedia import Foundation /// Defines a video capture policy used by the `LocalVideoAdapter` to adjust video capture quality based on @@ -52,7 +53,8 @@ final class AdaptiveVideoCapturePolicy: VideoCapturePolicy, @unchecked Sendable guard shouldUpdateCaptureQuality, lastActiveEncodings != activeEncodings, - let activeSession + let activeSession, + let device = activeSession.device else { return } /// Filter the default video codecs to include only those matching the active encodings. 
@@ -60,8 +62,18 @@ final class AdaptiveVideoCapturePolicy: VideoCapturePolicy, @unchecked Sendable .default .filter { activeEncodings.contains($0.quality.rawValue) } + let preferredDimensions: CGSize = { + if videoCodecs.first(where: { $0.quality == VideoLayer.full.quality }) != nil { + return .full + } else if videoCodecs.first(where: { $0.quality == VideoLayer.half.quality }) != nil { + return .half + } else { + return .quarter + } + }() + try await activeSession.capturer - .updateCaptureQuality(videoCodecs, on: activeSession.device) + .updateCaptureQuality(preferredDimensions, on: device) lastActiveEncodings = activeEncodings log.debug( "Video capture quality adapted to [\(activeEncodings.sorted().joined(separator: ","))].", diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCaptureSession.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCaptureSession.swift index 7a6ebc931..7c8654e61 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCaptureSession.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoCaptureSession.swift @@ -14,7 +14,7 @@ struct VideoCaptureSession { var localTrack: RTCVideoTrack /// The video capturer for the screen share. - var capturer: CameraVideoCapturing & Sendable + var capturer: StreamVideoCapturer } /// A class that provides and manages the active screen sharing session. diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift index 97f1f33f4..0988e0374 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift @@ -32,17 +32,6 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// A subject for publishing track events. 
let subject: PassthroughSubject - - /// The local video track, if available. - var localTrack: RTCMediaStreamTrack? { - (localMediaManager as? LocalVideoMediaAdapter)?.localTrack - } - - /// The mid (Media Stream Identification) of the local video track, if available. - var mid: String? { - (localMediaManager as? LocalVideoMediaAdapter)?.mid - } - /// Convenience initializer for creating a VideoMediaAdapter with a LocalVideoMediaAdapter. /// /// - Parameters: @@ -52,6 +41,7 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// - sfuAdapter: The adapter for communicating with the SFU. /// - videoOptions: The video options for the call. /// - videoConfig: The video configuration for the call. + /// - publishOptions: The video publish options used when publishing the local video track. /// - subject: A subject for publishing track events. convenience init( sessionID: String, @@ -60,6 +50,7 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { sfuAdapter: SFUAdapter, videoOptions: VideoOptions, videoConfig: VideoConfig, + publishOptions: [PublishOptions.VideoPublishOptions], subject: PassthroughSubject, videoCaptureSessionProvider: VideoCaptureSessionProvider ) { @@ -74,6 +65,7 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { sfuAdapter: sfuAdapter, videoOptions: videoOptions, videoConfig: videoConfig, + publishOptions: publishOptions, subject: subject, videoCaptureSessionProvider: videoCaptureSessionProvider ), @@ -140,6 +132,16 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { try await localMediaManager.didUpdateCallSettings(settings) } + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) async throws { + try await localMediaManager.didUpdatePublishOptions(publishOptions) + } + + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] { + localMediaManager.trackInfo() + } + // MARK: - Video /// Updates the camera position. 
@@ -163,15 +165,15 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// Zooms the camera by a given factor. /// /// - Parameter factor: The zoom factor. - func zoom(by factor: CGFloat) throws { - try (localMediaManager as? LocalVideoMediaAdapter)?.zoom(by: factor) + func zoom(by factor: CGFloat) async throws { + try await(localMediaManager as? LocalVideoMediaAdapter)?.zoom(by: factor) } /// Focuses the camera at a given point. /// /// - Parameter point: The point to focus on. - func focus(at point: CGPoint) throws { - try (localMediaManager as? LocalVideoMediaAdapter)?.focus(at: point) + func focus(at point: CGPoint) async throws { + try await(localMediaManager as? LocalVideoMediaAdapter)?.focus(at: point) } /// Adds a video output to the capture session. @@ -179,8 +181,8 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// - Parameter videoOutput: The video output to add. func addVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try (localMediaManager as? LocalVideoMediaAdapter)?.addVideoOutput(videoOutput) + ) async throws { + try await(localMediaManager as? LocalVideoMediaAdapter)?.addVideoOutput(videoOutput) } /// Removes a video output from the capture session. @@ -188,8 +190,8 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// - Parameter videoOutput: The video output to remove. func removeVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try (localMediaManager as? LocalVideoMediaAdapter)?.removeVideoOutput(videoOutput) + ) async throws { + try await(localMediaManager as? LocalVideoMediaAdapter)?.removeVideoOutput(videoOutput) } /// Adds a photo output to the capture session. @@ -197,8 +199,8 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// - Parameter capturePhotoOutput: The photo output to add. func addCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try (localMediaManager as? LocalVideoMediaAdapter)? 
+ ) async throws { + try await(localMediaManager as? LocalVideoMediaAdapter)? .addCapturePhotoOutput(capturePhotoOutput) } @@ -207,8 +209,8 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// - Parameter capturePhotoOutput: The photo output to remove. func removeCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try (localMediaManager as? LocalVideoMediaAdapter)? + ) async throws { + try await(localMediaManager as? LocalVideoMediaAdapter)? .removeCapturePhotoOutput(capturePhotoOutput) } @@ -216,7 +218,7 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable { /// /// - Parameter activeEncodings: The set of active encoding identifiers. func changePublishQuality( - with layerSettings: [Stream_Video_Sfu_Event_VideoLayerSetting] + with layerSettings: [Stream_Video_Sfu_Event_VideoSender] ) { (localMediaManager as? LocalVideoMediaAdapter)? .changePublishQuality(with: layerSettings) diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/LocalMediaAdapting.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/LocalMediaAdapting.swift index 77e158672..bc3daaa18 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/LocalMediaAdapting.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/LocalMediaAdapting.swift @@ -35,10 +35,14 @@ protocol LocalMediaAdapting { /// This method should be called when the local participant wants to stop sharing their media. func unpublish() + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] + /// Updates the adapter with new call settings. /// /// - Parameter settings: The updated call settings to apply. /// /// - Throws: An error if the update process fails. 
func didUpdateCallSettings(_ settings: CallSettings) async throws + + func didUpdatePublishOptions(_ publishOptions: PublishOptions) async throws } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/MediaAdapting.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/MediaAdapting.swift index af4b9285e..eb45770d3 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/MediaAdapting.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/MediaAdapting.swift @@ -12,11 +12,7 @@ protocol MediaAdapting { /// A subject for publishing track events. var subject: PassthroughSubject { get } - /// The local media track managed by this adapter, if any. - var localTrack: RTCMediaStreamTrack? { get } - - /// The mid (Media Stream Identification) of the local track, if available. - var mid: String? { get } + func trackInfo() -> [Stream_Video_Sfu_Models_TrackInfo] /// Sets up the media adapter with the given settings and capabilities. /// @@ -34,4 +30,6 @@ protocol MediaAdapting { /// - Parameter settings: The updated call settings. /// - Throws: An error if the update process fails. func didUpdateCallSettings(_ settings: CallSettings) async throws + + func didUpdatePublishOptions(_ publishOptions: PublishOptions) async throws } diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift index 8bb22e5a3..260b419b9 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift @@ -21,6 +21,7 @@ protocol RTCPeerConnectionCoordinatorProviding { /// - videoConfig: Configuration settings for video. /// - callSettings: Settings related to the overall call. /// - audioSettings: Settings for audio configuration. 
+    /// - publishOptions: The publish options to use when publishing local media tracks.
     /// - sfuAdapter: The adapter for interacting with the Selective Forwarding Unit.
     /// - videoCaptureSessionProvider: Provider for video capturing functionality.
     /// - screenShareSessionProvider: Provider for screen sharing functionality.
@@ -34,6 +35,7 @@ protocol RTCPeerConnectionCoordinatorProviding {
         videoConfig: VideoConfig,
         callSettings: CallSettings,
         audioSettings: AudioSettings,
+        publishOptions: PublishOptions,
         sfuAdapter: SFUAdapter,
         videoCaptureSessionProvider: VideoCaptureSessionProvider,
         screenShareSessionProvider: ScreenShareSessionProvider
@@ -56,6 +58,7 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina
     /// - videoConfig: Configuration settings for video.
     /// - callSettings: Settings related to the overall call.
     /// - audioSettings: Settings for audio configuration.
+    /// - publishOptions: The publish options to use when publishing local media tracks.
     /// - sfuAdapter: The adapter for interacting with the Selective Forwarding Unit.
     /// - videoCaptureSessionProvider: Provider for video capturing functionality.
     /// - screenShareSessionProvider: Provider for screen sharing functionality.
@@ -69,6 +72,7 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina videoConfig: VideoConfig, callSettings: CallSettings, audioSettings: AudioSettings, + publishOptions: PublishOptions, sfuAdapter: SFUAdapter, videoCaptureSessionProvider: VideoCaptureSessionProvider, screenShareSessionProvider: ScreenShareSessionProvider @@ -82,6 +86,7 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina videoConfig: videoConfig, callSettings: callSettings, audioSettings: audioSettings, + publishOptions: publishOptions, sfuAdapter: sfuAdapter, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift index d5a8d3124..9cbc24c27 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift @@ -51,6 +51,9 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { private var setUpSubject: CurrentValueSubject = .init(false) var videoOptions: VideoOptions var audioSettings: AudioSettings + var publishOptions: PublishOptions { + didSet { didUpdatePublishOptions(publishOptions) } + } // MARK: State @@ -63,12 +66,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { .eraseToAnyPublisher() } - func localTrack(of type: TrackType) -> RTCMediaStreamTrack? { - mediaAdapter.localTrack(of: type) - } - - func mid(for type: TrackType) -> String? { - mediaAdapter.mid(for: type) + func trackInfo(for type: TrackType) -> [Stream_Video_Sfu_Models_TrackInfo] { + mediaAdapter.trackInfo(for: type) } /// Initializes the RTCPeerConnectionCoordinator with necessary dependencies. 
@@ -82,6 +81,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { /// - videoConfig: Configuration for video processing. /// - callSettings: Settings for the current call. /// - audioSettings: Settings for audio processing. + /// - publishOptions: TODO /// - sfuAdapter: Adapter for communicating with the SFU. /// - audioSession: The audio session to be used. /// - videoCaptureSessionProvider: Provider for video capturing sessions. @@ -95,6 +95,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { videoConfig: VideoConfig, callSettings: CallSettings, audioSettings: AudioSettings, + publishOptions: PublishOptions, sfuAdapter: SFUAdapter, videoCaptureSessionProvider: VideoCaptureSessionProvider, screenShareSessionProvider: ScreenShareSessionProvider @@ -106,6 +107,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { videoOptions: videoOptions, callSettings: callSettings, audioSettings: audioSettings, + publishOptions: publishOptions, sfuAdapter: sfuAdapter, mediaAdapter: .init( sessionID: sessionId, @@ -115,6 +117,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { sfuAdapter: sfuAdapter, videoOptions: videoOptions, videoConfig: videoConfig, + publishOptions: publishOptions, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider ) @@ -128,6 +131,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { videoOptions: VideoOptions, callSettings: CallSettings, audioSettings: AudioSettings, + publishOptions: PublishOptions, sfuAdapter: SFUAdapter, mediaAdapter: MediaAdapter ) { @@ -136,6 +140,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { self.videoOptions = videoOptions self.callSettings = callSettings self.audioSettings = audioSettings + self.publishOptions = publishOptions self.peerConnection = peerConnection self.sfuAdapter = sfuAdapter subsystem = peerType == .publisher @@ -305,6 +310,18 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { 
try await mediaAdapter.didUpdateCallSettings(settings) } + func didUpdatePublishOptions( + _ publishOptions: PublishOptions + ) { + Task { + do { + try await mediaAdapter.didUpdatePublishOptions(publishOptions) + } catch { + log.error(error) + } + } + } + // MARK: - Actions /// Creates an offer for the peer connection. @@ -363,6 +380,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { type:\(peerType) sessionID: \(sessionId) sfu: \(sfuAdapter.hostname) + SDP: \(sessionDescription.sdp.replacingOccurrences(of: "\r\n", with: " ").replacingOccurrences(of: "\n", with: " ")) """, subsystems: subsystem ) @@ -383,6 +401,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { type:\(peerType) sessionID: \(sessionId) sfu: \(sfuAdapter.hostname) + SDP: \(sessionDescription.sdp.replacingOccurrences(of: "\r\n", with: " ").replacingOccurrences(of: "\n", with: " ")) """, subsystems: subsystem ) @@ -481,16 +500,16 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { /// /// - Parameter factor: The zoom factor to apply. /// - Throws: An error if zooming fails or is not supported by the current device. - func zoom(by factor: CGFloat) throws { - try mediaAdapter.zoom(by: factor) + func zoom(by factor: CGFloat) async throws { + try await mediaAdapter.zoom(by: factor) } /// Focuses the camera at a given point. /// /// - Parameter point: The point in the camera's coordinate system to focus on. /// - Throws: An error if focusing fails or is not supported by the current device. - func focus(at point: CGPoint) throws { - try mediaAdapter.focus(at: point) + func focus(at point: CGPoint) async throws { + try await mediaAdapter.focus(at: point) } /// Adds a video output to the capture session. @@ -499,8 +518,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { /// - Throws: An error if adding the video output fails. 
func addVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try mediaAdapter.addVideoOutput(videoOutput) + ) async throws { + try await mediaAdapter.addVideoOutput(videoOutput) } /// Removes a video output from the capture session. @@ -509,8 +528,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { /// - Throws: An error if removing the video output fails. func removeVideoOutput( _ videoOutput: AVCaptureVideoDataOutput - ) throws { - try mediaAdapter.removeVideoOutput(videoOutput) + ) async throws { + try await mediaAdapter.removeVideoOutput(videoOutput) } /// Adds a photo output to the capture session. @@ -519,8 +538,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { /// - Throws: An error if adding the photo output fails. func addCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try mediaAdapter.addCapturePhotoOutput(capturePhotoOutput) + ) async throws { + try await mediaAdapter.addCapturePhotoOutput(capturePhotoOutput) } /// Removes a photo output from the capture session. @@ -529,17 +548,19 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { /// - Throws: An error if removing the photo output fails. func removeCapturePhotoOutput( _ capturePhotoOutput: AVCapturePhotoOutput - ) throws { - try mediaAdapter.removeCapturePhotoOutput(capturePhotoOutput) + ) async throws { + try await mediaAdapter.removeCapturePhotoOutput(capturePhotoOutput) } /// Changes the publish quality with active encodings. /// /// - Parameter activeEncodings: A set of active encoding identifiers. 
func changePublishQuality( - with layerSettings: [Stream_Video_Sfu_Event_VideoLayerSetting] + with event: Stream_Video_Sfu_Event_ChangePublishQuality ) { - mediaAdapter.changePublishQuality(with: layerSettings) + Task { + await mediaAdapter.changePublishQuality(with: event) + } } // MARK: - ScreenSharing @@ -591,17 +612,13 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { let offer = try await self .createOffer(constraints: constraints) - .withOpusDTX(audioSettings.opusDtxEnabled) - .withRedundantCoding(audioSettings.redundantCodingEnabled) try await setLocalDescription(offer) try await ensureSetUpHasBeenCompleted() - let tracksInfo = WebRTCJoinRequestFactory().buildAnnouncedTracks( - self, - videoOptions: videoOptions - ) + let tracksInfo = WebRTCJoinRequestFactory() + .buildAnnouncedTracks(self) log.debug( """ @@ -611,8 +628,10 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable { sessionID: \(sessionId) sfu: \(sfuAdapter.hostname) tracksInfo: - hasAudio: \(tracksInfo.contains { $0.trackType == .audio }) - hasVideo: \(tracksInfo.contains { $0.trackType == .video }) + audio: + \(tracksInfo.filter { $0.trackType == .audio }) + video: + \(tracksInfo.filter { $0.trackType == .video }) hasScreenSharing: \(tracksInfo.contains { $0.trackType == .screenShare }) """, subsystems: subsystem diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCTemporaryPeerConnection.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCTemporaryPeerConnection.swift index 52ee91ab3..b281d17c7 100644 --- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCTemporaryPeerConnection.swift +++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCTemporaryPeerConnection.swift @@ -9,10 +9,35 @@ import StreamWebRTC final class RTCTemporaryPeerConnection { private let peerConnection: StreamRTCPeerConnectionProtocol - private let localAudioTrack: RTCAudioTrack? - private let localVideoTrack: RTCVideoTrack? 
+ private let direction: RTCRtpTransceiverDirection + private let localAudioTrack: RTCAudioTrack + private let localVideoTrack: RTCVideoTrack private let videoOptions: VideoOptions + convenience init( + peerConnectionType: PeerConnectionType, + coordinator: WebRTCCoordinator, + sfuAdapter: SFUAdapter + ) async throws { + let peerConnectionFactory = coordinator.stateAdapter.peerConnectionFactory + let audioSource = peerConnectionFactory.makeAudioSource(.defaultConstraints) + let audioTrack = peerConnectionFactory.makeAudioTrack(source: audioSource) + + let videoSource = peerConnectionFactory.makeVideoSource(forScreenShare: false) + let videoTrack = peerConnectionFactory.makeVideoTrack(source: videoSource) + + try await self.init( + direction: peerConnectionType == .subscriber ? .recvOnly : .sendOnly, + sessionID: coordinator.stateAdapter.sessionID, + peerConnectionFactory: coordinator.stateAdapter.peerConnectionFactory, + configuration: coordinator.stateAdapter.connectOptions.rtcConfiguration, + sfuAdapter: sfuAdapter, + videoOptions: coordinator.stateAdapter.videoOptions, + localAudioTrack: audioTrack, + localVideoTrack: videoTrack + ) + } + /// Initializes a new RTCTemporaryPeerConnection. /// /// - Parameters: @@ -21,23 +46,25 @@ final class RTCTemporaryPeerConnection { /// - configuration: The configuration for the peer connection. /// - sfuAdapter: The adapter for communicating with the SFU. /// - videoOptions: The options for video configuration. - /// - localAudioTrack: The local audio track to add to the connection, if any. - /// - localVideoTrack: The local video track to add to the connection, if any. + /// - localAudioTrack: The local audio track to add to the connection. + /// - localVideoTrack: The local video track to add to the connection. /// /// - Throws: An error if the peer connection creation fails. 
- init( + private init( + direction: RTCRtpTransceiverDirection, sessionID: String, peerConnectionFactory: PeerConnectionFactory, configuration: RTCConfiguration, sfuAdapter: SFUAdapter, videoOptions: VideoOptions, - localAudioTrack: RTCAudioTrack?, - localVideoTrack: RTCVideoTrack? + localAudioTrack: RTCAudioTrack, + localVideoTrack: RTCVideoTrack ) throws { peerConnection = try StreamRTCPeerConnection( peerConnectionFactory, configuration: configuration ) + self.direction = direction self.localAudioTrack = localAudioTrack self.localVideoTrack = localVideoTrack self.videoOptions = videoOptions @@ -57,29 +84,16 @@ final class RTCTemporaryPeerConnection { /// - Returns: An `RTCSessionDescription` representing the created offer. /// - Throws: An error if the offer creation fails. func createOffer() async throws -> RTCSessionDescription { - if let localAudioTrack { - _ = peerConnection.addTransceiver( - with: localAudioTrack, - init: RTCRtpTransceiverInit( - trackType: .audio, - direction: .recvOnly, - streamIds: ["temp-audio"] - ) - ) - } + _ = peerConnection.addTransceiver( + with: localAudioTrack, + init: .temporary(trackType: .audio) + ) - if let localVideoTrack { - _ = peerConnection.addTransceiver( - with: localVideoTrack, - init: RTCRtpTransceiverInit( - trackType: .video, - direction: .recvOnly, - streamIds: ["temp-video"], - layers: videoOptions.videoLayers, - preferredVideoCodec: videoOptions.preferredVideoCodec - ) - ) - } + _ = peerConnection.addTransceiver( + with: localVideoTrack, + init: .temporary(trackType: .video) + ) + return try await peerConnection.offer(for: .defaultConstraints) } } diff --git a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/README.md b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/README.md new file mode 100644 index 000000000..2038ab2ee --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/README.md @@ -0,0 +1,109 @@ +# SDP Parser + +This repository contains an implementation of an SDP (Session Description Protocol) parser. 
The parser is designed using the Visitor pattern to process different types of SDP lines efficiently.
+
+## Architecture
+
+### Overview
+
+The SDP parser is built around the Visitor pattern, which allows for flexible and extensible processing of different SDP line types. The main components of the architecture are:
+
+- **SDPParser**: The main parser class that processes the SDP string and delegates line processing to registered visitors.
+- **SDPLineVisitor**: A protocol that defines the interface for visitors that process specific SDP line prefixes.
+- **SupportedPrefix**: An enumeration that defines the supported SDP line prefixes.
+
+### Visitor Pattern
+
+The Visitor pattern is used to separate the algorithm for processing SDP lines from the objects on which it operates. This allows for adding new processing logic without modifying the existing parser code.
+
+#### Components
+
+- **SDPParser**: The main parser that holds a list of visitors and delegates line processing to them.
+- **SDPLineVisitor**: A protocol that visitors must conform to. Each visitor handles specific SDP line prefixes.
+- **SupportedPrefix**: An enumeration that defines the supported prefixes and provides a method to check if a line has a supported prefix.
+
+#### Class Diagram
+
+```mermaid
+classDiagram
+    class SDPParser {
+        - visitors: [SDPLineVisitor]
+        + registerVisitor(visitor: SDPLineVisitor)
+        + parse(sdp: String) async
+    }
+
+    class SDPLineVisitor {
+        <<protocol>>
+        + supportedPrefixes: [SupportedPrefix]
+        + visit(line: String)
+    }
+
+    class SupportedPrefix {
+        <<enumeration>>
+        + isPrefixSupported(for line: String) -> SupportedPrefix
+    }
+
+    SDPParser --> SDPLineVisitor : uses
+    SDPLineVisitor --> SupportedPrefix : uses
+```
+
+#### Data Flow
+
+- **SDP String Input**: The SDP string is provided to the SDPParser's parse method.
+- **Line Splitting**: The SDP string is split into individual lines.
+- **Prefix Checking**: Each line is checked to see if it has a supported prefix using the SupportedPrefix enumeration. +- **Visitor Delegation**: If a line has a supported prefix, the parser delegates the line processing to the registered visitors that support the prefix. +- **Line Processing**: Each visitor processes the line according to its specific logic. + +##### Sequence Diagram + +``` +sequenceDiagram + participant Client + participant SDPParser + participant SupportedPrefix + participant SDPLineVisitor + + Client->>SDPParser: parse(sdp: String) + SDPParser->>SDPParser: split(sdp, "\r\n") + loop for each line + SDPParser->>SupportedPrefix: isPrefixSupported(for: line) + alt supported prefix + SDPParser->>SDPLineVisitor: visit(line: String) + end + end +``` + +##### Performance Considerations + +The SDP parser is designed to be efficient and scalable. Key performance considerations include: + +- **Asynchronous Parsing**: The parse method is asynchronous, allowing for non-blocking parsing of large SDP strings. +- **Visitor Pattern**: The use of the Visitor pattern allows for efficient delegation of line processing, reducing the complexity of the parser. +- **Prefix Checking**: The SupportedPrefix enumeration provides a fast way to check if a line has a supported prefix, minimizing the overhead of line processing. + +#### Example Usage + +```swift +let parser = SDPParser() +let visitor = MySDPLineVisitor() +parser.registerVisitor(visitor) + +let sdpString = """ +v=0 +o=- 46117317 2 IN IP4 127.0.0.1 +s=- +c=IN IP4 127.0.0.1 +t=0 0 +a=rtpmap:96 opus/48000/2 +""" + +Task { + await parser.parse(sdp: sdpString) +} +``` + +In this example, MySDPLineVisitor is a custom visitor that conforms to the SDPLineVisitor protocol and processes lines with specific prefixes. + +#### Conclusion +The SDP parser is a flexible and efficient solution for processing SDP strings. 
The use of the Visitor pattern allows for easy extension and maintenance, while the asynchronous parsing ensures that the parser can handle large SDP strings without blocking the main thread. \ No newline at end of file diff --git a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/SDPParser.swift b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/SDPParser.swift new file mode 100644 index 000000000..5db395d17 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/SDPParser.swift @@ -0,0 +1,42 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// The main SDP parser that uses visitors to process lines. +final class SDPParser { + private var visitors: [SDPLineVisitor] = [] + + /// Registers a visitor for a specific SDP line prefix. + /// - Parameters: + /// - prefix: The line prefix to handle (e.g., "a=rtpmap"). + /// - visitor: The visitor that processes lines with the specified prefix. + func registerVisitor(_ visitor: SDPLineVisitor) { + visitors.append(visitor) + } + + /// Parses the provided SDP string asynchronously. + /// - Parameter sdp: The SDP string to parse. + func parse(sdp: String) async { + let lines = sdp.split(separator: "\r\n") + for line in lines { + let line = String(line) + let supportedPrefix = SupportedPrefix.isPrefixSupported(for: line) + guard + supportedPrefix != .unsupported + else { + continue + } + + visitors.forEach { + guard + $0.supportedPrefixes.contains(supportedPrefix) + else { + return + } + $0.visit(line: line) + } + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/SupportedPrefix.swift b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/SupportedPrefix.swift new file mode 100644 index 000000000..72ab64e45 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/SupportedPrefix.swift @@ -0,0 +1,22 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import Foundation + +enum SupportedPrefix: String, Hashable, CaseIterable { + case unsupported + case rtmap = "a=rtpmap:" + + static func isPrefixSupported(for line: String) -> SupportedPrefix { + guard + let supportedPrefix = SupportedPrefix + .allCases + .first(where: { $0 != .unsupported && line.hasPrefix($0.rawValue) }) + else { + return .unsupported + } + + return supportedPrefix + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Visitors/RTPMapVisitor.swift b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Visitors/RTPMapVisitor.swift new file mode 100644 index 000000000..ea14db0d7 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Visitors/RTPMapVisitor.swift @@ -0,0 +1,39 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// A visitor for processing `a=rtpmap` lines. +final class RTPMapVisitor: SDPLineVisitor { + private var codecMap: [String: Int] = [:] + + var supportedPrefixes: Set = [.rtmap] + + func visit(line: String) { + // Parse the `a=rtpmap` line and extract codec information + let components = line + .replacingOccurrences(of: SupportedPrefix.rtmap.rawValue, with: "") + .split(separator: " ") + + guard + components.count == 2, + let payloadType = Int(components[0]) + else { + return + } + + let codecName = components[1] + .split(separator: "/") + .first? + .lowercased() ?? "" + codecMap[codecName] = payloadType + } + + /// Retrieves the payload type for a given codec name. + /// - Parameter codec: The codec name to search for. + /// - Returns: The payload type, or `nil` if not found. + func payloadType(for codec: String) -> Int? 
{ + codecMap[codec.lowercased()] + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Visitors/SDPLineVisitor.swift b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Visitors/SDPLineVisitor.swift new file mode 100644 index 000000000..da69954c6 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Visitors/SDPLineVisitor.swift @@ -0,0 +1,12 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +protocol SDPLineVisitor { + + var supportedPrefixes: Set { get } + + func visit(line: String) +} diff --git a/Sources/StreamVideo/WebRTC/v2/SFU/SFUEventAdapter.swift b/Sources/StreamVideo/WebRTC/v2/SFU/SFUEventAdapter.swift index ce612e5a9..886e6553b 100644 --- a/Sources/StreamVideo/WebRTC/v2/SFU/SFUEventAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/SFU/SFUEventAdapter.swift @@ -106,6 +106,11 @@ final class SFUEventAdapter { .publisher(eventType: Stream_Video_Sfu_Event_ParticipantUpdated.self) .sinkTask(storeIn: disposableBag) { [weak self] in await self?.handleParticipantUpdated($0) } .store(in: disposableBag) + + sfuAdapter + .publisher(eventType: Stream_Video_Sfu_Event_ChangePublishOptions.self) + .sinkTask(storeIn: disposableBag) { [weak self] in await self?.handleChangePublishOptions($0) } + .store(in: disposableBag) } // MARK: - Event handlers @@ -173,18 +178,9 @@ final class SFUEventAdapter { private func handleChangePublishQuality( _ event: Stream_Video_Sfu_Event_ChangePublishQuality ) async { - guard - let layerSettings = event - .videoSenders - .first? - .layers - else { - return - } - await stateAdapter .publisher? - .changePublishQuality(with: layerSettings) + .changePublishQuality(with: event) } /// Handles a ParticipantJoined event. 
@@ -470,7 +466,7 @@ final class SFUEventAdapter { else { return participants } - + updatedParticipants[event.participant.sessionID] = event .participant .toCallParticipant() @@ -481,4 +477,11 @@ final class SFUEventAdapter { return updatedParticipants } } + + private func handleChangePublishOptions( + _ event: Stream_Video_Sfu_Event_ChangePublishOptions + ) async { + await stateAdapter + .set(publishOptions: .init(event.publishOptions)) + } } diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift index dfbb230e7..84fb93a98 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift @@ -94,22 +94,22 @@ extension WebRTCCoordinator.StateMachine.Stage { try Task.checkCancellation() - if !isFastReconnecting { - try await coordinator.stateAdapter.configurePeerConnections() - } - - try Task.checkCancellation() - await sfuAdapter.sendJoinRequest( WebRTCJoinRequestFactory() .buildRequest( with: isFastReconnecting ? 
.fastReconnect : .default, coordinator: coordinator, - subscriberSdp: try await buildSubscriberSessionDescription( + publisherSdp: try await buildSessionDescription( + peerConnectionType: .publisher, coordinator: coordinator, sfuAdapter: sfuAdapter, - isFastReconnecting: isFastReconnecting, - publisher: await coordinator.stateAdapter.publisher + isFastReconnecting: isFastReconnecting + ), + subscriberSdp: try await buildSessionDescription( + peerConnectionType: .subscriber, + coordinator: coordinator, + sfuAdapter: sfuAdapter, + isFastReconnecting: isFastReconnecting ), reconnectAttempt: context.reconnectAttempts, publisher: await coordinator.stateAdapter.publisher @@ -120,7 +120,8 @@ extension WebRTCCoordinator.StateMachine.Stage { try await join( coordinator: coordinator, - sfuAdapter: sfuAdapter + sfuAdapter: sfuAdapter, + isFastReconnecting: isFastReconnecting ) try Task.checkCancellation() @@ -158,20 +159,22 @@ extension WebRTCCoordinator.StateMachine.Stage { try Task.checkCancellation() - try await coordinator.stateAdapter.configurePeerConnections() - - try Task.checkCancellation() - await sfuAdapter.sendJoinRequest( WebRTCJoinRequestFactory() .buildRequest( with: .migration(fromHostname: context.migratingFromSFU), coordinator: coordinator, - subscriberSdp: try await buildSubscriberSessionDescription( + publisherSdp: try await buildSessionDescription( + peerConnectionType: .publisher, coordinator: coordinator, sfuAdapter: sfuAdapter, - isFastReconnecting: false, - publisher: await coordinator.stateAdapter.publisher + isFastReconnecting: false + ), + subscriberSdp: try await buildSessionDescription( + peerConnectionType: .subscriber, + coordinator: coordinator, + sfuAdapter: sfuAdapter, + isFastReconnecting: false ), reconnectAttempt: context.reconnectAttempts, publisher: await coordinator.stateAdapter.publisher @@ -184,7 +187,8 @@ extension WebRTCCoordinator.StateMachine.Stage { try await join( coordinator: coordinator, - sfuAdapter: sfuAdapter + sfuAdapter: 
sfuAdapter, + isFastReconnecting: false ) transitionOrDisconnect(.joined(context)) @@ -214,20 +218,22 @@ extension WebRTCCoordinator.StateMachine.Stage { try Task.checkCancellation() - try await coordinator.stateAdapter.configurePeerConnections() - - try Task.checkCancellation() - await sfuAdapter.sendJoinRequest( WebRTCJoinRequestFactory() .buildRequest( with: .rejoin(fromSessionID: isRejoiningFromSessionID), coordinator: coordinator, - subscriberSdp: try await buildSubscriberSessionDescription( + publisherSdp: try await buildSessionDescription( + peerConnectionType: .publisher, + coordinator: coordinator, + sfuAdapter: sfuAdapter, + isFastReconnecting: false + ), + subscriberSdp: try await buildSessionDescription( + peerConnectionType: .subscriber, coordinator: coordinator, sfuAdapter: sfuAdapter, - isFastReconnecting: false, - publisher: coordinator.stateAdapter.publisher + isFastReconnecting: false ), reconnectAttempt: context.reconnectAttempts, publisher: coordinator.stateAdapter.publisher @@ -239,7 +245,8 @@ extension WebRTCCoordinator.StateMachine.Stage { try await join( coordinator: coordinator, - sfuAdapter: sfuAdapter + sfuAdapter: sfuAdapter, + isFastReconnecting: false ) transitionOrDisconnect(.joined(context)) @@ -258,32 +265,38 @@ extension WebRTCCoordinator.StateMachine.Stage { /// reconnection. /// - publisher: The RTC peer connection coordinator for publishing. /// - Returns: The subscriber session description as a string. - private func buildSubscriberSessionDescription( + private func buildSessionDescription( + peerConnectionType: PeerConnectionType, coordinator: WebRTCCoordinator, sfuAdapter: SFUAdapter, - isFastReconnecting: Bool, - publisher: RTCPeerConnectionCoordinator? 
+ isFastReconnecting: Bool ) async throws -> String { - let subscriberSessionDescription: String + let sessionDescription: String + + switch peerConnectionType { + case .subscriber: + if + isFastReconnecting, + let subscriber = await coordinator.stateAdapter.subscriber { + let offer = try await subscriber.createOffer() + sessionDescription = offer.sdp + } else { + sessionDescription = try await RTCTemporaryPeerConnection( + peerConnectionType: .subscriber, + coordinator: coordinator, + sfuAdapter: sfuAdapter + ).createOffer().sdp + } - if - isFastReconnecting, - let subscriber = await coordinator.stateAdapter.subscriber { - let offer = try await subscriber.createOffer() - subscriberSessionDescription = offer.sdp - } else { - try await publisher?.ensureSetUpHasBeenCompleted() - subscriberSessionDescription = try await RTCTemporaryPeerConnection( - sessionID: coordinator.stateAdapter.sessionID, - peerConnectionFactory: coordinator.stateAdapter.peerConnectionFactory, - configuration: coordinator.stateAdapter.connectOptions.rtcConfiguration, - sfuAdapter: sfuAdapter, - videoOptions: coordinator.stateAdapter.videoOptions, - localAudioTrack: publisher?.localTrack(of: .audio) as? RTCAudioTrack, - localVideoTrack: publisher?.localTrack(of: .video) as? RTCVideoTrack + case .publisher: + sessionDescription = try await RTCTemporaryPeerConnection( + peerConnectionType: .publisher, + coordinator: coordinator, + sfuAdapter: sfuAdapter ).createOffer().sdp } - return subscriberSessionDescription + + return sessionDescription } /// Performs the join process. @@ -292,7 +305,8 @@ extension WebRTCCoordinator.StateMachine.Stage { /// - sfuAdapter: The SFU adapter. 
private func join( coordinator: WebRTCCoordinator, - sfuAdapter: SFUAdapter + sfuAdapter: SFUAdapter, + isFastReconnecting: Bool ) async throws { if let eventObserver = context.sfuEventObserver { eventObserver.sfuAdapter = sfuAdapter @@ -311,6 +325,18 @@ extension WebRTCCoordinator.StateMachine.Stage { try Task.checkCancellation() + await coordinator + .stateAdapter + .set(publishOptions: .init(joinResponse.publishOptions)) + + try Task.checkCancellation() + + if !isFastReconnecting { + try await coordinator.stateAdapter.configurePeerConnections() + } + + try Task.checkCancellation() + let participants = joinResponse .callState .participants diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastStartCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastStartCaptureHandler.swift new file mode 100644 index 000000000..5db956360 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastStartCaptureHandler.swift @@ -0,0 +1,182 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import ReplayKit +import StreamWebRTC + +final class BroadcastStartCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + private struct Session { + var frameRate: Int = 15 + var adaptedOutputFormat: Bool = false + var preferredDimensions: CGSize + var videoSource: RTCVideoSource + var videoCapturer: RTCVideoCapturer + var videoCapturerDelegate: RTCVideoCapturerDelegate + } + + private lazy var broadcastBufferReader = InjectedValues[\.broadcastBufferReader] + private var activeSession: Session? 
+ + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .startCapture(_, dimensions, _, videoSource, videoCapturer, videoCapturerDelegate): + try await execute( + dimensions: dimensions, + videoSource: videoSource, + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + case .stopCapture: + activeSession = nil + default: + break + } + } + + // MARK: Private + + private func execute( + dimensions: CGSize, + videoSource: RTCVideoSource, + videoCapturer: RTCVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate + ) async throws { + /// - Important: If a session is already active, we should not attempt to start a new one. + /// The moment a new Connection is being created for a filePath that is already in use, then the + /// current session stops and we end up in a state where we try to initiate the session, but we + /// can't do it programmatically (user's interaction is required in order to present accept iOS popup). + guard activeSession == nil else { + return log.debug( + "\(type(of: self)) unable to start broadcast as another session is active.", + subsystems: .videoCapturer + ) + } + + guard + let identifier = infoPlistValue(for: BroadcastConstants.broadcastAppGroupIdentifier), + let filePath = filePathForIdentifier(identifier) + else { + throw ClientError( + "\(type(of: self)) unable to start broadcast as no shared container was found." + ) + } + + InjectedValues[\.broadcastBufferReader] = .init() + + guard + let socketConnection = BroadcastBufferReaderConnection( + filePath: filePath, + streamDelegate: broadcastBufferReader + ) + else { + throw ClientError( + "\(type(of: self)) unable to start broadcast as socket connection couldn't be established." 
+ ) + } + + broadcastBufferReader.onCapture = { [weak self] pixelBuffer, rotation in + self?.didReceive( + pixelBuffer: pixelBuffer, + rotation: rotation + ) + } + + broadcastBufferReader.startCapturing(with: socketConnection) + + activeSession = .init( + preferredDimensions: dimensions, + videoSource: videoSource, + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + + log.debug( + "\(type(of: self)) started capturing.", + subsystems: .videoCapturer + ) + } + + private func didReceive( + pixelBuffer: CVPixelBuffer, + rotation: RTCVideoRotation + ) { + guard + let activeSession = self.activeSession + else { + log.warning( + "\(type(of: self)) received sample buffer but no active session was found.", + subsystems: .videoCapturer + ) + return + } + + let systemTime = ProcessInfo.processInfo.systemUptime + let timeStampNs = Int64(systemTime * Double(NSEC_PER_SEC)) + + let rtcBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer) + let rtcFrame = RTCVideoFrame( + buffer: rtcBuffer, + rotation: rotation, + timeStampNs: timeStampNs + ) + + activeSession.videoCapturerDelegate.capturer( + activeSession.videoCapturer, + didCapture: rtcFrame + ) + + adaptOutputFormatIfRequired( + .init( + width: CVPixelBufferGetWidth(pixelBuffer), + height: CVPixelBufferGetHeight(pixelBuffer) + ) + ) + } + + private func adaptOutputFormatIfRequired( + _ bufferDimensions: CGSize + ) { + guard + let activeSession, + !activeSession.adaptedOutputFormat + else { return } + + let adaptedDimensions = bufferDimensions.adjusted( + toFit: max( + activeSession.preferredDimensions.width, + activeSession.preferredDimensions.height + ) + ) + + activeSession.videoSource.adaptOutputFormat( + toWidth: Int32(adaptedDimensions.width), + height: Int32(adaptedDimensions.height), + fps: Int32(activeSession.frameRate) + ) + + self.activeSession?.adaptedOutputFormat = true + + log.debug( + "\(type(of: self)) videoSource adaptation executed for dimensions:\(bufferDimensions).", + 
subsystems: .videoCapturer + ) + } + + private func filePathForIdentifier(_ identifier: String) -> String? { + guard let sharedContainer = FileManager + .default + .containerURL(forSecurityApplicationGroupIdentifier: identifier) + else { + return nil + } + + return sharedContainer + .appendingPathComponent(BroadcastConstants.broadcastSharePath) + .path + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastStopCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastStopCaptureHandler.swift new file mode 100644 index 000000000..84cd9a566 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastStopCaptureHandler.swift @@ -0,0 +1,24 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import ReplayKit +import StreamWebRTC + +final class BroadcastStopCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + @Injected(\.broadcastBufferReader) private var broadcastBufferReader + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case .stopCapture: + broadcastBufferReader.stopCapturing() + log.debug("\(type(of: self)) stopped capturing.", subsystems: .videoCapturer) + default: + break + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraBackgroundAccessHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraBackgroundAccessHandler.swift new file mode 100644 index 000000000..dfd644a98 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraBackgroundAccessHandler.swift @@ -0,0 +1,27 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation +import Foundation + +final class CameraBackgroundAccessHandler: StreamVideoCapturerActionHandler { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + guard #available(iOS 16, *) else { + return + } + + switch action { + case let .checkBackgroundCameraAccess(videoCaptureSession) + where videoCaptureSession.isMultitaskingCameraAccessSupported == true: + videoCaptureSession.beginConfiguration() + videoCaptureSession.isMultitaskingCameraAccessEnabled = true + videoCaptureSession.commitConfiguration() + default: + break + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraCapturePhotoHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraCapturePhotoHandler.swift new file mode 100644 index 000000000..5067d6bca --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraCapturePhotoHandler.swift @@ -0,0 +1,96 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import CoreMedia +import Foundation +import StreamWebRTC + +final class CameraCapturePhotoHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .addCapturePhotoOutput(capturePhotoOutput, captureSession): + try addCapturePhotoOutput( + capturePhotoOutput: capturePhotoOutput, + captureSession: captureSession + ) + + case let .removeCapturePhotoOutput(capturePhotoOutput, captureSession): + removeCapturePhotoOutput( + capturePhotoOutput: capturePhotoOutput, + captureSession: captureSession + ) + default: + break + } + } + + // MARK: - Private + + /// Adds the `AVCapturePhotoOutput` on the `CameraVideoCapturer` to enable photo + /// capturing capabilities. 
+ /// + /// This method configures the local user's `CameraVideoCapturer` with an + /// `AVCapturePhotoOutput` for capturing photos. This enhancement allows applications to capture + /// still images while video capturing is ongoing. + /// + /// - Parameter capturePhotoOutput: The `AVCapturePhotoOutput` instance to be added + /// to the `CameraVideoCapturer`. This output enables the capture of photos alongside video + /// capturing. + /// + /// - Throws: An error if the `CameraVideoCapturer` does not support adding an `AVCapturePhotoOutput`. + /// This method is specifically designed for `RTCCameraVideoCapturer` instances. If the + /// `CameraVideoCapturer` in use does not support photo output functionality, an appropriate error + /// will be thrown to indicate that the operation is not supported. + /// + /// - Warning: A maximum of one output of each type may be added. + private func addCapturePhotoOutput( + capturePhotoOutput: AVCapturePhotoOutput, + captureSession: AVCaptureSession + ) throws { + guard + captureSession.canAddOutput(capturePhotoOutput) + else { + throw ClientError("\(type(of: self)) captureSession cannot addOutput output:\(capturePhotoOutput).") + } + + captureSession.beginConfiguration() + captureSession.addOutput(capturePhotoOutput) + captureSession.commitConfiguration() + } + + /// Removes the `AVCapturePhotoOutput` from the `CameraVideoCapturer` to disable photo + /// capturing capabilities. + /// + /// This method configures the local user's `CameraVideoCapturer` by removing an + /// `AVCapturePhotoOutput` previously added for capturing photos. This action is necessary when + /// the application needs to stop capturing still images or when adjusting the capturing setup. It ensures + /// that the video capturing process can continue without the overhead or interference of photo + /// capturing capabilities. + /// + /// - Parameter capturePhotoOutput: The `AVCapturePhotoOutput` instance to be removed + /// from the `CameraVideoCapturer`. 
Removing this output disables the capture of photos alongside + /// video capturing. + /// + /// - Throws: An error if the `CameraVideoCapturer` does not support removing an + /// `AVCapturePhotoOutput`. + /// This method is specifically designed for `RTCCameraVideoCapturer` instances. If the + /// `CameraVideoCapturer` in use does not support the removal of photo output functionality, an + /// appropriate error will be thrown to indicate that the operation is not supported. + /// + /// - Note: Ensure that the `AVCapturePhotoOutput` being removed was previously added to the + /// `CameraVideoCapturer`. Attempting to remove an output that is not currently added will not + /// affect the capture session but may result in unnecessary processing. + private func removeCapturePhotoOutput( + capturePhotoOutput: AVCapturePhotoOutput, + captureSession: AVCaptureSession + ) { + captureSession.beginConfiguration() + captureSession.removeOutput(capturePhotoOutput) + captureSession.commitConfiguration() + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraFocusHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraFocusHandler.swift new file mode 100644 index 000000000..c0ed88260 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraFocusHandler.swift @@ -0,0 +1,93 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation +import CoreMedia +import Foundation +import StreamWebRTC + +final class CameraFocusHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .focus(point, captureSession): + try execute( + point: point, + captureSession: captureSession + ) + default: + break + } + } + + // MARK: - Private + + /// Initiates a focus and exposure operation at the specified point on the camera's view. + /// + /// This method attempts to focus the camera and set the exposure at a specific point by interacting + /// with the device's capture session. + /// It requires the `videoCapturer` property to be cast to `RTCCameraVideoCapturer`, and for + /// a valid `AVCaptureDeviceInput` to be accessible. + /// If these conditions are not met, it throws a `ClientError` error. + /// + /// - Parameter point: A `CGPoint` representing the location within the view where the camera + /// should adjust focus and exposure. + /// - Throws: A `ClientError` error if the necessary video capture components are + /// not available or properly configured. + /// + /// - Note: Ensure that the `point` is normalized to the camera's coordinate space, ranging + /// from (0,0) at the top-left to (1,1) at the bottom-right. 
+ private func execute( + point: CGPoint, + captureSession: AVCaptureSession + ) throws { + guard + let activeCaptureDevice = captureSession.activeVideoCaptureDevice + else { + throw ClientError("\(type(of: self)) was unable to perform action because no capturing device found.") + } + + try activeCaptureDevice.lockForConfiguration() + + if activeCaptureDevice.isFocusPointOfInterestSupported { + activeCaptureDevice.focusPointOfInterest = point + } else { + log.warning( + "\(type(of: self)) capture device doesn't support focusPointOfInterest.", + subsystems: .videoCapturer + ) + } + + if activeCaptureDevice.isFocusModeSupported(.autoFocus) { + activeCaptureDevice.focusMode = .autoFocus + } else { + log.warning( + "\(type(of: self)) capture device doesn't support focusMode:.autoFocus.", + subsystems: .videoCapturer + ) + } + + if activeCaptureDevice.isExposurePointOfInterestSupported { + activeCaptureDevice.exposurePointOfInterest = point + } else { + log.warning( + "\(type(of: self)) capture device doesn't support exposurePointOfInterest.", + subsystems: .videoCapturer + ) + } + + if activeCaptureDevice.isExposureModeSupported(.autoExpose) { + activeCaptureDevice.exposureMode = .autoExpose + } else { + log.warning( + "\(type(of: self)) capture device doesn't support exposureMode:.autoExpose.", + subsystems: .videoCapturer + ) + } + + activeCaptureDevice.unlockForConfiguration() + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraStartCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraStartCaptureHandler.swift new file mode 100644 index 000000000..a5346c4a7 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraStartCaptureHandler.swift @@ -0,0 +1,241 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation +import CoreMedia +import Foundation +import StreamWebRTC + +final class CameraStartCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + private struct Configuration: Equatable, Sendable { + /// The camera position to use for capturing (e.g., front or back camera). + var position: AVCaptureDevice.Position + /// The dimensions (width and height) for the captured video. + var dimensions: CGSize + /// The frame rate for video capturing in frames per second (fps). + var frameRate: Int + } + + @Injected(\.captureDeviceProvider) private var captureDeviceProvider + + private var activeConfiguration: Configuration? + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .startCapture(position, dimensions, frameRate, videoSource, videoCapturer, videoCapturerDelegate): + guard let cameraCapturer = videoCapturer as? RTCCameraVideoCapturer else { + return + } + try await execute( + configuration: .init( + position: position, + dimensions: dimensions, + frameRate: frameRate + ), + videoSource: videoSource, + videoCapturer: cameraCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + + case let .setCameraPosition(position, videoSource, videoCapturer, videoCapturerDelegate): + guard + let cameraCapturer = videoCapturer as? RTCCameraVideoCapturer, + let activeConfiguration + else { + return + } + try await execute( + configuration: .init( + position: position, + dimensions: activeConfiguration.dimensions, + frameRate: activeConfiguration.frameRate + ), + videoSource: videoSource, + videoCapturer: cameraCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + + case let .updateCaptureQuality(dimensions, device, videoSource, videoCapturer, videoCapturerDelegate): + guard + let cameraCapturer = videoCapturer as? 
RTCCameraVideoCapturer, + let activeConfiguration + else { + return + } + try await updateCaptureQuality( + configuration: .init( + position: activeConfiguration.position, + dimensions: dimensions, + frameRate: activeConfiguration.frameRate + ), + captureDevice: device, + videoSource: videoSource, + videoCapturer: cameraCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + + case .stopCapture: + activeConfiguration = nil + default: + break + } + } + + // MARK: - Private + + private func execute( + configuration: Configuration, + videoSource: RTCVideoSource, + videoCapturer: RTCCameraVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate + ) async throws { + guard configuration != activeConfiguration else { + log.debug( + "\(type(of: self)) performed no action as configuration wasn't changed.", + subsystems: .videoCapturer + ) + return + } + + guard + let captureDevice = captureDeviceProvider.device(for: configuration.position) + else { + throw ClientError("\(type(of: self)) was unable to perform action because no capture device was found.") + } + + guard let outputFormat = captureDevice.outputFormat( + preferredDimensions: .init(configuration.dimensions), + preferredFrameRate: configuration.frameRate + ) else { + throw ClientError( + "\(type(of: self)) was unable to perform action because no output format found for dimensions:\(configuration.dimensions) frameRate:\(configuration.frameRate)." 
+ ) + } + + adaptOutputFormatIfRequired( + outputFormat, + on: videoSource, + configuration: configuration + ) + + try await startCapture( + on: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate, + with: captureDevice, + format: outputFormat, + configuration: configuration + ) + + activeConfiguration = configuration + + log.debug( + "\(type(of: self)) started capturing with configuration position:\(configuration.position) dimensions:\(configuration.dimensions) frameRate:\(configuration.frameRate).", + subsystems: .videoCapturer + ) + } + + private func adaptOutputFormatIfRequired( + _ outputFormat: AVCaptureDevice.Format, + on videoSource: RTCVideoSource, + configuration: Configuration + ) { + let outputFormatDimensions = outputFormat.dimensions + + guard + outputFormatDimensions.area != CMVideoDimensions(configuration.dimensions).area + else { + log.debug( + "\(type(of: self)) videoSource adaptation isn't required for dimensions:\(CGSize(outputFormatDimensions)) frameRate:\(configuration.frameRate).", + subsystems: .videoCapturer + ) + return + } + + videoSource.adaptOutputFormat( + toWidth: outputFormatDimensions.width, + height: outputFormatDimensions.height, + fps: Int32(configuration.frameRate.clamped(to: outputFormat.frameRateRange)) + ) + + log.debug( + "\(type(of: self)) videoSource adaptation executed for dimensions:\(CGSize(outputFormatDimensions)) frameRate:\(configuration.frameRate).", + subsystems: .videoCapturer + ) + } + + private func startCapture( + on videoCapturer: RTCCameraVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate, + with device: AVCaptureDevice, + format: AVCaptureDevice.Format, + configuration: Configuration + ) async throws { + try await withCheckedThrowingContinuation { continuation in + videoCapturer.startCapture( + with: device, + format: format, + fps: configuration.frameRate.clamped(to: format.frameRateRange) + ) { error in + if let error { + continuation.resume(throwing: 
ClientError(error.localizedDescription)) + } else { + continuation.resume() + } + } + } as Void + + if let videoCapturerDelegate = videoCapturerDelegate as? StreamVideoCaptureHandler { + videoCapturerDelegate.currentCameraPosition = device.position + } + } + + private func updateCaptureQuality( + configuration: Configuration, + captureDevice: AVCaptureDevice, + videoSource: RTCVideoSource, + videoCapturer: RTCCameraVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate + ) async throws { + guard configuration != activeConfiguration else { + log.debug( + "\(type(of: self)) performed no action as configuration wasn't changed.", + subsystems: .videoCapturer + ) + return + } + + guard let outputFormat = captureDevice.outputFormat( + preferredDimensions: .init(configuration.dimensions), + preferredFrameRate: configuration.frameRate + ) else { + throw ClientError( + "\(type(of: self)) was unable to perform action because no output format found for dimensions:\(configuration.dimensions) frameRate:\(configuration.frameRate)." 
+ ) + } + + adaptOutputFormatIfRequired( + outputFormat, + on: videoSource, + configuration: configuration + ) + + try await startCapture( + on: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate, + with: captureDevice, + format: outputFormat, + configuration: configuration + ) + + activeConfiguration = configuration + + log.debug( + "\(type(of: self)) updated capturing with configuration position:\(configuration.position) dimensions:\(configuration.dimensions) frameRate:\(configuration.frameRate).", + subsystems: .videoCapturer + ) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraStopCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraStopCaptureHandler.swift new file mode 100644 index 000000000..c60e2cd6d --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraStopCaptureHandler.swift @@ -0,0 +1,38 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import CoreMedia +import Foundation +import StreamWebRTC + +final class CameraStopCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .stopCapture(videoCapturer): + guard + let cameraVideoCapturer = videoCapturer as? 
RTCCameraVideoCapturer + else { + return + } + await execute(cameraVideoCapturer) + default: + break + } + } + + // MARK: - Private + + private func execute(_ videoCapturer: RTCCameraVideoCapturer) async { + await withCheckedContinuation { continuation in + videoCapturer.stopCapture { + continuation.resume() + } + } + log.debug("\(type(of: self)) stopped capturing.", subsystems: .videoCapturer) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraVideoOutputHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraVideoOutputHandler.swift new file mode 100644 index 000000000..16b9f0fe0 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraVideoOutputHandler.swift @@ -0,0 +1,99 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import CoreMedia +import Foundation +import StreamWebRTC + +final class CameraVideoOutputHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .addVideoOutput(videoOutput, captureSession): + try addVideoOutput( + videoOutput: videoOutput, + captureSession: captureSession + ) + + case let .removeVideoOutput(videoOutput, captureSession): + removeVideoOutput( + videoOutput: videoOutput, + captureSession: captureSession + ) + default: + break + } + } + + // MARK: - Private + + /// Adds an `AVCaptureVideoDataOutput` to the `CameraVideoCapturer` for video frame + /// processing capabilities. + /// + /// This method configures the local user's `CameraVideoCapturer` with an + /// `AVCaptureVideoDataOutput`, enabling the processing of video frames. This is particularly + /// useful for applications that require access to raw video data for analysis, filtering, or other processing + /// tasks while video capturing is in progress. 
+ /// + /// - Parameter videoOutput: The `AVCaptureVideoDataOutput` instance to be added to + /// the `CameraVideoCapturer`. This output facilitates the capture and processing of live video + /// frames. + /// + /// - Throws: An error if the `CameraVideoCapturer` does not support adding an + /// `AVCaptureVideoDataOutput`. This functionality is specific to `RTCCameraVideoCapturer` + /// instances. If the current `CameraVideoCapturer` does not accommodate video output, an error + /// will be thrown to signify the unsupported operation. + /// + /// - Warning: A maximum of one output of each type may be added. For applications linked on or + /// after iOS 16.0, this restriction no longer applies to AVCaptureVideoDataOutputs. When adding more + /// than one AVCaptureVideoDataOutput, AVCaptureSession.hardwareCost must be taken into account. + func addVideoOutput( + videoOutput: AVCaptureVideoDataOutput, + captureSession: AVCaptureSession + ) throws { + guard + captureSession.canAddOutput(videoOutput) + else { + throw ClientError("\(type(of: self)) captureSession cannot addOutput output:\(videoOutput).") + } + captureSession.beginConfiguration() + captureSession.addOutput(videoOutput) + captureSession.commitConfiguration() + } + + /// Removes an `AVCaptureVideoDataOutput` from the `CameraVideoCapturer` to disable + /// video frame processing capabilities. + /// + /// This method reconfigures the local user's `CameraVideoCapturer` by removing an + /// `AVCaptureVideoDataOutput` that was previously added. This change is essential when the + /// application no longer requires access to raw video data for analysis, filtering, or other processing + /// tasks, or when adjusting the video capturing setup for different operational requirements. It ensures t + /// hat video capturing can proceed without the additional processing overhead associated with + /// handling video frame outputs. 
+ /// + /// - Parameter videoOutput: The `AVCaptureVideoDataOutput` instance to be removed + /// from the `CameraVideoCapturer`. Removing this output stops the capture and processing of live video + /// frames through the specified output, simplifying the capture session. + /// + /// - Throws: An error if the `CameraVideoCapturer` does not support removing an + /// `AVCaptureVideoDataOutput`. This functionality is tailored for `RTCCameraVideoCapturer` + /// instances. If the `CameraVideoCapturer` being used does not permit the removal of video outputs, + /// an error will be thrown to indicate the unsupported operation. + /// + /// - Note: It is crucial to ensure that the `AVCaptureVideoDataOutput` intended for removal + /// has been previously added to the `CameraVideoCapturer`. Trying to remove an output that is + /// not part of the capture session will have no negative impact but could lead to unnecessary processing + /// and confusion. + func removeVideoOutput( + videoOutput: AVCaptureVideoDataOutput, + captureSession: AVCaptureSession + ) { + captureSession.beginConfiguration() + captureSession.removeOutput(videoOutput) + captureSession.commitConfiguration() + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraZoomHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraZoomHandler.swift new file mode 100644 index 000000000..3dce64e53 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraZoomHandler.swift @@ -0,0 +1,60 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation +import Foundation + +final class CameraZoomHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .zoom(factor, captureSession): + try execute( + factor: factor, + captureSession: captureSession + ) + default: + break + } + } + + // MARK: - Private + + /// Zooms the camera video by the specified factor. + /// + /// This method attempts to zoom the camera's video feed by adjusting the `videoZoomFactor` of + /// the camera's active device. It first checks if the video capturer is of type `RTCCameraVideoCapturer` + /// and if the current camera device supports zoom by verifying that the `videoMaxZoomFactor` of + /// the active format is greater than 1.0. If these conditions are met, it proceeds to apply the requested + /// zoom factor, clamping it within the supported range to avoid exceeding the device's capabilities. + /// + /// - Parameter factor: The desired zoom factor. A value of 1.0 represents no zoom, while values + /// greater than 1.0 increase the zoom level. The factor is clamped to the maximum zoom factor supported + /// by the device to ensure it remains within valid bounds. + /// + /// - Throws: `ClientError.Unexpected` if the video capturer is not of type + /// `RTCCameraVideoCapturer`, or if the device does not support zoom. Also, throws an error if + /// locking the device for configuration fails. + /// + /// - Note: This method should be used cautiously, as setting a zoom factor significantly beyond the + /// optimal range can degrade video quality. + private func execute( + factor: CGFloat, + captureSession: AVCaptureSession + ) throws { + guard + let activeCaptureDevice = captureSession.activeVideoCaptureDevice, + activeCaptureDevice.activeFormat.videoMaxZoomFactor > 1.0 // That ensures that the devices supports zoom. 
+ else { + throw ClientError("\(type(of: self)) captureDevice doesn't support zoom.") + } + + try activeCaptureDevice.lockForConfiguration() + let zoomFactor = max(1.0, min(factor, activeCaptureDevice.activeFormat.videoMaxZoomFactor)) + activeCaptureDevice.videoZoomFactor = zoomFactor + activeCaptureDevice.unlockForConfiguration() + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareStartCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareStartCaptureHandler.swift new file mode 100644 index 000000000..0e733a3cd --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareStartCaptureHandler.swift @@ -0,0 +1,150 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import ReplayKit +import StreamWebRTC + +final class ScreenShareStartCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + private let recorder: RPScreenRecorder + private var activeSession: Session? 
+ + private struct Session { + var videoCapturer: RTCVideoCapturer + var videoCapturerDelegate: RTCVideoCapturerDelegate + } + + init( + recorder: RPScreenRecorder = .shared() + ) { + self.recorder = recorder + } + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .startCapture(_, _, _, _, videoCapturer, videoCapturerDelegate): + try await execute( + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + case .stopCapture: + activeSession = nil + default: + break + } + } + + // MARK: Private + + private func execute( + videoCapturer: RTCVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate + ) async throws { + guard !recorder.isRecording else { + log.debug( + "\(type(of: self)) performed no action as recording is in progress.", + subsystems: .videoCapturer + ) + return + } + + // We disable the microphone as we don't support .screenshareAudio tracks + recorder.isMicrophoneEnabled = false + + try await withCheckedThrowingContinuation { [weak self] continuation in + guard + let recorder = self?.recorder + else { + continuation.resume() + return + } + + self?.activeSession = .init( + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + + recorder.startCapture { [weak self] sampleBuffer, sampleBufferType, error in + self?.didReceive( + sampleBuffer: sampleBuffer, + sampleBufferType: sampleBufferType, + error: error + ) + } completionHandler: { error in + if let error { + self?.activeSession = nil + continuation.resume(throwing: error) + } else { + continuation.resume() + } + } + } + } + + private func didReceive( + sampleBuffer: CMSampleBuffer, + sampleBufferType: RPSampleBufferType, + error: Error? 
+ ) { + guard + let activeSession = self.activeSession + else { + log.warning( + "\(type(of: self)) received sample buffer but no active session was found.", + subsystems: .videoCapturer + ) + return + } + + guard + sampleBufferType == .video + else { + log.warning( + "\(type(of: self)) only video sample buffers are supported. Received \(sampleBufferType).", + subsystems: .videoCapturer + ) + return + } + + guard + CMSampleBufferGetNumSamples(sampleBuffer) == 1, + CMSampleBufferIsValid(sampleBuffer), + CMSampleBufferDataIsReady(sampleBuffer) + else { + log.debug( + "\(type(of: self)) screenshare video sample buffer is invalid or not ready.", + subsystems: .videoCapturer + ) + return + } + + guard + let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) + else { + log.debug( + "\(type(of: self)) unable to extract pixel buffer from sample buffer.", + subsystems: .videoCapturer + ) + return + } + + let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + let timeStampNs = Int64(CMTimeGetSeconds(timeStamp) * Double(NSEC_PER_SEC)) + + let rtcBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer) + let rtcFrame = RTCVideoFrame( + buffer: rtcBuffer, + rotation: ._0, + timeStampNs: timeStampNs + ) + + activeSession.videoCapturerDelegate.capturer( + activeSession.videoCapturer, + didCapture: rtcFrame + ) + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareStopCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareStopCaptureHandler.swift new file mode 100644 index 000000000..a3be00c81 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareStopCaptureHandler.swift @@ -0,0 +1,51 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import Foundation +import ReplayKit +import StreamWebRTC + +final class ScreenShareStopCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + private let recorder: RPScreenRecorder + + init( + recorder: RPScreenRecorder = .shared() + ) { + self.recorder = recorder + } + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case .stopCapture: + try await execute() + default: + break + } + } + + // MARK: - Private + + private func execute() async throws { + try await withCheckedThrowingContinuation { [weak self] continuation in + guard + let recorder = self?.recorder, + recorder.isRecording + else { + continuation.resume() + return + } + + recorder.stopCapture { error in + if let error { + continuation.resume(throwing: error) + } else { + continuation.resume() + } + } + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorStartCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorStartCaptureHandler.swift new file mode 100644 index 000000000..19abc4d4d --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorStartCaptureHandler.swift @@ -0,0 +1,22 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +final class SimulatorStartCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .startCapture(_, _, _, _, videoCapturer, _): + guard let simulatorCapturer = videoCapturer as? 
SimulatorScreenCapturer else { + return + } + simulatorCapturer.startCapturing() + default: + break + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorStopCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorStopCaptureHandler.swift new file mode 100644 index 000000000..56ed455c6 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorStopCaptureHandler.swift @@ -0,0 +1,22 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation + +final class SimulatorStopCaptureHandler: StreamVideoCapturerActionHandler, @unchecked Sendable { + + // MARK: - StreamVideoCapturerActionHandler + + func handle(_ action: StreamVideoCapturer.Action) async throws { + switch action { + case let .stopCapture(videoCapturer): + guard let simulatorCapturer = videoCapturer as? SimulatorScreenCapturer else { + return + } + simulatorCapturer.stopCapturing() + default: + break + } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/BroadcastBufferReaderKey.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/BroadcastBufferReaderKey.swift new file mode 100644 index 000000000..8198c5102 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/BroadcastBufferReaderKey.swift @@ -0,0 +1,16 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import Foundation + +enum BroadcastBufferReaderKey: InjectionKey { + static var currentValue: BroadcastBufferReader = .init() +} + +extension InjectedValues { + var broadcastBufferReader: BroadcastBufferReader { + get { Self[BroadcastBufferReaderKey.self] } + set { Self[BroadcastBufferReaderKey.self] = newValue } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamCaptureDeviceProvider.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamCaptureDeviceProvider.swift new file mode 100644 index 000000000..efc21642c --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamCaptureDeviceProvider.swift @@ -0,0 +1,44 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +final class StreamCaptureDeviceProvider { + + private let firstResultIfMiss: Bool + + private var devices: [AVCaptureDevice] { + RTCCameraVideoCapturer.captureDevices() + } + + init(firstResultIfMiss: Bool = true) { + self.firstResultIfMiss = firstResultIfMiss + } + + func device(for position: AVCaptureDevice.Position) -> AVCaptureDevice? { + if let deviceFound = devices.first(where: { $0.position == position }) { + return deviceFound + } else if firstResultIfMiss { + return devices.first + } else { + return nil + } + } + + func device(for position: CameraPosition) -> AVCaptureDevice? { + device(for: position == .front ? 
AVCaptureDevice.Position.front : .back) + } +} + +extension StreamCaptureDeviceProvider: InjectionKey { + static var currentValue: StreamCaptureDeviceProvider = .init() +} + +extension InjectedValues { + var captureDeviceProvider: StreamCaptureDeviceProvider { + get { Self[StreamCaptureDeviceProvider.self] } + set { Self[StreamCaptureDeviceProvider.self] = newValue } + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamVideoCapturer.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamVideoCapturer.swift new file mode 100644 index 000000000..2b42c6a83 --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamVideoCapturer.swift @@ -0,0 +1,356 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +@preconcurrency import StreamWebRTC + +protocol StreamVideoCapturerActionHandler: Sendable { + func handle(_ action: StreamVideoCapturer.Action) async throws +} + +actor StreamVideoCapturer { + + enum Action: @unchecked Sendable, CustomStringConvertible { + case checkBackgroundCameraAccess(_ videoCaptureSession: AVCaptureSession) + case startCapture( + position: AVCaptureDevice.Position, + dimensions: CGSize, + frameRate: Int, + videoSource: RTCVideoSource, + videoCapturer: RTCVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate + ) + case stopCapture(videoCapturer: RTCVideoCapturer) + case setCameraPosition( + position: AVCaptureDevice.Position, + videoSource: RTCVideoSource, + videoCapturer: RTCVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate + ) + case updateCaptureQuality( + dimensions: CGSize, + device: AVCaptureDevice, + videoSource: RTCVideoSource, + videoCapturer: RTCVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate + ) + case focus( + point: CGPoint, + videoCaptureSession: AVCaptureSession + ) + case addCapturePhotoOutput( + capturePhotoOutput: AVCapturePhotoOutput, + videoCaptureSession: AVCaptureSession + ) + case removeCapturePhotoOutput( + 
 capturePhotoOutput: AVCapturePhotoOutput, + videoCaptureSession: AVCaptureSession + ) + case addVideoOutput( + videoOutput: AVCaptureVideoDataOutput, + videoCaptureSession: AVCaptureSession + ) + case removeVideoOutput( + videoOutput: AVCaptureVideoDataOutput, + videoCaptureSession: AVCaptureSession + ) + case zoom( + factor: CGFloat, + videoCaptureSession: AVCaptureSession + ) + + var description: String { + switch self { + case let .checkBackgroundCameraAccess(videoCaptureSession): + return ".checkBackgroundCameraAccess(videoCaptureSession:\(customString(for: videoCaptureSession)))" + + case let .startCapture(position, dimensions, frameRate, videoSource, videoCapturer, videoCapturerDelegate): + return ".startCapture(position:\(position), dimensions:\(dimensions), frameRate:\(frameRate), videoSource:\(customString(for: videoSource)), videoCapturer:\(customString(for: videoCapturer)), videoCapturerDelegate:\(customString(for: videoCapturerDelegate)))" + + case let .stopCapture(videoCapturer): + return ".stopCapture(videoCapturer:\(customString(for: videoCapturer)))" + + case let .setCameraPosition(position, videoSource, videoCapturer, videoCapturerDelegate): + return ".setCameraPosition(position:\(position), videoSource:\(customString(for: videoSource)), videoCapturer:\(customString(for: videoCapturer)), videoCapturerDelegate:\(customString(for: videoCapturerDelegate)))" + + case let .updateCaptureQuality(dimensions, device, videoSource, videoCapturer, videoCapturerDelegate): + return ".updateCaptureQuality(dimensions:\(dimensions), device:\(customString(for: device)), videoSource:\(customString(for: videoSource)), videoCapturer:\(customString(for: videoCapturer)), videoCapturerDelegate:\(customString(for: videoCapturerDelegate)))" + + case let .focus(point, videoCaptureSession): + return ".focus(point:\(point), videoCaptureSession:\(customString(for: videoCaptureSession)))" + + case let .addCapturePhotoOutput(capturePhotoOutput, videoCaptureSession): + return 
".addCapturePhotoOutput(capturePhotoOutput:\(capturePhotoOutput), videoCaptureSession:\(customString(for: videoCaptureSession)))" + + case let .removeCapturePhotoOutput(capturePhotoOutput, videoCaptureSession): + return ".removeCapturePhotoOutput(capturePhotoOutput:\(capturePhotoOutput), videoCaptureSession:\(customString(for: videoCaptureSession)))" + + case let .addVideoOutput(videoOutput, videoCaptureSession): + return ".addVideoOutput(videoOutput:\(videoOutput), videoCaptureSession:\(customString(for: videoCaptureSession)))" + + case let .removeVideoOutput(videoOutput, videoCaptureSession): + return ".removeVideoOutput(videoOutput:\(videoOutput), videoCaptureSession:\(customString(for: videoCaptureSession)))" + + case let .zoom(factor, videoCaptureSession): + return ".zoom(factor:\(factor), videoCaptureSession:\(customString(for: videoCaptureSession)))" + } + } + } + + private let videoSource: RTCVideoSource + private let videoCapturer: RTCVideoCapturer + private let videoCapturerDelegate: RTCVideoCapturerDelegate + private let actionHandlers: [StreamVideoCapturerActionHandler] + + private var videoCaptureSession: AVCaptureSession? { + guard + let cameraVideoCapturer = videoCapturer as? 
RTCCameraVideoCapturer + else { + return nil + } + return cameraVideoCapturer.captureSession + } + + // MARK: - Initialisers + + static func cameraCapturer( + with videoSource: RTCVideoSource, + videoCaptureSession: AVCaptureSession = .init() + ) -> StreamVideoCapturer { + let videoCapturerDelegate = StreamVideoCaptureHandler(source: videoSource) + + #if targetEnvironment(simulator) + let videoCapturer: RTCVideoCapturer = { + if let videoURL = InjectedValues[\.simulatorStreamFile] { + return SimulatorScreenCapturer( + delegate: videoCapturerDelegate, + videoURL: videoURL + ) + } else { + return RTCFileVideoCapturer(delegate: videoSource) + } + }() + return .init( + videoSource: videoSource, + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate, + actionHandlers: [ + SimulatorStartCaptureHandler(), + SimulatorStopCaptureHandler() + ] + ) + #else + return .init( + videoSource: videoSource, + videoCapturer: RTCCameraVideoCapturer( + delegate: videoCapturerDelegate, + captureSession: videoCaptureSession + ), + videoCapturerDelegate: videoCapturerDelegate, + actionHandlers: [ + CameraBackgroundAccessHandler(), + CameraStartCaptureHandler(), + CameraStopCaptureHandler(), + CameraFocusHandler(), + CameraCapturePhotoHandler(), + CameraVideoOutputHandler(), + CameraZoomHandler() + ] + ) + #endif + } + + static func screenShareCapturer( + with videoSource: RTCVideoSource + ) -> StreamVideoCapturer { + .init( + videoSource: videoSource, + videoCapturer: RTCVideoCapturer(delegate: videoSource), + videoCapturerDelegate: videoSource, + actionHandlers: [ + ScreenShareStartCaptureHandler(), + ScreenShareStopCaptureHandler() + ] + ) + } + + static func broadcastCapturer( + with videoSource: RTCVideoSource + ) -> StreamVideoCapturer { + .init( + videoSource: videoSource, + videoCapturer: RTCVideoCapturer(delegate: videoSource), + videoCapturerDelegate: videoSource, + actionHandlers: [ + BroadcastStartCaptureHandler(), + BroadcastStopCaptureHandler() + ] + ) 
+ } + + init( + videoSource: RTCVideoSource, + videoCapturer: RTCVideoCapturer, + videoCapturerDelegate: RTCVideoCapturerDelegate, + actionHandlers: [StreamVideoCapturerActionHandler] + ) { + self.videoSource = videoSource + self.videoCapturer = videoCapturer + self.videoCapturerDelegate = videoCapturerDelegate + self.actionHandlers = actionHandlers + } + + // MARK: - Actions + + func startCapture( + position: AVCaptureDevice.Position = .front, + dimensions: CGSize, + frameRate: Int + ) async throws { + _ = try await enqueueOperation( + for: .startCapture( + position: position, + dimensions: dimensions, + frameRate: frameRate, + videoSource: videoSource, + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + ).value + } + + func stopCapture() async throws { + _ = try await enqueueOperation( + for: .stopCapture( + videoCapturer: videoCapturer + ) + ).value + } + + func setCameraPosition(_ position: AVCaptureDevice.Position) async throws { + guard videoCaptureSession != nil else { return } + _ = try await enqueueOperation( + for: .setCameraPosition( + position: position, + videoSource: videoSource, + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + ).value + } + + func setVideoFilter(_ videoFilter: VideoFilter?) { + guard + let videoCapturerDelegate = videoCapturerDelegate as? 
StreamVideoCaptureHandler + else { + return + } + videoCapturerDelegate.selectedFilter = videoFilter + } + + func updateCaptureQuality( + _ dimensions: CGSize, + on device: AVCaptureDevice + ) async throws { + guard videoCaptureSession != nil else { return } + _ = try await enqueueOperation( + for: .updateCaptureQuality( + dimensions: dimensions, + device: device, + videoSource: videoSource, + videoCapturer: videoCapturer, + videoCapturerDelegate: videoCapturerDelegate + ) + ).value + } + + func focus(at point: CGPoint) async throws { + guard let videoCaptureSession else { return } + _ = try await enqueueOperation( + for: .focus( + point: point, + videoCaptureSession: videoCaptureSession + ) + ).value + } + + func zoom(by factor: CGFloat) async throws { + guard let videoCaptureSession else { return } + _ = try await enqueueOperation( + for: .zoom( + factor: factor, + videoCaptureSession: videoCaptureSession + ) + ).value + } + + func addCapturePhotoOutput( + _ capturePhotoOutput: AVCapturePhotoOutput + ) async throws { + guard let videoCaptureSession else { return } + _ = try await enqueueOperation( + for: .addCapturePhotoOutput( + capturePhotoOutput: capturePhotoOutput, + videoCaptureSession: videoCaptureSession + ) + ).value + } + + func removeCapturePhotoOutput( + _ capturePhotoOutput: AVCapturePhotoOutput + ) async throws { + guard let videoCaptureSession else { return } + _ = try await enqueueOperation( + for: .removeCapturePhotoOutput( + capturePhotoOutput: capturePhotoOutput, + videoCaptureSession: videoCaptureSession + ) + ).value + } + + func addVideoOutput( + _ videoOutput: AVCaptureVideoDataOutput + ) async throws { + guard let videoCaptureSession else { return } + _ = try await enqueueOperation( + for: .addVideoOutput( + videoOutput: videoOutput, + videoCaptureSession: videoCaptureSession + ) + ).value + } + + func removeVideoOutput( + _ videoOutput: AVCaptureVideoDataOutput + ) async throws { + guard let videoCaptureSession else { return } + _ = try 
await enqueueOperation( + for: .removeVideoOutput( + videoOutput: videoOutput, + videoCaptureSession: videoCaptureSession + ) + ).value + } + + // MARK: - Private + + private func enqueueOperation( + for action: Action + ) -> Task<Void, Error> { + Task { + let actionHandlers = self.actionHandlers + for actionHandler in actionHandlers { + try await actionHandler.handle(action) + } + log.debug( + "VideoCapturer completed execution of action:\(action).", + subsystems: .videoCapturer + ) + } + } +} + +private func customString(for object: AnyObject) -> String { + "\(type(of: object))(\(Unmanaged.passUnretained(object).toOpaque()))" +} diff --git a/Sources/StreamVideo/WebRTC/v2/VideoLayerFactory/VideoLayerFactory.swift b/Sources/StreamVideo/WebRTC/v2/VideoLayerFactory/VideoLayerFactory.swift new file mode 100644 index 000000000..a87b6712e --- /dev/null +++ b/Sources/StreamVideo/WebRTC/v2/VideoLayerFactory/VideoLayerFactory.swift @@ -0,0 +1,40 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. 
+// + +import CoreMedia +import Foundation + +final class VideoLayerFactory { + + func videoLayers( + for publishOption: Stream_Video_Sfu_Models_PublishOption, + qualities: [VideoLayer.Quality] = [.full, .half, .quarter] + ) -> [VideoLayer] { + let publishOptionWidth = Int(publishOption.videoDimension.width) + let publishOptionHeight = Int(publishOption.videoDimension.height) + let publishOptionBitrate = Int(publishOption.bitrate) + + var scaleDownFactor: Int = 1 + + var videoLayers: [VideoLayer] = [] + for quality in qualities { + let width = publishOptionWidth / scaleDownFactor + let height = publishOptionHeight / scaleDownFactor + let bitrate = publishOptionBitrate / Int(scaleDownFactor) + let dimensions = CMVideoDimensions(width: Int32(width), height: Int32(height)) + + let videoLayer = VideoLayer( + dimensions: dimensions, + quality: quality, + maxBitrate: bitrate, + sfuQuality: .init(quality) + ) + + videoLayers.append(videoLayer) + scaleDownFactor *= 2 + } + + return videoLayers.reversed() + } +} diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift index 30aa4de42..13d1b0c84 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift @@ -88,7 +88,6 @@ struct WebRTCAuthenticator: WebRTCAuthenticating { let videoOptions = await coordinator.stateAdapter.videoOptions await coordinator.stateAdapter.set( videoOptions: videoOptions - .with(preferredTargetResolution: response.call.settings.video.targetResolution) .with(preferredCameraPosition: { switch response.call.settings.video.cameraFacing { case .back: diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift index aa8e0bc69..76942cf8c 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift @@ -406,12 +406,11 @@ final class 
WebRTCCoordinator: @unchecked Sendable { preferredVideoCodec: VideoCodec, maxBitrate: Int ) async { - await stateAdapter.set( - videoOptions: await stateAdapter - .videoOptions - .with(preferredBitrate: maxBitrate) - .with(preferredVideoCodec: preferredVideoCodec) - ) + await stateAdapter.set(publishOptions: PublishOptions( + video: [ + .init(codec: preferredVideoCodec, bitrate: maxBitrate) + ] + )) } // MARK: - Private diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCJoinRequestFactory.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCJoinRequestFactory.swift index f3a6cce1d..35fb5bb3c 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCJoinRequestFactory.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCJoinRequestFactory.swift @@ -39,6 +39,7 @@ struct WebRTCJoinRequestFactory { func buildRequest( with connectionType: ConnectionType, coordinator: WebRTCCoordinator, + publisherSdp: String, subscriberSdp: String, reconnectAttempt: UInt32, publisher: RTCPeerConnectionCoordinator?, @@ -49,9 +50,21 @@ struct WebRTCJoinRequestFactory { var result = Stream_Video_Sfu_Event_JoinRequest() result.clientDetails = SystemEnvironment.clientDetails result.sessionID = await coordinator.stateAdapter.sessionID + result.publisherSdp = publisherSdp result.subscriberSdp = subscriberSdp result.fastReconnect = connectionType.isFastReconnect result.token = await coordinator.stateAdapter.token + + switch connectionType { + case .default: + result.preferredPublishOptions = await buildPreferredPublishOptions( + coordinator: coordinator, + publisherSdp: publisherSdp + ) + default: + break + } + if let reconnectDetails = await buildReconnectDetails( for: connectionType, coordinator: coordinator, @@ -95,7 +108,6 @@ struct WebRTCJoinRequestFactory { case .fastReconnect: result.announcedTracks = buildAnnouncedTracks( publisher, - videoOptions: await coordinator.stateAdapter.videoOptions, file: file, function: function, line: line @@ -116,7 +128,6 @@ struct WebRTCJoinRequestFactory { case let 
.migration(fromHostname): result.announcedTracks = buildAnnouncedTracks( publisher, - videoOptions: await coordinator.stateAdapter.videoOptions, file: file, function: function, line: line @@ -138,7 +149,6 @@ struct WebRTCJoinRequestFactory { case let .rejoin(fromSessionID): result.announcedTracks = buildAnnouncedTracks( publisher, - videoOptions: await coordinator.stateAdapter.videoOptions, file: file, function: function, line: line @@ -164,51 +174,25 @@ struct WebRTCJoinRequestFactory { /// Builds announced tracks for the join request. /// - Parameters: /// - publisher: The RTC peer connection coordinator for publishing. - /// - videoOptions: The video options for the tracks. /// - file: The file where the method is called. /// - function: The function where the method is called. /// - line: The line number where the method is called. /// - Returns: An array of announced tracks. func buildAnnouncedTracks( _ publisher: RTCPeerConnectionCoordinator?, - videoOptions: VideoOptions, file: StaticString = #file, function: StaticString = #function, line: UInt = #line ) -> [Stream_Video_Sfu_Models_TrackInfo] { var result = [Stream_Video_Sfu_Models_TrackInfo]() - if let mid = publisher?.mid(for: .audio) { - var trackInfo = Stream_Video_Sfu_Models_TrackInfo() - trackInfo.trackID = publisher?.localTrack(of: .audio)?.trackId ?? "" - trackInfo.mid = mid - trackInfo.trackType = .audio - trackInfo.muted = publisher?.localTrack(of: .audio)?.isEnabled != true - result.append(trackInfo) + guard let publisher else { + return result } - if let mid = publisher?.mid(for: .video) { - var trackInfo = Stream_Video_Sfu_Models_TrackInfo() - trackInfo.trackID = publisher?.localTrack(of: .video)?.trackId ?? 
"" - trackInfo.layers = videoOptions - .videoLayers - .map { Stream_Video_Sfu_Models_VideoLayer($0) } - trackInfo.mid = mid - trackInfo.trackType = .video - trackInfo.muted = publisher?.localTrack(of: .video)?.isEnabled != true - result.append(trackInfo) - } - - if let mid = publisher?.mid(for: .screenshare) { - var trackInfo = Stream_Video_Sfu_Models_TrackInfo() - trackInfo.trackID = publisher?.localTrack(of: .screenshare)?.trackId ?? "" - trackInfo.layers = [VideoLayer.screenshare] - .map { Stream_Video_Sfu_Models_VideoLayer($0, fps: 15) } - trackInfo.mid = mid - trackInfo.trackType = .screenShare - trackInfo.muted = publisher?.localTrack(of: .screenshare)?.isEnabled != true - result.append(trackInfo) - } + result.append(contentsOf: publisher.trackInfo(for: .audio)) + result.append(contentsOf: publisher.trackInfo(for: .video)) + result.append(contentsOf: publisher.trackInfo(for: .screenshare)) return result } @@ -236,4 +220,24 @@ struct WebRTCJoinRequestFactory { .filter { $0.id != sessionID && $0.id != previousSessionID } .flatMap { $0.trackSubscriptionDetails(incomingVideoQualitySettings: incomingVideoQualitySettings) } } + + func buildPreferredPublishOptions( + coordinator: WebRTCCoordinator, + publisherSdp: String + ) async -> [Stream_Video_Sfu_Models_PublishOption] { + let sdpParser = SDPParser() + let rtmapVisitor = RTPMapVisitor() + sdpParser.registerVisitor(rtmapVisitor) + await sdpParser.parse(sdp: publisherSdp) + + return await coordinator + .stateAdapter + .publishOptions + .source + .map { + var publishOption = $0 + publishOption.codec.payloadType = UInt32(rtmapVisitor.payloadType(for: $0.codec.name) ?? 
0) + return publishOption + } + } } diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift index 3cf093827..326e1f172 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift @@ -55,6 +55,11 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { didSet { didUpdate(videoOptions: videoOptions) } } + /// Published property to track video options and update them. + @Published private(set) var publishOptions: PublishOptions = .default { + didSet { didUpdate(publishOptions: publishOptions) } + } + @Published private(set) var connectOptions: ConnectOptions = .init(iceServers: []) @Published private(set) var ownCapabilities: Set = [] @Published private(set) var sfuAdapter: SFUAdapter? @@ -131,13 +136,10 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { func set(audioSettings value: AudioSettings) { self.audioSettings = value } /// Sets the video options. - func set(videoOptions value: VideoOptions) { - self.videoOptions = value - - // Note that the preferredEncodingCodec won't affect any transceivers - // that have already been created. - peerConnectionFactory.setPreferredEncodingCodec(value.preferredVideoCodec) - } + func set(videoOptions value: VideoOptions) { self.videoOptions = value } + + /// Sets the publish options. + func set(publishOptions value: PublishOptions) { self.publishOptions = value } /// Sets the connection options. 
func set(connectOptions value: ConnectOptions) { self.connectOptions = value } @@ -222,6 +224,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { videoConfig: videoConfig, callSettings: callSettings, audioSettings: audioSettings, + publishOptions: publishOptions, sfuAdapter: sfuAdapter, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider @@ -239,6 +242,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { videoConfig: videoConfig, callSettings: callSettings, audioSettings: audioSettings, + publishOptions: publishOptions, sfuAdapter: sfuAdapter, videoCaptureSessionProvider: videoCaptureSessionProvider, screenShareSessionProvider: screenShareSessionProvider @@ -495,6 +499,11 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { subscriber?.videoOptions = videoOptions } + /// Updates the video options and notifies the publisher and subscriber. + private func didUpdate(publishOptions: PublishOptions) { + publisher?.publishOptions = publishOptions + } + // MARK: Participant Operations /// Updates the current participants and logs those with video tracks. 
diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj index 3cf4a4b85..0f642463f 100644 --- a/StreamVideo.xcodeproj/project.pbxproj +++ b/StreamVideo.xcodeproj/project.pbxproj @@ -8,12 +8,15 @@ /* Begin PBXBuildFile section */ 40013DDC2B87AA2300915453 /* SerialActor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40013DDB2B87AA2300915453 /* SerialActor.swift */; }; + 40034C202CFDABE600A318B1 /* PublishOptions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40034C1F2CFDABE600A318B1 /* PublishOptions.swift */; }; 40034C262CFE155C00A318B1 /* CallKitAvailabilityPolicyProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40034C252CFE155C00A318B1 /* CallKitAvailabilityPolicyProtocol.swift */; }; 40034C282CFE156800A318B1 /* CallKitAvailabilityPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40034C272CFE156800A318B1 /* CallKitAvailabilityPolicy.swift */; }; 40034C2A2CFE156F00A318B1 /* CallKitRegionBasedAvailabilityPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40034C292CFE156F00A318B1 /* CallKitRegionBasedAvailabilityPolicy.swift */; }; 40034C2C2CFE157300A318B1 /* CallKitAlwaysAvailabilityPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40034C2B2CFE157300A318B1 /* CallKitAlwaysAvailabilityPolicy.swift */; }; 40034C2E2CFE15AC00A318B1 /* CallKitRegionBasedAvailabilityPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40034C2D2CFE15AC00A318B1 /* CallKitRegionBasedAvailabilityPolicy.swift */; }; 40034C312CFE168D00A318B1 /* StreamLocaleProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40034C302CFE168D00A318B1 /* StreamLocaleProvider.swift */; }; + 40070F392CF0EF2200035FA9 /* Stream_Video_Sfu_Models_Codec+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40070F382CF0EF2200035FA9 /* Stream_Video_Sfu_Models_Codec+Convenience.swift */; }; + 40070F3B2CF0EF4500035FA9 /* Stream_Video_Sfu_Models_PublishOption+Convenience.swift in Sources */ = {isa = 
PBXBuildFile; fileRef = 40070F3A2CF0EF4500035FA9 /* Stream_Video_Sfu_Models_PublishOption+Convenience.swift */; }; 40073B6F2C456CB4006A2867 /* StreamPictureInPictureVideoRendererTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40073B6E2C456CB4006A2867 /* StreamPictureInPictureVideoRendererTests.swift */; }; 40073B752C456E06006A2867 /* StreamPictureInPictureAdaptiveWindowSizePolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40073B732C456DFC006A2867 /* StreamPictureInPictureAdaptiveWindowSizePolicy.swift */; }; 40073B762C456E0E006A2867 /* StreamPictureInPictureWindowSizePolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40073B682C456250006A2867 /* StreamPictureInPictureWindowSizePolicy.swift */; }; @@ -142,6 +145,10 @@ 40382F472C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F442C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift */; }; 40382F482C89D03700C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F412C89CF9300C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift */; }; 40382F502C8B3DAE00C2D00F /* StreamRTCPeerConnection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F4F2C8B3DA800C2D00F /* StreamRTCPeerConnection.swift */; }; + 4039F0C02D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0BF2D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift */; }; + 4039F0CA2D0222E40078159E /* VideoLayerFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0C92D0222E40078159E /* VideoLayerFactory.swift */; }; + 4039F0CC2D0241120078159E /* AudioCodec.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0CB2D0241120078159E /* AudioCodec.swift */; }; + 4039F0CF2D024DDF0078159E /* MediaTransceiverStorage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
4039F0CE2D024DDF0078159E /* MediaTransceiverStorage.swift */; }; 403BE0FE2A24C07300988F65 /* DeeplinkAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403BE0FD2A24C07300988F65 /* DeeplinkAdapter.swift */; }; 403BE1012A24C70000988F65 /* DemoApp+Sentry.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403BE1002A24C70000988F65 /* DemoApp+Sentry.swift */; }; 403CA9B22CC7BAD6001A88C2 /* VideoLayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403CA9B12CC7BAD6001A88C2 /* VideoLayer.swift */; }; @@ -185,6 +192,10 @@ 404C27CC2BF2552900DF2937 /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */; }; 404CAEE72B8F48F6007087BC /* DemoBackgroundEffectSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */; }; 4059C3422AAF0CE40006928E /* DemoChatViewModel+Injection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4059C3412AAF0CE40006928E /* DemoChatViewModel+Injection.swift */; }; + 406128812CF32FEF007F5CDC /* SDPLineVisitor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406128802CF32FEF007F5CDC /* SDPLineVisitor.swift */; }; + 406128832CF33000007F5CDC /* SDPParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406128822CF33000007F5CDC /* SDPParser.swift */; }; + 406128882CF33029007F5CDC /* RTPMapVisitor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406128872CF33029007F5CDC /* RTPMapVisitor.swift */; }; + 4061288B2CF33088007F5CDC /* SupportedPrefix.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4061288A2CF33088007F5CDC /* SupportedPrefix.swift */; }; 4063033F2AD847EC0091AE77 /* CallState_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4063033E2AD847EC0091AE77 /* CallState_Tests.swift */; }; 406303422AD848000091AE77 /* CallParticipant_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406303412AD848000091AE77 /* CallParticipant_Mock.swift 
*/; }; 406303462AD9432D0091AE77 /* GoogleSignInSwift in Frameworks */ = {isa = PBXBuildFile; productRef = 406303442AD942ED0091AE77 /* GoogleSignInSwift */; }; @@ -518,6 +529,31 @@ 40E18AAF2CD51E9400A65C9F /* LockQueuing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E18AAE2CD51E8E00A65C9F /* LockQueuing.swift */; }; 40E18AB22CD51FC100A65C9F /* UnfairQueueTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E18AB12CD51FC100A65C9F /* UnfairQueueTests.swift */; }; 40E18AB42CD522F700A65C9F /* RecursiveQueueTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E18AB32CD522F700A65C9F /* RecursiveQueueTests.swift */; }; + 40E3632E2D09DBFA0028C52A /* Int+DefaultValues.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3632D2D09DBFA0028C52A /* Int+DefaultValues.swift */; }; + 40E363312D09DC650028C52A /* CGSize+DefaultValues.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363302D09DC650028C52A /* CGSize+DefaultValues.swift */; }; + 40E363362D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363352D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift */; }; + 40E363382D09E6560028C52A /* Array+RTCRtpEncodingParameters.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363372D09E6560028C52A /* Array+RTCRtpEncodingParameters.swift */; }; + 40E3633E2D09EF560028C52A /* CMVideoDimensions+DefaultValues.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3633D2D09EF560028C52A /* CMVideoDimensions+DefaultValues.swift */; }; + 40E363402D09F0950028C52A /* Comparable+Clamped.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3633F2D09F0950028C52A /* Comparable+Clamped.swift */; }; + 40E363452D09F2BD0028C52A /* AVCaptureDevice.Format+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363442D09F2BD0028C52A /* AVCaptureDevice.Format+Convenience.swift */; }; + 40E363492D09F6BB0028C52A /* StreamVideoCapturer.swift in Sources */ = {isa 
= PBXBuildFile; fileRef = 40E363482D09F6B20028C52A /* StreamVideoCapturer.swift */; }; + 40E3634C2D09F9EF0028C52A /* CameraBackgroundAccessHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3634B2D09F9EF0028C52A /* CameraBackgroundAccessHandler.swift */; }; + 40E3634E2D09FDE50028C52A /* CameraStartCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3634D2D09FDDF0028C52A /* CameraStartCaptureHandler.swift */; }; + 40E363502D0A03FF0028C52A /* StreamCaptureDeviceProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3634F2D0A03FF0028C52A /* StreamCaptureDeviceProvider.swift */; }; + 40E363522D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363512D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift */; }; + 40E363562D0A11D30028C52A /* CameraStopCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363552D0A11D30028C52A /* CameraStopCaptureHandler.swift */; }; + 40E363592D0A139E0028C52A /* CameraFocusHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363582D0A139E0028C52A /* CameraFocusHandler.swift */; }; + 40E3635B2D0A15E40028C52A /* CameraCapturePhotoHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3635A2D0A15E40028C52A /* CameraCapturePhotoHandler.swift */; }; + 40E3635D2D0A17C10028C52A /* CameraVideoOutputHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3635C2D0A17C10028C52A /* CameraVideoOutputHandler.swift */; }; + 40E3635F2D0A18B10028C52A /* CameraZoomHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3635E2D0A18B10028C52A /* CameraZoomHandler.swift */; }; + 40E363622D0A1C2E0028C52A /* SimulatorStartCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363612D0A1C2E0028C52A /* SimulatorStartCaptureHandler.swift */; }; + 40E363642D0A1C360028C52A /* SimulatorStopCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363632D0A1C360028C52A /* 
SimulatorStopCaptureHandler.swift */; }; + 40E3636C2D0A24390028C52A /* ScreenShareStartCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3636B2D0A24310028C52A /* ScreenShareStartCaptureHandler.swift */; }; + 40E3636E2D0A26B40028C52A /* ScreenShareStopCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3636D2D0A26B40028C52A /* ScreenShareStopCaptureHandler.swift */; }; + 40E363712D0A27640028C52A /* BroadcastStartCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363702D0A27640028C52A /* BroadcastStartCaptureHandler.swift */; }; + 40E363732D0A277C0028C52A /* BroadcastStopCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363722D0A277C0028C52A /* BroadcastStopCaptureHandler.swift */; }; + 40E363752D0A2C6B0028C52A /* CGSize+Adapt.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */; }; + 40E363772D0A2E320028C52A /* BroadcastBufferReaderKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */; }; 40E9B3B12BCD755F00ACF18F /* MemberResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */; }; 40E9B3B32BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B22BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift */; }; 40E9B3B52BCD93F500ACF18F /* Credentials+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B42BCD93F500ACF18F /* Credentials+Dummy.swift */; }; @@ -1434,12 +1470,15 @@ /* Begin PBXFileReference section */ 40013DDB2B87AA2300915453 /* SerialActor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SerialActor.swift; sourceTree = ""; }; + 40034C1F2CFDABE600A318B1 /* PublishOptions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PublishOptions.swift; sourceTree = ""; }; 
40034C252CFE155C00A318B1 /* CallKitAvailabilityPolicyProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitAvailabilityPolicyProtocol.swift; sourceTree = ""; }; 40034C272CFE156800A318B1 /* CallKitAvailabilityPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitAvailabilityPolicy.swift; sourceTree = ""; }; 40034C292CFE156F00A318B1 /* CallKitRegionBasedAvailabilityPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitRegionBasedAvailabilityPolicy.swift; sourceTree = ""; }; 40034C2B2CFE157300A318B1 /* CallKitAlwaysAvailabilityPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitAlwaysAvailabilityPolicy.swift; sourceTree = ""; }; 40034C2D2CFE15AC00A318B1 /* CallKitRegionBasedAvailabilityPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitRegionBasedAvailabilityPolicy.swift; sourceTree = ""; }; 40034C302CFE168D00A318B1 /* StreamLocaleProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamLocaleProvider.swift; sourceTree = ""; }; + 40070F382CF0EF2200035FA9 /* Stream_Video_Sfu_Models_Codec+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_Codec+Convenience.swift"; sourceTree = ""; }; + 40070F3A2CF0EF4500035FA9 /* Stream_Video_Sfu_Models_PublishOption+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_PublishOption+Convenience.swift"; sourceTree = ""; }; 40073B682C456250006A2867 /* StreamPictureInPictureWindowSizePolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPictureInPictureWindowSizePolicy.swift; sourceTree = ""; }; 40073B6E2C456CB4006A2867 /* StreamPictureInPictureVideoRendererTests.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = StreamPictureInPictureVideoRendererTests.swift; sourceTree = ""; }; 40073B712C456DF6006A2867 /* StreamPictureInPictureFixedWindowSizePolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPictureInPictureFixedWindowSizePolicy.swift; sourceTree = ""; }; @@ -1521,6 +1560,10 @@ 40382F412C89CF9300C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift"; sourceTree = ""; }; 40382F442C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_Participant+Convenience.swift"; sourceTree = ""; }; 40382F4F2C8B3DA800C2D00F /* StreamRTCPeerConnection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCPeerConnection.swift; sourceTree = ""; }; + 4039F0BF2D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCRtpCodecCapability+Convenience.swift"; sourceTree = ""; }; + 4039F0C92D0222E40078159E /* VideoLayerFactory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoLayerFactory.swift; sourceTree = ""; }; + 4039F0CB2D0241120078159E /* AudioCodec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodec.swift; sourceTree = ""; }; + 4039F0CE2D024DDF0078159E /* MediaTransceiverStorage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaTransceiverStorage.swift; sourceTree = ""; }; 403BE0FD2A24C07300988F65 /* DeeplinkAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DeeplinkAdapter.swift; sourceTree = ""; }; 403BE1002A24C70000988F65 /* DemoApp+Sentry.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = "DemoApp+Sentry.swift"; sourceTree = ""; }; 403CA9B12CC7BAD6001A88C2 /* VideoLayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoLayer.swift; sourceTree = ""; }; @@ -1558,6 +1601,10 @@ 4049CE832BBBF8EF003D07D2 /* StreamAsyncImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAsyncImage.swift; sourceTree = ""; }; 404A5CFA2AD5648100EF1C62 /* DemoChatModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoChatModifier.swift; sourceTree = ""; }; 4059C3412AAF0CE40006928E /* DemoChatViewModel+Injection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "DemoChatViewModel+Injection.swift"; sourceTree = ""; }; + 406128802CF32FEF007F5CDC /* SDPLineVisitor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SDPLineVisitor.swift; sourceTree = ""; }; + 406128822CF33000007F5CDC /* SDPParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SDPParser.swift; sourceTree = ""; }; + 406128872CF33029007F5CDC /* RTPMapVisitor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTPMapVisitor.swift; sourceTree = ""; }; + 4061288A2CF33088007F5CDC /* SupportedPrefix.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SupportedPrefix.swift; sourceTree = ""; }; 4063033E2AD847EC0091AE77 /* CallState_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallState_Tests.swift; sourceTree = ""; }; 406303412AD848000091AE77 /* CallParticipant_Mock.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallParticipant_Mock.swift; sourceTree = ""; }; 406583852B87694B00B4F979 /* BlurBackgroundVideoFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BlurBackgroundVideoFilter.swift; sourceTree = ""; }; 
@@ -1813,6 +1860,31 @@ 40E18AAE2CD51E8E00A65C9F /* LockQueuing.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LockQueuing.swift; sourceTree = ""; }; 40E18AB12CD51FC100A65C9F /* UnfairQueueTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UnfairQueueTests.swift; sourceTree = ""; }; 40E18AB32CD522F700A65C9F /* RecursiveQueueTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecursiveQueueTests.swift; sourceTree = ""; }; + 40E3632D2D09DBFA0028C52A /* Int+DefaultValues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Int+DefaultValues.swift"; sourceTree = ""; }; + 40E363302D09DC650028C52A /* CGSize+DefaultValues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CGSize+DefaultValues.swift"; sourceTree = ""; }; + 40E363352D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_VideoQuality+Convenience.swift"; sourceTree = ""; }; + 40E363372D09E6560028C52A /* Array+RTCRtpEncodingParameters.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Array+RTCRtpEncodingParameters.swift"; sourceTree = ""; }; + 40E3633D2D09EF560028C52A /* CMVideoDimensions+DefaultValues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMVideoDimensions+DefaultValues.swift"; sourceTree = ""; }; + 40E3633F2D09F0950028C52A /* Comparable+Clamped.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Comparable+Clamped.swift"; sourceTree = ""; }; + 40E363442D09F2BD0028C52A /* AVCaptureDevice.Format+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+Convenience.swift"; sourceTree = ""; }; + 40E363482D09F6B20028C52A /* StreamVideoCapturer.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamVideoCapturer.swift; sourceTree = ""; }; + 40E3634B2D09F9EF0028C52A /* CameraBackgroundAccessHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraBackgroundAccessHandler.swift; sourceTree = ""; }; + 40E3634D2D09FDDF0028C52A /* CameraStartCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraStartCaptureHandler.swift; sourceTree = ""; }; + 40E3634F2D0A03FF0028C52A /* StreamCaptureDeviceProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamCaptureDeviceProvider.swift; sourceTree = ""; }; + 40E363512D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+OutputFormat.swift"; sourceTree = ""; }; + 40E363552D0A11D30028C52A /* CameraStopCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraStopCaptureHandler.swift; sourceTree = ""; }; + 40E363582D0A139E0028C52A /* CameraFocusHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraFocusHandler.swift; sourceTree = ""; }; + 40E3635A2D0A15E40028C52A /* CameraCapturePhotoHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraCapturePhotoHandler.swift; sourceTree = ""; }; + 40E3635C2D0A17C10028C52A /* CameraVideoOutputHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraVideoOutputHandler.swift; sourceTree = ""; }; + 40E3635E2D0A18B10028C52A /* CameraZoomHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraZoomHandler.swift; sourceTree = ""; }; + 40E363612D0A1C2E0028C52A /* SimulatorStartCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorStartCaptureHandler.swift; sourceTree = ""; 
}; + 40E363632D0A1C360028C52A /* SimulatorStopCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorStopCaptureHandler.swift; sourceTree = ""; }; + 40E3636B2D0A24310028C52A /* ScreenShareStartCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenShareStartCaptureHandler.swift; sourceTree = ""; }; + 40E3636D2D0A26B40028C52A /* ScreenShareStopCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenShareStopCaptureHandler.swift; sourceTree = ""; }; + 40E363702D0A27640028C52A /* BroadcastStartCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastStartCaptureHandler.swift; sourceTree = ""; }; + 40E363722D0A277C0028C52A /* BroadcastStopCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastStopCaptureHandler.swift; sourceTree = ""; }; + 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CGSize+Adapt.swift"; sourceTree = ""; }; + 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastBufferReaderKey.swift; sourceTree = ""; }; 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "MemberResponse+Dummy.swift"; sourceTree = ""; }; 40E9B3B22BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "JoinCallResponse+Dummy.swift"; sourceTree = ""; }; 40E9B3B42BCD93F500ACF18F /* Credentials+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Credentials+Dummy.swift"; sourceTree = ""; }; @@ -3019,6 +3091,22 @@ path = Protocols; sourceTree = ""; }; + 4039F0C82D0222D00078159E /* VideoLayerFactory */ = { + isa 
= PBXGroup; + children = ( + 4039F0C92D0222E40078159E /* VideoLayerFactory.swift */, + ); + path = VideoLayerFactory; + sourceTree = ""; + }; + 4039F0CD2D024DCE0078159E /* Utilities */ = { + isa = PBXGroup; + children = ( + 4039F0CE2D024DDF0078159E /* MediaTransceiverStorage.swift */, + ); + path = Utilities; + sourceTree = ""; + }; 403EFC9D2BDBFDEE0057C248 /* Feedback */ = { isa = PBXGroup; children = ( @@ -3090,6 +3178,25 @@ path = AsyncImage; sourceTree = ""; }; + 4061287F2CF32FE4007F5CDC /* SDP Parsing */ = { + isa = PBXGroup; + children = ( + 406128842CF33018007F5CDC /* Visitors */, + 4061288A2CF33088007F5CDC /* SupportedPrefix.swift */, + 406128822CF33000007F5CDC /* SDPParser.swift */, + ); + path = "SDP Parsing"; + sourceTree = ""; + }; + 406128842CF33018007F5CDC /* Visitors */ = { + isa = PBXGroup; + children = ( + 406128802CF32FEF007F5CDC /* SDPLineVisitor.swift */, + 406128872CF33029007F5CDC /* RTPMapVisitor.swift */, + ); + path = Visitors; + sourceTree = ""; + }; 4063033D2AD847E60091AE77 /* CallState */ = { isa = PBXGroup; children = ( @@ -3612,13 +3719,14 @@ 40AB34AF2C5D048800B5B6B3 /* Extensions */ = { isa = PBXGroup; children = ( + 40E363412D09F2A70028C52A /* AVFoundation */, + 40E3633C2D09EF490028C52A /* CoreMedia */, + 40AB34B02C5D049A00B5B6B3 /* Foundation */, + 40E363322D09DDAF0028C52A /* Protobuf */, + 40E3632F2D09DC5D0028C52A /* CoreGraphics */, 40C9E4612C99881700802B28 /* WebRTC */, - 40C9E4492C94744E00802B28 /* Stream_Video_Sfu_Models_VideoDimension+Convenience.swift */, - 40C9E4472C94743800802B28 /* Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift */, - 40C9E4432C94740600802B28 /* Stream_Video_Sfu_Models_VideoLayer+Convenience.swift */, 40C9E4452C94742300802B28 /* CallParticipant+Convenience.swift */, 40382F342C89A6C500C2D00F /* AudioSettings+Convenience.swift */, - 40AB34B02C5D049A00B5B6B3 /* Foundation */, ); path = Extensions; sourceTree = ""; @@ -3631,6 +3739,9 @@ 40AB34B52C5D089E00B5B6B3 /* Task+Timeout.swift */, 
40C689172C64DDC70054528A /* Publisher+TaskSink.swift */, 40C6891B2C657F280054528A /* Publisher+AsyncStream.swift */, + 40E3632D2D09DBFA0028C52A /* Int+DefaultValues.swift */, + 40E363372D09E6560028C52A /* Array+RTCRtpEncodingParameters.swift */, + 40E3633F2D09F0950028C52A /* Comparable+Clamped.swift */, ); path = Foundation; sourceTree = ""; @@ -3752,6 +3863,7 @@ 40BBC4B82C627F76002AEF92 /* MediaAdapters */ = { isa = PBXGroup; children = ( + 4039F0CD2D024DCE0078159E /* Utilities */, 408CF9C92CAFF45300F56833 /* VideoCapturePolicy */, 40BBC4AD2C627689002AEF92 /* LocalMediaAdapters */, 40BBC4822C623C6E002AEF92 /* MediaAdapter.swift */, @@ -3767,6 +3879,7 @@ 40BBC4C12C637377002AEF92 /* v2 */ = { isa = PBXGroup; children = ( + 40E363472D09F69D0028C52A /* VideoCapturing */, 40483CB72C9B1DEE00B4FCA8 /* WebRTCCoordinatorProviding.swift */, 40BBC4C52C638915002AEF92 /* WebRTCCoordinator.swift */, 40BBC4C22C6373C4002AEF92 /* WebRTCStateAdapter.swift */, @@ -3775,6 +3888,8 @@ 40429D5C2C779AED00AC7FFF /* WebRTCMigrationStatusObserver.swift */, 40BBC4D72C639740002AEF92 /* WebRTCJoinRequestFactory.swift */, 406B3C3B2C9197FA00FC93A1 /* WebRTCConfiguration.swift */, + 4039F0C82D0222D00078159E /* VideoLayerFactory */, + 4061287F2CF32FE4007F5CDC /* SDP Parsing */, 40BBC4C72C639007002AEF92 /* StateMachine */, 40AB34AC2C5D02CB00B5B6B3 /* SFU */, 40AB34AF2C5D048800B5B6B3 /* Extensions */, @@ -3898,6 +4013,7 @@ isa = PBXGroup; children = ( 40C9E45C2C9987E300802B28 /* RTCMediaStreamTrack+Sendable.swift */, + 4039F0BF2D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift */, ); path = WebRTC; sourceTree = ""; @@ -3966,6 +4082,108 @@ path = Queues; sourceTree = ""; }; + 40E3632F2D09DC5D0028C52A /* CoreGraphics */ = { + isa = PBXGroup; + children = ( + 40E363302D09DC650028C52A /* CGSize+DefaultValues.swift */, + 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */, + ); + path = CoreGraphics; + sourceTree = ""; + }; + 40E363322D09DDAF0028C52A /* Protobuf */ = { + isa = PBXGroup; + children 
= ( + 40070F382CF0EF2200035FA9 /* Stream_Video_Sfu_Models_Codec+Convenience.swift */, + 40C9E4492C94744E00802B28 /* Stream_Video_Sfu_Models_VideoDimension+Convenience.swift */, + 40C9E4472C94743800802B28 /* Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift */, + 40C9E4432C94740600802B28 /* Stream_Video_Sfu_Models_VideoLayer+Convenience.swift */, + 40070F3A2CF0EF4500035FA9 /* Stream_Video_Sfu_Models_PublishOption+Convenience.swift */, + 40E363352D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift */, + ); + path = Protobuf; + sourceTree = ""; + }; + 40E3633C2D09EF490028C52A /* CoreMedia */ = { + isa = PBXGroup; + children = ( + 40E3633D2D09EF560028C52A /* CMVideoDimensions+DefaultValues.swift */, + ); + path = CoreMedia; + sourceTree = ""; + }; + 40E363412D09F2A70028C52A /* AVFoundation */ = { + isa = PBXGroup; + children = ( + 40E363512D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift */, + 40E363442D09F2BD0028C52A /* AVCaptureDevice.Format+Convenience.swift */, + ); + path = AVFoundation; + sourceTree = ""; + }; + 40E363472D09F69D0028C52A /* VideoCapturing */ = { + isa = PBXGroup; + children = ( + 40E3634A2D09F9EB0028C52A /* ActionHandlers */, + 40E3634F2D0A03FF0028C52A /* StreamCaptureDeviceProvider.swift */, + 40E363482D09F6B20028C52A /* StreamVideoCapturer.swift */, + 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */, + ); + path = VideoCapturing; + sourceTree = ""; + }; + 40E3634A2D09F9EB0028C52A /* ActionHandlers */ = { + isa = PBXGroup; + children = ( + 40E3636F2D0A27590028C52A /* Broadcast */, + 40E3636A2D0A24280028C52A /* ScreenShare */, + 40E363602D0A1C180028C52A /* Simulator */, + 40E363572D0A137A0028C52A /* Camera */, + ); + path = ActionHandlers; + sourceTree = ""; + }; + 40E363572D0A137A0028C52A /* Camera */ = { + isa = PBXGroup; + children = ( + 40E3634B2D09F9EF0028C52A /* CameraBackgroundAccessHandler.swift */, + 40E3634D2D09FDDF0028C52A /* CameraStartCaptureHandler.swift */, + 
40E363552D0A11D30028C52A /* CameraStopCaptureHandler.swift */, + 40E363582D0A139E0028C52A /* CameraFocusHandler.swift */, + 40E3635E2D0A18B10028C52A /* CameraZoomHandler.swift */, + 40E3635A2D0A15E40028C52A /* CameraCapturePhotoHandler.swift */, + 40E3635C2D0A17C10028C52A /* CameraVideoOutputHandler.swift */, + ); + path = Camera; + sourceTree = ""; + }; + 40E363602D0A1C180028C52A /* Simulator */ = { + isa = PBXGroup; + children = ( + 40E363612D0A1C2E0028C52A /* SimulatorStartCaptureHandler.swift */, + 40E363632D0A1C360028C52A /* SimulatorStopCaptureHandler.swift */, + ); + path = Simulator; + sourceTree = ""; + }; + 40E3636A2D0A24280028C52A /* ScreenShare */ = { + isa = PBXGroup; + children = ( + 40E3636B2D0A24310028C52A /* ScreenShareStartCaptureHandler.swift */, + 40E3636D2D0A26B40028C52A /* ScreenShareStopCaptureHandler.swift */, + ); + path = ScreenShare; + sourceTree = ""; + }; + 40E3636F2D0A27590028C52A /* Broadcast */ = { + isa = PBXGroup; + children = ( + 40E363702D0A27640028C52A /* BroadcastStartCaptureHandler.swift */, + 40E363722D0A277C0028C52A /* BroadcastStopCaptureHandler.swift */, + ); + path = Broadcast; + sourceTree = ""; + }; 40F0173C2BBEB85F00E89FD1 /* Utilities */ = { isa = PBXGroup; children = ( @@ -4697,6 +4915,8 @@ 84DCA2132A38A428000C3411 /* CoordinatorModels.swift */, 40F161AA2A4C6B5C00846E3E /* ScreenSharingSession.swift */, 8454A3182AAB374B00A012C6 /* CallStatsReport.swift */, + 40034C1F2CFDABE600A318B1 /* PublishOptions.swift */, + 4039F0CB2D0241120078159E /* AudioCodec.swift */, ); path = Models; sourceTree = ""; @@ -4953,41 +5173,40 @@ isa = PBXGroup; children = ( 40034C2F2CFE168900A318B1 /* LocaleProvider */, - 4067F3062CDA32F0002E28BD /* AudioSession */, - 408CF9C42CAEC24500F56833 /* ScreenPropertiesAdapter */, - 40C9E44F2C9880D300802B28 /* Unwrap */, - 40382F2C2C88B87500C2D00F /* ReflectiveStringConvertible */, - 40F646232C7F225200FFB10A /* CollectionDelayedUpdateObserver.swift */, - 40C2B5BC2C2C447700EC2C2D /* 
RejectionReasonProvider */, - 40C2B5B42C2B604800EC2C2D /* DisposableBag */, - 40C4DF402C1C21360035DBC2 /* ParticipantAutoLeavePolicy */, - 408937892C062B0B000EEB69 /* UUIDProviding */, - 403FF3DF2BA1D20E0092CE8A /* Queues */, - 40FB150D2BF77CA200D5E580 /* StateMachine */, - 40FB15082BF74C0A00D5E580 /* CallCache */, - 8456E6C7287EC343004E180E /* Logger */, - 84C2997C28784BB30034B735 /* Utils.swift */, - 84AF64D4287C79320012A503 /* RawJSON.swift */, - 8456E6DA287EC530004E180E /* StreamRuntimeCheck.swift */, + 8454A31C2AAF41E100A012C6 /* Array+SafeSubscript.swift */, 84A7E183288362DF00526C98 /* Atomic.swift */, - 84A7E1A72883E46200526C98 /* Timers.swift */, + 40CB9FA32B7F8EA4006BED93 /* AVCaptureSession+ActiveCaptureDevice.swift */, + 840042CA2A701C2000917B30 /* BroadcastUtils.swift */, 841947972886D9CD0007B36E /* BundleExtensions.swift */, + 40F646232C7F225200FFB10A /* CollectionDelayedUpdateObserver.swift */, + 8490DD22298D5330007E53D2 /* Data+Gzip.swift */, 84B9A56C29112F39004DE31A /* EndpointConfig.swift */, + 840042C42A6FED2900917B30 /* IntExtensions.swift */, + 84A4DDBC2A3B35030097F3E9 /* LocationFetcher.swift */, + 40C4DF432C1C261D0035DBC2 /* Publisher+WeakAssign.swift */, + 84AF64D4287C79320012A503 /* RawJSON.swift */, + 40013DDB2B87AA2300915453 /* SerialActor.swift */, + 846E4B0029D2D372003733AB /* Sorting.swift */, + 8456E6DA287EC530004E180E /* StreamRuntimeCheck.swift */, 8268615F290A7556005BFFED /* SystemEnvironment.swift */, 841FF51A2A5FED4800809BBB /* SystemEnvironment+XStreamClient.swift */, - 8490DD22298D5330007E53D2 /* Data+Gzip.swift */, - 846E4B0029D2D372003733AB /* Sorting.swift */, - 846A06CD29E056C40084C264 /* StringExtensions.swift */, - 84A4DDBC2A3B35030097F3E9 /* LocationFetcher.swift */, - 840042C42A6FED2900917B30 /* IntExtensions.swift */, - 840042CA2A701C2000917B30 /* BroadcastUtils.swift */, - 8454A31C2AAF41E100A012C6 /* Array+SafeSubscript.swift */, 401A0F022AB1C1B600BE2DBD /* ThermalStateObserver.swift */, - 40CB9FA32B7F8EA4006BED93 /* 
AVCaptureSession+ActiveCaptureDevice.swift */, - 40013DDB2B87AA2300915453 /* SerialActor.swift */, + 84A7E1A72883E46200526C98 /* Timers.swift */, 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */, - 40C4DF432C1C261D0035DBC2 /* Publisher+WeakAssign.swift */, + 84C2997C28784BB30034B735 /* Utils.swift */, + 4067F3062CDA32F0002E28BD /* AudioSession */, + 40FB15082BF74C0A00D5E580 /* CallCache */, 402F04AC2B714E9B00CA1986 /* DeviceOrientation */, + 40C2B5B42C2B604800EC2C2D /* DisposableBag */, + 8456E6C7287EC343004E180E /* Logger */, + 40C4DF402C1C21360035DBC2 /* ParticipantAutoLeavePolicy */, + 403FF3DF2BA1D20E0092CE8A /* Queues */, + 40382F2C2C88B87500C2D00F /* ReflectiveStringConvertible */, + 40C2B5BC2C2C447700EC2C2D /* RejectionReasonProvider */, + 408CF9C42CAEC24500F56833 /* ScreenPropertiesAdapter */, + 40FB150D2BF77CA200D5E580 /* StateMachine */, + 40C9E44F2C9880D300802B28 /* Unwrap */, + 408937892C062B0B000EEB69 /* UUIDProviding */, ); path = Utils; sourceTree = ""; @@ -5390,24 +5609,24 @@ 84F737EE287C13AC00A363F4 /* StreamVideo */ = { isa = PBXGroup; children = ( + 84F737EF287C13AC00A363F4 /* StreamVideo.h */, + 82B82F2229114001001B5FD7 /* Info.plist */, 847BE09B29DADE0100B55D21 /* Call.swift */, + 84530C6B2A3C4E0700F2678E /* CallState.swift */, + 84C2996D2876E42D0034B735 /* StreamVideo.swift */, + 8492B874290808AE00006649 /* StreamVideoEnvironment.swift */, + 8478EB12288A054B00525538 /* VideoConfig.swift */, 40FB01FF2BAC8A4000A1C206 /* CallKit */, 846D16202A52B8A10036CE4C /* CallSettings */, - 84530C6B2A3C4E0700F2678E /* CallState.swift */, 84EA5D3D28C09A95004D3531 /* Controllers */, 84C299712876E77E0034B735 /* DependencyInjection */, 8456E6D8287EC436004E180E /* Errors */, 8206D8512A5FF30E0099F5EC /* Generated */, 84A7E1772881921000526C98 /* HTTPClient */, - 82B82F2229114001001B5FD7 /* Info.plist */, 8456E6D9287EC46D004E180E /* Models */, 84B57D3A2981645900E4E709 /* OpenApi */, 84ED240B286C9500002A3186 /* protobuf */, - 
84F737EF287C13AC00A363F4 /* StreamVideo.h */, - 84C2996D2876E42D0034B735 /* StreamVideo.swift */, - 8492B874290808AE00006649 /* StreamVideoEnvironment.swift */, 84AF64D3287C79220012A503 /* Utils */, - 8478EB12288A054B00525538 /* VideoConfig.swift */, 84FC2C1C28ACF29300181490 /* WebRTC */, 84A7E17A2883624500526C98 /* WebSockets */, ); @@ -6312,6 +6531,7 @@ 84E5C51C2A013C440003A27A /* PushNotificationsConfig.swift in Sources */, 84A7E184288362DF00526C98 /* Atomic.swift in Sources */, 8449824E2C738A830029734D /* StopAllRTMPBroadcastsResponse.swift in Sources */, + 40E363522D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift in Sources */, 84D2E37729DC856D001D2118 /* CallMemberUpdatedEvent.swift in Sources */, 40149DD02B7E839500473176 /* AudioSessionProtocol.swift in Sources */, 40DFA88D2CC10FF3003DCE05 /* Stream_Video_Sfu_Models_AppleThermalState+Convenience.swift in Sources */, @@ -6323,6 +6543,7 @@ 84C4004229E3F446007B69C2 /* ConnectedEvent.swift in Sources */, 84DC389C29ADFCFD00946713 /* GetOrCreateCallResponse.swift in Sources */, 406B3BD92C8F337000FC93A1 /* MediaAdapting.swift in Sources */, + 40E363622D0A1C2E0028C52A /* SimulatorStartCaptureHandler.swift in Sources */, 4065839B2B877ADA00B4F979 /* CIImage+Sendable.swift in Sources */, 84DCA2242A3A0F0D000C3411 /* HTTPClient.swift in Sources */, 84A737CE28F4716E001A6769 /* signal.pb.swift in Sources */, @@ -6338,6 +6559,7 @@ 406B3C3C2C9197FF00FC93A1 /* WebRTCConfiguration.swift in Sources */, 84D2E37829DC856D001D2118 /* CallMemberUpdatedPermissionEvent.swift in Sources */, 84F3B0E0289150B10088751D /* CallParticipant.swift in Sources */, + 40E363562D0A11D30028C52A /* CameraStopCaptureHandler.swift in Sources */, 406B3BEB2C8F386100FC93A1 /* RTCRtpReceiver+CustomStringConvertible.swift in Sources */, 842B8E292A2DFED900863A87 /* StartTranscriptionResponse.swift in Sources */, 843697CF28C7898A00839D99 /* VideoOptions.swift in Sources */, @@ -6353,12 +6575,15 @@ 84D91E9D2C7CB0AA00B163A0 /* 
CallRtmpBroadcastFailedEvent.swift in Sources */, 84A737D028F4716E001A6769 /* models.pb.swift in Sources */, 846D16222A52B8D00036CE4C /* MicrophoneManager.swift in Sources */, + 40E3634C2D09F9EF0028C52A /* CameraBackgroundAccessHandler.swift in Sources */, 842E70DB2B91BE1700D2D68B /* ListTranscriptionsResponse.swift in Sources */, 4012B1902BFCA4D3006B0031 /* StreamCallStateMachine+AcceptedStage.swift in Sources */, 40BBC4A12C623D03002AEF92 /* RTCMediaStream+Convenience.swift in Sources */, 84DCA20E2A3885FE000C3411 /* Permissions.swift in Sources */, 842E70D82B91BE1700D2D68B /* StatsOptions.swift in Sources */, 4067F30B2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift in Sources */, + 40E3635F2D0A18B10028C52A /* CameraZoomHandler.swift in Sources */, + 40E363312D09DC650028C52A /* CGSize+DefaultValues.swift in Sources */, 40BBC48E2C623C6E002AEF92 /* PeerConnectionType.swift in Sources */, 84BAD77E2A6BFFB200733156 /* BroadcastSampleHandler.swift in Sources */, 40C2B5BB2C2C41DA00EC2C2D /* RejectCallRequest+Reason.swift in Sources */, @@ -6372,15 +6597,19 @@ 84AF64D2287C78E70012A503 /* User.swift in Sources */, 84274F482884251600CF8794 /* InternetConnection.swift in Sources */, 84DC389129ADFCFD00946713 /* VideoSettings.swift in Sources */, + 4039F0CA2D0222E40078159E /* VideoLayerFactory.swift in Sources */, 842E70D12B91BE1700D2D68B /* CallDeletedEvent.swift in Sources */, 8456E6D6287EC343004E180E /* LogFormatter.swift in Sources */, 43217A0C2A44A28B002B5857 /* ConnectionErrorEvent.swift in Sources */, 40BBC49E2C623D03002AEF92 /* RTCIceConnectionState+CustomStringConvertible.swift in Sources */, 84A7E18C288363AC00526C98 /* EventNotificationCenter.swift in Sources */, + 40E363492D09F6BB0028C52A /* StreamVideoCapturer.swift in Sources */, 84A7E1962883661A00526C98 /* BackgroundTaskScheduler.swift in Sources */, 84DC38DD29ADFCFD00946713 /* Models.swift in Sources */, 842D3B5F29F6D3720051698A /* DeviceData.swift in Sources */, 842E70D02B91BE1700D2D68B /* 
ClosedCaptionEvent.swift in Sources */, + 40E3632E2D09DBFA0028C52A /* Int+DefaultValues.swift in Sources */, + 40E363382D09E6560028C52A /* Array+RTCRtpEncodingParameters.swift in Sources */, 842D3B5829F667660051698A /* CreateDeviceRequest.swift in Sources */, 84BBF62B28AFC24000387A02 /* PeerConnectionFactory.swift in Sources */, 4159F1932C86FA41002B94D3 /* UserMuteResponse.swift in Sources */, @@ -6388,6 +6617,7 @@ 402F04A92B70ED8600CA1986 /* StreamCallStatisticsReporter.swift in Sources */, 8490DD1F298D39D9007E53D2 /* JsonEventDecoder.swift in Sources */, 40FB15192BF77EE700D5E580 /* StreamCallStateMachine+IdleStage.swift in Sources */, + 40E3633E2D09EF560028C52A /* CMVideoDimensions+DefaultValues.swift in Sources */, 40382F2B2C88B84800C2D00F /* Stream_Video_Sfu_Event_SfuEvent.OneOf_EventPayload+Payload.swift in Sources */, 84BAD7842A6C01AF00733156 /* BroadcastBufferReader.swift in Sources */, 40034C312CFE168D00A318B1 /* StreamLocaleProvider.swift in Sources */, @@ -6407,9 +6637,11 @@ 841BAA332BD15CDE000C73E4 /* SFULocationResponse.swift in Sources */, 84DC38D129ADFCFD00946713 /* Credentials.swift in Sources */, 406B3BFE2C8F53CB00FC93A1 /* StreamRTCPeerConnectionProtocol.swift in Sources */, + 4039F0C02D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift in Sources */, 84A7E1B02883E73100526C98 /* EventBatcher.swift in Sources */, 40BBC4DE2C63A507002AEF92 /* WebRTCCoordinator+CleanUp.swift in Sources */, 84CD12222C73831000056640 /* CallRtmpBroadcastStoppedEvent.swift in Sources */, + 40E3636E2D0A26B40028C52A /* ScreenShareStopCaptureHandler.swift in Sources */, 40FB15212BF78FA100D5E580 /* Publisher+NextValue.swift in Sources */, 40F646242C7F225200FFB10A /* CollectionDelayedUpdateObserver.swift in Sources */, 40382F282C88B80C00C2D00F /* SignalServerEvent.swift in Sources */, @@ -6427,6 +6659,7 @@ 406583992B877AB400B4F979 /* CIImage+Resize.swift in Sources */, 40BBC4A42C623D03002AEF92 /* RTCRtpTransceiverInit+Convenience.swift in Sources */, 
841BAA382BD15CDE000C73E4 /* CallTimeline.swift in Sources */, + 40E363592D0A139E0028C52A /* CameraFocusHandler.swift in Sources */, 8449824D2C738A830029734D /* StartRTMPBroadcastsResponse.swift in Sources */, 4159F1332C861404002B94D3 /* VideoEvent.swift in Sources */, 84DC38C629ADFCFD00946713 /* CallRejectedEvent.swift in Sources */, @@ -6440,12 +6673,14 @@ 84DC38AB29ADFCFD00946713 /* RecordSettingsRequest.swift in Sources */, 84EA5D3F28C09AAC004D3531 /* CallController.swift in Sources */, 84DC38CA29ADFCFD00946713 /* UpdateCallRequest.swift in Sources */, + 40E363752D0A2C6B0028C52A /* CGSize+Adapt.swift in Sources */, 4031D7FA2B84B077002EC6E4 /* StreamActiveCallProvider.swift in Sources */, 84AF64D9287C79F60012A503 /* Errors.swift in Sources */, 84DC389429ADFCFD00946713 /* UpdateUserPermissionsRequest.swift in Sources */, 40BBC4D02C639054002AEF92 /* WebRTCCoordinator+Idle.swift in Sources */, 8456E6D3287EC343004E180E /* BaseLogDestination.swift in Sources */, 84DCA2142A38A428000C3411 /* CoordinatorModels.swift in Sources */, + 4061288B2CF33088007F5CDC /* SupportedPrefix.swift in Sources */, 40BBC4C02C629408002AEF92 /* RTCTemporaryPeerConnection.swift in Sources */, 84B0091B2A4C521100CF1FA7 /* Retries.swift in Sources */, 84DC38CD29ADFCFD00946713 /* SendEventRequest.swift in Sources */, @@ -6470,16 +6705,20 @@ 40FA12F22B76AC8300CE3EC9 /* RTCCVPixelBuffer+Convenience.swift in Sources */, 8490032629D308A000AD9BB4 /* TranscriptionSettings.swift in Sources */, 841BAA342BD15CDE000C73E4 /* GeolocationResult.swift in Sources */, + 40E363712D0A27640028C52A /* BroadcastStartCaptureHandler.swift in Sources */, 848CCCEB2AB8ED8F002E83A2 /* CallHLSBroadcastingStoppedEvent.swift in Sources */, 40BBC4C42C638789002AEF92 /* RTCPeerConnectionCoordinator.swift in Sources */, 4067F3152CDA4094002E28BD /* StreamRTCAudioSession.swift in Sources */, 40BBC4C62C638915002AEF92 /* WebRTCCoordinator.swift in Sources */, 841BAA392BD15CDE000C73E4 /* UserSessionStats.swift in Sources */, 
406B3BD72C8F332200FC93A1 /* RTCVideoTrack+Sendable.swift in Sources */, + 406128812CF32FEF007F5CDC /* SDPLineVisitor.swift in Sources */, 4067F3132CDA33C6002E28BD /* RTCAudioSessionConfiguration+Default.swift in Sources */, 8409465829AF4EEC007AF5BF /* SendReactionRequest.swift in Sources */, 40BBC4BA2C627F83002AEF92 /* TrackEvent.swift in Sources */, + 406128832CF33000007F5CDC /* SDPParser.swift in Sources */, 84B9A56D29112F39004DE31A /* EndpointConfig.swift in Sources */, + 4039F0CF2D024DDF0078159E /* MediaTransceiverStorage.swift in Sources */, 8469593829BB6B4E00134EA0 /* GetEdgesResponse.swift in Sources */, 40AB34AE2C5D02D400B5B6B3 /* SFUAdapter.swift in Sources */, 84DC389A29ADFCFD00946713 /* APIError.swift in Sources */, @@ -6515,8 +6754,10 @@ 84CD12252C73840300056640 /* CallUserMutedEvent.swift in Sources */, 84DC38AC29ADFCFD00946713 /* CallAcceptedEvent.swift in Sources */, 84FC2C2828AD350100181490 /* WebRTCEvents.swift in Sources */, + 40E3635D2D0A17C10028C52A /* CameraVideoOutputHandler.swift in Sources */, 4159F17B2C86FA41002B94D3 /* RTMPSettingsRequest.swift in Sources */, 84DC38A129ADFCFD00946713 /* BlockUserResponse.swift in Sources */, + 40E363362D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift in Sources */, 4012B1942BFCAC1C006B0031 /* StreamCallStateMachine+RejectingStage.swift in Sources */, 40BBC4D22C639158002AEF92 /* WebRTCCoordinator+Connecting.swift in Sources */, 40BBC4AF2C627692002AEF92 /* LocalMediaAdapting.swift in Sources */, @@ -6541,6 +6782,7 @@ 842B8E242A2DFED900863A87 /* CallSessionParticipantJoinedEvent.swift in Sources */, 40BBC4D42C639371002AEF92 /* WebRTCCoordinator+Connected.swift in Sources */, 848CCCE62AB8ED8F002E83A2 /* BroadcastSettingsResponse.swift in Sources */, + 4039F0CC2D0241120078159E /* AudioCodec.swift in Sources */, 4097B3832BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift in Sources */, 84A7E1862883632100526C98 /* ConnectionStatus.swift in Sources */, 841BAA472BD15CDE000C73E4 /* 
CallTranscriptionReadyEvent.swift in Sources */, @@ -6570,6 +6812,7 @@ 40FB151D2BF77EFA00D5E580 /* StreamCallStateMachine+JoinedStage.swift in Sources */, 40BBC4B52C627761002AEF92 /* LocalVideoMediaAdapter.swift in Sources */, 84DC38C429ADFCFD00946713 /* MemberResponse.swift in Sources */, + 40E363452D09F2BD0028C52A /* AVCaptureDevice.Format+Convenience.swift in Sources */, 84DC38CB29ADFCFD00946713 /* SortParamRequest.swift in Sources */, 8490032529D308A000AD9BB4 /* GetCallResponse.swift in Sources */, 841947982886D9CD0007B36E /* BundleExtensions.swift in Sources */, @@ -6607,6 +6850,7 @@ 8456E6D4287EC343004E180E /* LogDestination.swift in Sources */, 84C267C928F5980F00F0F673 /* ConnectOptions.swift in Sources */, 406B3BEF2C8F38CE00FC93A1 /* RTCPeerConnectionEvent.swift in Sources */, + 40070F392CF0EF2200035FA9 /* Stream_Video_Sfu_Models_Codec+Convenience.swift in Sources */, 842E70D22B91BE1700D2D68B /* StartTranscriptionRequest.swift in Sources */, 84DC389E29ADFCFD00946713 /* CallCreatedEvent.swift in Sources */, 842B8E202A2DFED900863A87 /* CallSessionParticipantLeftEvent.swift in Sources */, @@ -6639,6 +6883,7 @@ 406B3C432C91E41400FC93A1 /* WebRTCAuthenticator.swift in Sources */, 84BAD77A2A6BFEF900733156 /* BroadcastBufferUploader.swift in Sources */, 40C4DF4B2C1C2C330035DBC2 /* ParticipantAutoLeavePolicy.swift in Sources */, + 406128882CF33029007F5CDC /* RTPMapVisitor.swift in Sources */, 84DC38A429ADFCFD00946713 /* BackstageSettings.swift in Sources */, 84BBF62D28AFC72700387A02 /* DefaultRTCMediaConstraints.swift in Sources */, 40BBC4A72C623D03002AEF92 /* StreamRTCPeerConnection+DelegatePublisher.swift in Sources */, @@ -6664,6 +6909,7 @@ 84DC38D429ADFCFD00946713 /* CallSettingsResponse.swift in Sources */, 84A737CF28F4716E001A6769 /* signal.twirp.swift in Sources */, 40BBC4E42C63A5FF002AEF92 /* WebRTCCoordinator+FastReconnected.swift in Sources */, + 40E363502D0A03FF0028C52A /* StreamCaptureDeviceProvider.swift in Sources */, 842B8E2E2A2DFED900863A87 /* 
RejectCallResponse.swift in Sources */, 845C09852C0DEB5C00F725B3 /* LimitsSettingsResponse.swift in Sources */, 4159F1952C86FA41002B94D3 /* AggregatedStats.swift in Sources */, @@ -6682,6 +6928,7 @@ 4159F1902C86FA41002B94D3 /* PublisherAggregateStats.swift in Sources */, 40C4DF482C1C2BFC0035DBC2 /* LastParticipantAutoLeavePolicy.swift in Sources */, 82686160290A7556005BFFED /* SystemEnvironment.swift in Sources */, + 40E3634E2D09FDE50028C52A /* CameraStartCaptureHandler.swift in Sources */, 406583922B877A1600B4F979 /* BackgroundImageFilterProcessor.swift in Sources */, 8490DD23298D5330007E53D2 /* Data+Gzip.swift in Sources */, 84DC38B829ADFCFD00946713 /* UpdateUserPermissionsResponse.swift in Sources */, @@ -6702,6 +6949,7 @@ 842B8E1D2A2DFED900863A87 /* EgressHLSResponse.swift in Sources */, 848CCCE82AB8ED8F002E83A2 /* StartHLSBroadcastingResponse.swift in Sources */, 84D2E37629DC856D001D2118 /* CallMemberRemovedEvent.swift in Sources */, + 40E3636C2D0A24390028C52A /* ScreenShareStartCaptureHandler.swift in Sources */, 84F73859287C1A3400A363F4 /* StreamVideo.swift in Sources */, 4012B1892BFC9E6F006B0031 /* UnfairQueue.swift in Sources */, 840F59922A77FDCB00EF3EB2 /* UnpinResponse.swift in Sources */, @@ -6710,6 +6958,7 @@ 4159F1992C86FA41002B94D3 /* CountrywiseAggregateStats.swift in Sources */, 40C689182C64DDC70054528A /* Publisher+TaskSink.swift in Sources */, 40BBC4E22C63A5C5002AEF92 /* WebRTCCoordinator+FastReconnecting.swift in Sources */, + 40070F3B2CF0EF4500035FA9 /* Stream_Video_Sfu_Models_PublishOption+Convenience.swift in Sources */, 406B3BE92C8F384A00FC93A1 /* RTCMediaStream+CustomStringConvertible.swift in Sources */, 40429D612C779B7000AC7FFF /* SFUSignalService.swift in Sources */, 435F01B32A501148009CD0BD /* OwnCapability+Identifiable.swift in Sources */, @@ -6718,6 +6967,7 @@ 40FB150F2BF77CEC00D5E580 /* StreamStateMachine.swift in Sources */, 40CB9FA42B7F8EA4006BED93 /* AVCaptureSession+ActiveCaptureDevice.swift in Sources */, 
4159F1762C86FA41002B94D3 /* RTMPSettingsResponse.swift in Sources */, + 40E363772D0A2E320028C52A /* BroadcastBufferReaderKey.swift in Sources */, 40BBC4EA2C63A665002AEF92 /* WebRTCCoordinator+Migrated.swift in Sources */, 84E4F7D1294CB5F300DD4CE3 /* ConnectionQuality.swift in Sources */, 848CCCE72AB8ED8F002E83A2 /* ThumbnailsSettingsRequest.swift in Sources */, @@ -6748,20 +6998,24 @@ 84C28C922A84D16A00742E33 /* GoLiveRequest.swift in Sources */, 84FC2C1328ACDF3A00181490 /* ProtoModel.swift in Sources */, 40BBC4CE2C639054002AEF92 /* WebRTCCoordinator+Error.swift in Sources */, + 40E363732D0A277C0028C52A /* BroadcastStopCaptureHandler.swift in Sources */, 848CCCEA2AB8ED8F002E83A2 /* RecordSettingsResponse.swift in Sources */, 40BBC4A62C623D03002AEF92 /* RTCRtpTransceiverDirection+CustomStringConvertible.swift in Sources */, 84DC38BA29ADFCFD00946713 /* CallResponse.swift in Sources */, 843697CD28C647B600839D99 /* VideoCapturer.swift in Sources */, 842E70DF2B91E84800D2D68B /* HealthCheckEvent.swift in Sources */, 84DC388F29ADFCFD00946713 /* CodableHelper.swift in Sources */, + 40E363642D0A1C360028C52A /* SimulatorStopCaptureHandler.swift in Sources */, 8409465A29AF4EEC007AF5BF /* ReactionResponse.swift in Sources */, 402F04AA2B70ED8600CA1986 /* Statistics+Convenience.swift in Sources */, 8490032429D308A000AD9BB4 /* RingSettings.swift in Sources */, 840F598E2A77FDCB00EF3EB2 /* BroadcastSettingsRequest.swift in Sources */, + 40E3635B2D0A15E40028C52A /* CameraCapturePhotoHandler.swift in Sources */, 842B8E272A2DFED900863A87 /* CallSessionEndedEvent.swift in Sources */, 844982482C738A830029734D /* DeleteCallResponse.swift in Sources */, 84DC38CF29ADFCFD00946713 /* QueryCallsRequest.swift in Sources */, 40BBC4D82C639740002AEF92 /* WebRTCJoinRequestFactory.swift in Sources */, + 40E363402D09F0950028C52A /* Comparable+Clamped.swift in Sources */, 406B3BDD2C8F351E00FC93A1 /* RTCIceCandidate+Sendable.swift in Sources */, 84A7E1822883629700526C98 /* RetryStrategy.swift in 
Sources */, 841BAA402BD15CDE000C73E4 /* UserStats.swift in Sources */,