From 202a12fd8208fcdfbc7ec22e60aac761eb51654a Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Sat, 2 Dec 2023 16:38:26 +0100 Subject: [PATCH 01/10] chore: format swift code --- ios/Video/DataStructures/Chapter.swift | 43 +- ios/Video/DataStructures/DRMParams.swift | 54 +- .../SelectedTrackCriteria.swift | 30 +- ios/Video/DataStructures/TextTrack.swift | 43 +- ios/Video/DataStructures/VideoSource.swift | 99 +- ios/Video/Features/RCTIMAAdsManager.swift | 325 +- ios/Video/Features/RCTPictureInPicture.swift | 111 +- ios/Video/Features/RCTPlayerObserver.swift | 404 +-- ios/Video/Features/RCTPlayerOperations.swift | 420 +-- .../Features/RCTResourceLoaderDelegate.swift | 329 +- ios/Video/Features/RCTVideoDRM.swift | 306 +- .../Features/RCTVideoErrorHandling.swift | 207 +- ios/Video/Features/RCTVideoSave.swift | 128 +- ios/Video/Features/RCTVideoTVUtils.swift | 65 +- ios/Video/Features/RCTVideoUtils.swift | 609 ++-- ios/Video/RCTVideo.swift | 2633 ++++++++--------- ios/Video/RCTVideoManager.swift | 152 +- ios/Video/RCTVideoPlayerViewController.swift | 58 +- ...RCTVideoPlayerViewControllerDelegate.swift | 8 +- .../RCTVideoSwiftLog/RCTVideoSwiftLog.swift | 27 +- ios/Video/UIView+FindUIViewController.swift | 24 +- ios/VideoCaching/RCTVideoCachingHandler.swift | 151 +- 22 files changed, 3098 insertions(+), 3128 deletions(-) diff --git a/ios/Video/DataStructures/Chapter.swift b/ios/Video/DataStructures/Chapter.swift index 398122b5a6..39ae0a056b 100644 --- a/ios/Video/DataStructures/Chapter.swift +++ b/ios/Video/DataStructures/Chapter.swift @@ -1,25 +1,24 @@ - struct Chapter { - let title: String - let uri: String? - let startTime: Double - let endTime: Double - - let json: NSDictionary? - - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.title = "" - self.uri = nil - self.startTime = 0 - self.endTime = 0 - return - } - self.json = json - self.title = json["title"] as? String ?? "" - self.uri = json["uri"] as? String - self.startTime = json["startTime"] as? Double ?? 0 - self.endTime = json["endTime"] as? Double ?? 0 + let title: String + let uri: String? + let startTime: Double + let endTime: Double + + let json: NSDictionary? + + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.title = "" + self.uri = nil + self.startTime = 0 + self.endTime = 0 + return } + self.json = json + self.title = json["title"] as? String ?? "" + self.uri = json["uri"] as? String + self.startTime = json["startTime"] as? Double ?? 0 + self.endTime = json["endTime"] as? Double ?? 0 + } } diff --git a/ios/Video/DataStructures/DRMParams.swift b/ios/Video/DataStructures/DRMParams.swift index 64add7e018..4d676a0a92 100644 --- a/ios/Video/DataStructures/DRMParams.swift +++ b/ios/Video/DataStructures/DRMParams.swift @@ -1,30 +1,30 @@ struct DRMParams { - let type: String? - let licenseServer: String? - let headers: Dictionary? - let contentId: String? - let certificateUrl: String? - let base64Certificate: Bool? - - let json: NSDictionary? - - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = nil - self.licenseServer = nil - self.contentId = nil - self.certificateUrl = nil - self.base64Certificate = nil - self.headers = nil - return - } - self.json = json - self.type = json["type"] as? String - self.licenseServer = json["licenseServer"] as? String - self.contentId = json["contentId"] as? String - self.certificateUrl = json["certificateUrl"] as? String - self.base64Certificate = json["base64Certificate"] as? 
Bool - self.headers = json["headers"] as? Dictionary + let type: String? + let licenseServer: String? + let headers: [String: Any]? + let contentId: String? + let certificateUrl: String? + let base64Certificate: Bool? + + let json: NSDictionary? + + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.type = nil + self.licenseServer = nil + self.contentId = nil + self.certificateUrl = nil + self.base64Certificate = nil + self.headers = nil + return } + self.json = json + self.type = json["type"] as? String + self.licenseServer = json["licenseServer"] as? String + self.contentId = json["contentId"] as? String + self.certificateUrl = json["certificateUrl"] as? String + self.base64Certificate = json["base64Certificate"] as? Bool + self.headers = json["headers"] as? [String: Any] + } } diff --git a/ios/Video/DataStructures/SelectedTrackCriteria.swift b/ios/Video/DataStructures/SelectedTrackCriteria.swift index 7d97b8f2e9..f118760f40 100644 --- a/ios/Video/DataStructures/SelectedTrackCriteria.swift +++ b/ios/Video/DataStructures/SelectedTrackCriteria.swift @@ -1,18 +1,18 @@ struct SelectedTrackCriteria { - let type: String - let value: Any? - - let json: NSDictionary? - - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = "" - self.value = nil - return - } - self.json = json - self.type = json["type"] as? String ?? "" - self.value = json["value"] + let type: String + let value: Any? + + let json: NSDictionary? + + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.type = "" + self.value = nil + return } + self.json = json + self.type = json["type"] as? String ?? "" + self.value = json["value"] + } } diff --git a/ios/Video/DataStructures/TextTrack.swift b/ios/Video/DataStructures/TextTrack.swift index b0bdad80ff..4592ca3ad4 100644 --- a/ios/Video/DataStructures/TextTrack.swift +++ b/ios/Video/DataStructures/TextTrack.swift @@ -1,25 +1,24 @@ - struct TextTrack { - let type: String - let language: String - let title: String - let uri: String - - let json: NSDictionary? - - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = "" - self.language = "" - self.title = "" - self.uri = "" - return - } - self.json = json - self.type = json["type"] as? String ?? "" - self.language = json["language"] as? String ?? "" - self.title = json["title"] as? String ?? "" - self.uri = json["uri"] as? String ?? "" + let type: String + let language: String + let title: String + let uri: String + + let json: NSDictionary? + + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.type = "" + self.language = "" + self.title = "" + self.uri = "" + return } + self.json = json + self.type = json["type"] as? String ?? "" + self.language = json["language"] as? String ?? "" + self.title = json["title"] as? String ?? "" + self.uri = json["uri"] as? String ?? "" + } } diff --git a/ios/Video/DataStructures/VideoSource.swift b/ios/Video/DataStructures/VideoSource.swift index 368310b5ca..45d8d4a8d5 100644 --- a/ios/Video/DataStructures/VideoSource.swift +++ b/ios/Video/DataStructures/VideoSource.swift @@ -1,53 +1,52 @@ - struct VideoSource { - let type: String? - let uri: String? - let isNetwork: Bool - let isAsset: Bool - let shouldCache: Bool - let requestHeaders: Dictionary? - let startPosition: Int64? - let cropStart: Int64? - let cropEnd: Int64? - // Custom Metadata - let title: String? - let subtitle: String? - let description: String? - let customImageUri: String? 
- - let json: NSDictionary? - - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = nil - self.uri = nil - self.isNetwork = false - self.isAsset = false - self.shouldCache = false - self.requestHeaders = nil - self.startPosition = nil - self.cropStart = nil - self.cropEnd = nil - self.title = nil - self.subtitle = nil - self.description = nil - self.customImageUri = nil - return - } - self.json = json - self.type = json["type"] as? String - self.uri = json["uri"] as? String - self.isNetwork = json["isNetwork"] as? Bool ?? false - self.isAsset = json["isAsset"] as? Bool ?? false - self.shouldCache = json["shouldCache"] as? Bool ?? false - self.requestHeaders = json["requestHeaders"] as? Dictionary - self.startPosition = json["startPosition"] as? Int64 - self.cropStart = json["cropStart"] as? Int64 - self.cropEnd = json["cropEnd"] as? Int64 - self.title = json["title"] as? String - self.subtitle = json["subtitle"] as? String - self.description = json["description"] as? String - self.customImageUri = json["customImageUri"] as? String + let type: String? + let uri: String? + let isNetwork: Bool + let isAsset: Bool + let shouldCache: Bool + let requestHeaders: [String: Any]? + let startPosition: Int64? + let cropStart: Int64? + let cropEnd: Int64? + // Custom Metadata + let title: String? + let subtitle: String? + let description: String? + let customImageUri: String? + + let json: NSDictionary? + + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.type = nil + self.uri = nil + self.isNetwork = false + self.isAsset = false + self.shouldCache = false + self.requestHeaders = nil + self.startPosition = nil + self.cropStart = nil + self.cropEnd = nil + self.title = nil + self.subtitle = nil + self.description = nil + self.customImageUri = nil + return } + self.json = json + self.type = json["type"] as? String + self.uri = json["uri"] as? String + self.isNetwork = json["isNetwork"] as? Bool ?? false + self.isAsset = json["isAsset"] as? Bool ?? false + self.shouldCache = json["shouldCache"] as? Bool ?? false + self.requestHeaders = json["requestHeaders"] as? [String: Any] + self.startPosition = json["startPosition"] as? Int64 + self.cropStart = json["cropStart"] as? Int64 + self.cropEnd = json["cropEnd"] as? Int64 + self.title = json["title"] as? String + self.subtitle = json["subtitle"] as? String + self.description = json["description"] as? String + self.customImageUri = json["customImageUri"] as? String + } } diff --git a/ios/Video/Features/RCTIMAAdsManager.swift b/ios/Video/Features/RCTIMAAdsManager.swift index e8f9b2568e..fc24447d94 100644 --- a/ios/Video/Features/RCTIMAAdsManager.swift +++ b/ios/Video/Features/RCTIMAAdsManager.swift @@ -1,230 +1,209 @@ #if USE_GOOGLE_IMA -import Foundation -import GoogleInteractiveMediaAds - -class RCTIMAAdsManager: NSObject, IMAAdsLoaderDelegate, IMAAdsManagerDelegate, IMALinkOpenerDelegate { + import Foundation + import GoogleInteractiveMediaAds + class RCTIMAAdsManager: NSObject, IMAAdsLoaderDelegate, IMAAdsManagerDelegate, IMALinkOpenerDelegate { private weak var _video: RCTVideo? - private var _pipEnabled:() -> Bool + private var _pipEnabled: () -> Bool /* Entry point for the SDK. Used to make ad requests. */ private var adsLoader: IMAAdsLoader! /* Main point of interaction with the SDK. Created by the SDK as the result of an ad request. */ private var adsManager: IMAAdsManager! 
- init(video:RCTVideo!, pipEnabled:@escaping () -> Bool) { - _video = video - _pipEnabled = pipEnabled + init(video: RCTVideo!, pipEnabled: @escaping () -> Bool) { + _video = video + _pipEnabled = pipEnabled - super.init() + super.init() } func setUpAdsLoader() { - adsLoader = IMAAdsLoader(settings: nil) - adsLoader.delegate = self + adsLoader = IMAAdsLoader(settings: nil) + adsLoader.delegate = self } func requestAds() { - guard let _video = _video else {return} - // Create ad display container for ad rendering. - let adDisplayContainer = IMAAdDisplayContainer(adContainer: _video, viewController: _video.reactViewController()) - - let adTagUrl = _video.getAdTagUrl() - let contentPlayhead = _video.getContentPlayhead() - - if adTagUrl != nil && contentPlayhead != nil { - // Create an ad request with our ad tag, display container, and optional user context. - let request = IMAAdsRequest( - adTagUrl: adTagUrl!, - adDisplayContainer: adDisplayContainer, - contentPlayhead: contentPlayhead, - userContext: nil) - - adsLoader.requestAds(with: request) - } + guard let _video = _video else { return } + // Create ad display container for ad rendering. + let adDisplayContainer = IMAAdDisplayContainer(adContainer: _video, viewController: _video.reactViewController()) + + let adTagUrl = _video.getAdTagUrl() + let contentPlayhead = _video.getContentPlayhead() + + if adTagUrl != nil && contentPlayhead != nil { + // Create an ad request with our ad tag, display container, and optional user context. + let request = IMAAdsRequest( + adTagUrl: adTagUrl!, + adDisplayContainer: adDisplayContainer, + contentPlayhead: contentPlayhead, + userContext: nil + ) + + adsLoader.requestAds(with: request) + } } // MARK: - Getters func getAdsLoader() -> IMAAdsLoader? { - return adsLoader + return adsLoader } func getAdsManager() -> IMAAdsManager? { - return adsManager + return adsManager } // MARK: - IMAAdsLoaderDelegate - func adsLoader(_ loader: IMAAdsLoader, adsLoadedWith adsLoadedData: IMAAdsLoadedData) { - guard let _video = _video else {return} - // Grab the instance of the IMAAdsManager and set yourself as the delegate. - adsManager = adsLoadedData.adsManager - adsManager?.delegate = self - + func adsLoader(_: IMAAdsLoader, adsLoadedWith adsLoadedData: IMAAdsLoadedData) { + guard let _video = _video else { return } + // Grab the instance of the IMAAdsManager and set yourself as the delegate. + adsManager = adsLoadedData.adsManager + adsManager?.delegate = self - // Create ads rendering settings and tell the SDK to use the in-app browser. - let adsRenderingSettings: IMAAdsRenderingSettings = IMAAdsRenderingSettings(); - adsRenderingSettings.linkOpenerDelegate = self; - adsRenderingSettings.linkOpenerPresentingController = _video.reactViewController(); + // Create ads rendering settings and tell the SDK to use the in-app browser. + let adsRenderingSettings = IMAAdsRenderingSettings() + adsRenderingSettings.linkOpenerDelegate = self + adsRenderingSettings.linkOpenerPresentingController = _video.reactViewController() - adsManager.initialize(with: adsRenderingSettings) + adsManager.initialize(with: adsRenderingSettings) } - func adsLoader(_ loader: IMAAdsLoader, failedWith adErrorData: IMAAdLoadingErrorData) { - if adErrorData.adError.message != nil { - print("Error loading ads: " + adErrorData.adError.message!) - } + func adsLoader(_: IMAAdsLoader, failedWith adErrorData: IMAAdLoadingErrorData) { + if adErrorData.adError.message != nil { + print("Error loading ads: " + adErrorData.adError.message!) 
+ } - _video?.setPaused(false) + _video?.setPaused(false) } // MARK: - IMAAdsManagerDelegate func adsManager(_ adsManager: IMAAdsManager, didReceive event: IMAAdEvent) { - guard let _video = _video else {return} - // Mute ad if the main player is muted - if (_video.isMuted()) { - adsManager.volume = 0; - } - // Play each ad once it has been loaded - if event.type == IMAAdEventType.LOADED { - if (_pipEnabled()) { - return - } - adsManager.start() + guard let _video = _video else { return } + // Mute ad if the main player is muted + if _video.isMuted() { + adsManager.volume = 0 + } + // Play each ad once it has been loaded + if event.type == IMAAdEventType.LOADED { + if _pipEnabled() { + return } - - if _video.onReceiveAdEvent != nil { - let type = convertEventToString(event: event.type) - - if (event.adData != nil) { - _video.onReceiveAdEvent?([ - "event": type, - "data": event.adData ?? [String](), - "target": _video.reactTag! - ]); - } else { - _video.onReceiveAdEvent?([ - "event": type, - "target": _video.reactTag! - ]); - } + adsManager.start() + } + + if _video.onReceiveAdEvent != nil { + let type = convertEventToString(event: event.type) + + if event.adData != nil { + _video.onReceiveAdEvent?([ + "event": type, + "data": event.adData ?? [String](), + "target": _video.reactTag!, + ]) + } else { + _video.onReceiveAdEvent?([ + "event": type, + "target": _video.reactTag!, + ]) } + } } - func adsManager(_ adsManager: IMAAdsManager, didReceive error: IMAAdError) { - if error.message != nil { - print("AdsManager error: " + error.message!) - } - - guard let _video = _video else {return} - - if _video.onReceiveAdEvent != nil { - _video.onReceiveAdEvent?([ - "event": "ERROR", - "data": [ - "message": error.message ?? "", - "code": error.code, - "type": error.type, - ], - "target": _video.reactTag! - ]) - } - - // Fall back to playing content - _video.setPaused(false) + func adsManager(_: IMAAdsManager, didReceive error: IMAAdError) { + if error.message != nil { + print("AdsManager error: " + error.message!) + } + + guard let _video = _video else { return } + + if _video.onReceiveAdEvent != nil { + _video.onReceiveAdEvent?([ + "event": "ERROR", + "data": [ + "message": error.message ?? "", + "code": error.code, + "type": error.type, + ], + "target": _video.reactTag!, + ]) + } + + // Fall back to playing content + _video.setPaused(false) } - func adsManagerDidRequestContentPause(_ adsManager: IMAAdsManager) { - // Pause the content for the SDK to play ads. - _video?.setPaused(true) - _video?.setAdPlaying(true) + func adsManagerDidRequestContentPause(_: IMAAdsManager) { + // Pause the content for the SDK to play ads. + _video?.setPaused(true) + _video?.setAdPlaying(true) } - func adsManagerDidRequestContentResume(_ adsManager: IMAAdsManager) { - // Resume the content since the SDK is done playing ads (at least for now). - _video?.setAdPlaying(false) - _video?.setPaused(false) + func adsManagerDidRequestContentResume(_: IMAAdsManager) { + // Resume the content since the SDK is done playing ads (at least for now). + _video?.setAdPlaying(false) + _video?.setPaused(false) } // MARK: - IMALinkOpenerDelegate - func linkOpenerDidClose(inAppLink linkOpener: NSObject) { - adsManager?.resume() + func linkOpenerDidClose(inAppLink _: NSObject) { + adsManager?.resume() } // MARK: - Helpers func convertEventToString(event: IMAAdEventType!) 
-> String { - var result = "UNKNOWN"; - - switch(event) { - case .AD_BREAK_READY: - result = "AD_BREAK_READY"; - break; - case .AD_BREAK_ENDED: - result = "AD_BREAK_ENDED"; - break; - case .AD_BREAK_STARTED: - result = "AD_BREAK_STARTED"; - break; - case .AD_PERIOD_ENDED: - result = "AD_PERIOD_ENDED"; - break; - case .AD_PERIOD_STARTED: - result = "AD_PERIOD_STARTED"; - break; - case .ALL_ADS_COMPLETED: - result = "ALL_ADS_COMPLETED"; - break; - case .CLICKED: - result = "CLICK"; - break; - case .COMPLETE: - result = "COMPLETED"; - break; - case .CUEPOINTS_CHANGED: - result = "CUEPOINTS_CHANGED"; - break; - case .FIRST_QUARTILE: - result = "FIRST_QUARTILE"; - break; - case .LOADED: - result = "LOADED"; - break; - case .LOG: - result = "LOG"; - break; - case .MIDPOINT: - result = "MIDPOINT"; - break; - case .PAUSE: - result = "PAUSED"; - break; - case .RESUME: - result = "RESUMED"; - break; - case .SKIPPED: - result = "SKIPPED"; - break; - case .STARTED: - result = "STARTED"; - break; - case .STREAM_LOADED: - result = "STREAM_LOADED"; - break; - case .TAPPED: - result = "TAPPED"; - break; - case .THIRD_QUARTILE: - result = "THIRD_QUARTILE"; - break; - default: - result = "UNKNOWN"; - } - - return result; + var result = "UNKNOWN" + + switch event { + case .AD_BREAK_READY: + result = "AD_BREAK_READY" + case .AD_BREAK_ENDED: + result = "AD_BREAK_ENDED" + case .AD_BREAK_STARTED: + result = "AD_BREAK_STARTED" + case .AD_PERIOD_ENDED: + result = "AD_PERIOD_ENDED" + case .AD_PERIOD_STARTED: + result = "AD_PERIOD_STARTED" + case .ALL_ADS_COMPLETED: + result = "ALL_ADS_COMPLETED" + case .CLICKED: + result = "CLICK" + case .COMPLETE: + result = "COMPLETED" + case .CUEPOINTS_CHANGED: + result = "CUEPOINTS_CHANGED" + case .FIRST_QUARTILE: + result = "FIRST_QUARTILE" + case .LOADED: + result = "LOADED" + case .LOG: + result = "LOG" + case .MIDPOINT: + result = "MIDPOINT" + case .PAUSE: + result = "PAUSED" + case .RESUME: + result = "RESUMED" + case .SKIPPED: + result = "SKIPPED" + case .STARTED: + result = "STARTED" + case .STREAM_LOADED: + result = "STREAM_LOADED" + case .TAPPED: + result = "TAPPED" + case .THIRD_QUARTILE: + result = "THIRD_QUARTILE" + default: + result = "UNKNOWN" + } + + return result } -} + } #endif diff --git a/ios/Video/Features/RCTPictureInPicture.swift b/ios/Video/Features/RCTPictureInPicture.swift index 62bdb8136b..1af10965bc 100644 --- a/ios/Video/Features/RCTPictureInPicture.swift +++ b/ios/Video/Features/RCTPictureInPicture.swift @@ -1,75 +1,74 @@ import AVFoundation import AVKit +import Foundation import MediaAccessibility import React -import Foundation #if os(iOS) -class RCTPictureInPicture: NSObject, AVPictureInPictureControllerDelegate { - private var _onPictureInPictureStatusChanged: (() -> Void)? = nil - private var _onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? = nil - private var _restoreUserInterfaceForPIPStopCompletionHandler:((Bool) -> Void)? = nil - private var _pipController:AVPictureInPictureController? - private var _isActive:Bool = false - + class RCTPictureInPicture: NSObject, AVPictureInPictureControllerDelegate { + private var _onPictureInPictureStatusChanged: (() -> Void)? + private var _onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? + private var _restoreUserInterfaceForPIPStopCompletionHandler: ((Bool) -> Void)? + private var _pipController: AVPictureInPictureController? + private var _isActive = false + init(_ onPictureInPictureStatusChanged: (() -> Void)? 
= nil, _ onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? = nil) { - _onPictureInPictureStatusChanged = onPictureInPictureStatusChanged - _onRestoreUserInterfaceForPictureInPictureStop = onRestoreUserInterfaceForPictureInPictureStop + _onPictureInPictureStatusChanged = onPictureInPictureStatusChanged + _onRestoreUserInterfaceForPictureInPictureStop = onRestoreUserInterfaceForPictureInPictureStop } - - func pictureInPictureControllerDidStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) { - guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } - - _onPictureInPictureStatusChanged() + + func pictureInPictureControllerDidStartPictureInPicture(_: AVPictureInPictureController) { + guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } + + _onPictureInPictureStatusChanged() } - - func pictureInPictureControllerDidStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) { - guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } - - _onPictureInPictureStatusChanged() + + func pictureInPictureControllerDidStopPictureInPicture(_: AVPictureInPictureController) { + guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } + + _onPictureInPictureStatusChanged() } - - func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) { - guard let _onRestoreUserInterfaceForPictureInPictureStop = _onRestoreUserInterfaceForPictureInPictureStop else { return } - - _onRestoreUserInterfaceForPictureInPictureStop() - - _restoreUserInterfaceForPIPStopCompletionHandler = completionHandler + func pictureInPictureController(_: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) { + guard let _onRestoreUserInterfaceForPictureInPictureStop = _onRestoreUserInterfaceForPictureInPictureStop else { return } + + _onRestoreUserInterfaceForPictureInPictureStop() + + _restoreUserInterfaceForPIPStopCompletionHandler = completionHandler } - - func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore:Bool) { - guard let _restoreUserInterfaceForPIPStopCompletionHandler = _restoreUserInterfaceForPIPStopCompletionHandler else { return } - _restoreUserInterfaceForPIPStopCompletionHandler(restore) - self._restoreUserInterfaceForPIPStopCompletionHandler = nil + + func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore: Bool) { + guard let _restoreUserInterfaceForPIPStopCompletionHandler = _restoreUserInterfaceForPIPStopCompletionHandler else { return } + _restoreUserInterfaceForPIPStopCompletionHandler(restore) + self._restoreUserInterfaceForPIPStopCompletionHandler = nil } - + func setupPipController(_ playerLayer: AVPlayerLayer?) { - // Create new controller passing reference to the AVPlayerLayer - _pipController = AVPictureInPictureController(playerLayer:playerLayer!) - if #available(iOS 14.2, *) { - _pipController?.canStartPictureInPictureAutomaticallyFromInline = true - } - _pipController?.delegate = self + // Create new controller passing reference to the AVPlayerLayer + _pipController = AVPictureInPictureController(playerLayer: playerLayer!) 
+ if #available(iOS 14.2, *) { + _pipController?.canStartPictureInPictureAutomaticallyFromInline = true + } + _pipController?.delegate = self } - - func setPictureInPicture(_ isActive:Bool) { - if _isActive == isActive { - return + + func setPictureInPicture(_ isActive: Bool) { + if _isActive == isActive { + return + } + _isActive = isActive + + guard let _pipController = _pipController else { return } + + if _isActive && !_pipController.isPictureInPictureActive { + DispatchQueue.main.async { + _pipController.startPictureInPicture() } - _isActive = isActive - - guard let _pipController = _pipController else { return } - - if _isActive && !_pipController.isPictureInPictureActive { - DispatchQueue.main.async(execute: { - _pipController.startPictureInPicture() - }) - } else if !_isActive && _pipController.isPictureInPictureActive { - DispatchQueue.main.async(execute: { - _pipController.stopPictureInPicture() - }) + } else if !_isActive && _pipController.isPictureInPictureActive { + DispatchQueue.main.async { + _pipController.stopPictureInPicture() } + } } -} + } #endif diff --git a/ios/Video/Features/RCTPlayerObserver.swift b/ios/Video/Features/RCTPlayerObserver.swift index d5441409df..aa1280fbf8 100644 --- a/ios/Video/Features/RCTPlayerObserver.swift +++ b/ios/Video/Features/RCTPlayerObserver.swift @@ -2,222 +2,232 @@ import AVFoundation import AVKit import Foundation +// MARK: - RCTPlayerObserverHandlerObjc + @objc protocol RCTPlayerObserverHandlerObjc { - func handleDidFailToFinishPlaying(notification:NSNotification!) - func handlePlaybackStalled(notification:NSNotification!) - func handlePlayerItemDidReachEnd(notification:NSNotification!) - func handleAVPlayerAccess(notification:NSNotification!) + func handleDidFailToFinishPlaying(notification: NSNotification!) + func handlePlaybackStalled(notification: NSNotification!) + func handlePlayerItemDidReachEnd(notification: NSNotification!) + func handleAVPlayerAccess(notification: NSNotification!) 
} +// MARK: - RCTPlayerObserverHandler + protocol RCTPlayerObserverHandler: RCTPlayerObserverHandlerObjc { - func handleTimeUpdate(time:CMTime) - func handleReadyForDisplay(changeObject: Any, change:NSKeyValueObservedChange) - func handleTimeMetadataChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<[AVMetadataItem]?>) - func handlePlayerItemStatusChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange) - func handlePlaybackBufferKeyEmpty(playerItem:AVPlayerItem, change:NSKeyValueObservedChange) - func handlePlaybackLikelyToKeepUp(playerItem:AVPlayerItem, change:NSKeyValueObservedChange) - func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) - func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) - func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange) - func handleViewControllerOverlayViewFrameChange(overlayView:UIView, change:NSKeyValueObservedChange) + func handleTimeUpdate(time: CMTime) + func handleReadyForDisplay(changeObject: Any, change: NSKeyValueObservedChange) + func handleTimeMetadataChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<[AVMetadataItem]?>) + func handlePlayerItemStatusChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) + func handlePlaybackBufferKeyEmpty(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) + func handlePlaybackLikelyToKeepUp(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) + func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) + func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) + func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange) + func handleViewControllerOverlayViewFrameChange(overlayView: UIView, change: NSKeyValueObservedChange) } +// MARK: - RCTPlayerObserver + class RCTPlayerObserver: NSObject { - weak var _handlers: RCTPlayerObserverHandler? - - var player:AVPlayer? { - willSet { - removePlayerObservers() - removePlayerTimeObserver() - } - didSet { - if player != nil { - addPlayerObservers() - addPlayerTimeObserver() - } - } - } - var playerItem:AVPlayerItem? { - willSet { - removePlayerItemObservers() - } - didSet { - if playerItem != nil { - addPlayerItemObservers() - } - } - } - var playerViewController:AVPlayerViewController? { - willSet { - removePlayerViewControllerObservers() - } - didSet { - if playerViewController != nil { - addPlayerViewControllerObservers() - } - } - } - var playerLayer:AVPlayerLayer? { - willSet { - removePlayerLayerObserver() - } - didSet { - if playerLayer != nil { - addPlayerLayerObserver() - } - } - } - - private var _progressUpdateInterval:TimeInterval = 250 - private var _timeObserver:Any? - - private var _playerRateChangeObserver:NSKeyValueObservation? - private var _playerVolumeChangeObserver:NSKeyValueObservation? - private var _playerExternalPlaybackActiveObserver:NSKeyValueObservation? - private var _playerItemStatusObserver:NSKeyValueObservation? - private var _playerPlaybackBufferEmptyObserver:NSKeyValueObservation? - private var _playerPlaybackLikelyToKeepUpObserver:NSKeyValueObservation? - private var _playerTimedMetadataObserver:NSKeyValueObservation? - private var _playerViewControllerReadyForDisplayObserver:NSKeyValueObservation? - private var _playerLayerReadyForDisplayObserver:NSKeyValueObservation? - private var _playerViewControllerOverlayFrameObserver:NSKeyValueObservation? 
- - deinit { - if let _handlers = _handlers { - NotificationCenter.default.removeObserver(_handlers) - } - } - - func addPlayerObservers() { - guard let player = player, let _handlers = _handlers else { - return - } - - _playerRateChangeObserver = player.observe(\.rate, options: [.old], changeHandler: _handlers.handlePlaybackRateChange) - _playerVolumeChangeObserver = player.observe(\.volume, options: [.old] ,changeHandler: _handlers.handleVolumeChange) - _playerExternalPlaybackActiveObserver = player.observe(\.isExternalPlaybackActive, changeHandler: _handlers.handleExternalPlaybackActiveChange) - } - - func removePlayerObservers() { - _playerRateChangeObserver?.invalidate() - _playerExternalPlaybackActiveObserver?.invalidate() + weak var _handlers: RCTPlayerObserverHandler? + + var player: AVPlayer? { + willSet { + removePlayerObservers() + removePlayerTimeObserver() + } + didSet { + if player != nil { + addPlayerObservers() + addPlayerTimeObserver() + } + } + } + + var playerItem: AVPlayerItem? { + willSet { + removePlayerItemObservers() } - - func addPlayerItemObservers() { - guard let playerItem = playerItem, let _handlers = _handlers else { return } - _playerItemStatusObserver = playerItem.observe(\.status, options: [.new, .old], changeHandler: _handlers.handlePlayerItemStatusChange) - _playerPlaybackBufferEmptyObserver = playerItem.observe(\.isPlaybackBufferEmpty, options: [.new, .old], changeHandler: _handlers.handlePlaybackBufferKeyEmpty) - _playerPlaybackLikelyToKeepUpObserver = playerItem.observe(\.isPlaybackLikelyToKeepUp, options: [.new, .old], changeHandler: _handlers.handlePlaybackLikelyToKeepUp) - _playerTimedMetadataObserver = playerItem.observe(\.timedMetadata, options: [.new], changeHandler: _handlers.handleTimeMetadataChange) + didSet { + if playerItem != nil { + addPlayerItemObservers() + } } - - func removePlayerItemObservers() { - _playerItemStatusObserver?.invalidate() - _playerPlaybackBufferEmptyObserver?.invalidate() - _playerPlaybackLikelyToKeepUpObserver?.invalidate() - _playerTimedMetadataObserver?.invalidate() + } + + var playerViewController: AVPlayerViewController? { + willSet { + removePlayerViewControllerObservers() } - func addPlayerViewControllerObservers() { - guard let playerViewController = playerViewController, let _handlers = _handlers else { return } - - _playerViewControllerReadyForDisplayObserver = playerViewController.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay) - - _playerViewControllerOverlayFrameObserver = playerViewController.contentOverlayView?.observe(\.frame, options: [.new, .old], changeHandler: _handlers.handleViewControllerOverlayViewFrameChange) + didSet { + if playerViewController != nil { + addPlayerViewControllerObservers() + } } - - func removePlayerViewControllerObservers() { - _playerViewControllerReadyForDisplayObserver?.invalidate() - _playerViewControllerOverlayFrameObserver?.invalidate() + } + + var playerLayer: AVPlayerLayer? { + willSet { + removePlayerLayerObserver() } - - func addPlayerLayerObserver() { - guard let _handlers = _handlers else {return} - _playerLayerReadyForDisplayObserver = playerLayer?.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay) + didSet { + if playerLayer != nil { + addPlayerLayerObserver() + } } - - func removePlayerLayerObserver() { - _playerLayerReadyForDisplayObserver?.invalidate() + } + + private var _progressUpdateInterval: TimeInterval = 250 + private var _timeObserver: Any? 
+ + private var _playerRateChangeObserver: NSKeyValueObservation? + private var _playerVolumeChangeObserver: NSKeyValueObservation? + private var _playerExternalPlaybackActiveObserver: NSKeyValueObservation? + private var _playerItemStatusObserver: NSKeyValueObservation? + private var _playerPlaybackBufferEmptyObserver: NSKeyValueObservation? + private var _playerPlaybackLikelyToKeepUpObserver: NSKeyValueObservation? + private var _playerTimedMetadataObserver: NSKeyValueObservation? + private var _playerViewControllerReadyForDisplayObserver: NSKeyValueObservation? + private var _playerLayerReadyForDisplayObserver: NSKeyValueObservation? + private var _playerViewControllerOverlayFrameObserver: NSKeyValueObservation? + + deinit { + if let _handlers = _handlers { + NotificationCenter.default.removeObserver(_handlers) } - - func addPlayerTimeObserver() { - guard let _handlers = _handlers else {return} - removePlayerTimeObserver() - let progressUpdateIntervalMS:Float64 = _progressUpdateInterval / 1000 - // @see endScrubbing in AVPlayerDemoPlaybackViewController.m - // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html - _timeObserver = player?.addPeriodicTimeObserver( - forInterval: CMTimeMakeWithSeconds(progressUpdateIntervalMS, preferredTimescale: Int32(NSEC_PER_SEC)), - queue:nil, - using:_handlers.handleTimeUpdate - ) + } + + func addPlayerObservers() { + guard let player = player, let _handlers = _handlers else { + return } - - /* Cancels the previously registered time observer. */ - func removePlayerTimeObserver() { - if _timeObserver != nil { - player?.removeTimeObserver(_timeObserver) - _timeObserver = nil - } + + _playerRateChangeObserver = player.observe(\.rate, options: [.old], changeHandler: _handlers.handlePlaybackRateChange) + _playerVolumeChangeObserver = player.observe(\.volume, options: [.old], changeHandler: _handlers.handleVolumeChange) + _playerExternalPlaybackActiveObserver = player.observe(\.isExternalPlaybackActive, changeHandler: _handlers.handleExternalPlaybackActiveChange) + } + + func removePlayerObservers() { + _playerRateChangeObserver?.invalidate() + _playerExternalPlaybackActiveObserver?.invalidate() + } + + func addPlayerItemObservers() { + guard let playerItem = playerItem, let _handlers = _handlers else { return } + _playerItemStatusObserver = playerItem.observe(\.status, options: [.new, .old], changeHandler: _handlers.handlePlayerItemStatusChange) + _playerPlaybackBufferEmptyObserver = playerItem.observe(\.isPlaybackBufferEmpty, options: [.new, .old], changeHandler: _handlers.handlePlaybackBufferKeyEmpty) + _playerPlaybackLikelyToKeepUpObserver = playerItem.observe(\.isPlaybackLikelyToKeepUp, options: [.new, .old], changeHandler: _handlers.handlePlaybackLikelyToKeepUp) + _playerTimedMetadataObserver = playerItem.observe(\.timedMetadata, options: [.new], changeHandler: _handlers.handleTimeMetadataChange) + } + + func removePlayerItemObservers() { + _playerItemStatusObserver?.invalidate() + _playerPlaybackBufferEmptyObserver?.invalidate() + _playerPlaybackLikelyToKeepUpObserver?.invalidate() + _playerTimedMetadataObserver?.invalidate() + } + + func addPlayerViewControllerObservers() { + guard let playerViewController = playerViewController, let _handlers = _handlers else { return } + + _playerViewControllerReadyForDisplayObserver = playerViewController.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay) + + _playerViewControllerOverlayFrameObserver = 
playerViewController.contentOverlayView?.observe(\.frame, options: [.new, .old], changeHandler: _handlers.handleViewControllerOverlayViewFrameChange) + } + + func removePlayerViewControllerObservers() { + _playerViewControllerReadyForDisplayObserver?.invalidate() + _playerViewControllerOverlayFrameObserver?.invalidate() + } + + func addPlayerLayerObserver() { + guard let _handlers = _handlers else { return } + _playerLayerReadyForDisplayObserver = playerLayer?.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay) + } + + func removePlayerLayerObserver() { + _playerLayerReadyForDisplayObserver?.invalidate() + } + + func addPlayerTimeObserver() { + guard let _handlers = _handlers else { return } + removePlayerTimeObserver() + let progressUpdateIntervalMS: Float64 = _progressUpdateInterval / 1000 + // @see endScrubbing in AVPlayerDemoPlaybackViewController.m + // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html + _timeObserver = player?.addPeriodicTimeObserver( + forInterval: CMTimeMakeWithSeconds(progressUpdateIntervalMS, preferredTimescale: Int32(NSEC_PER_SEC)), + queue: nil, + using: _handlers.handleTimeUpdate + ) + } + + /* Cancels the previously registered time observer. */ + func removePlayerTimeObserver() { + if _timeObserver != nil { + player?.removeTimeObserver(_timeObserver) + _timeObserver = nil } - - func addTimeObserverIfNotSet() { - if (_timeObserver == nil) { - addPlayerTimeObserver() - } + } + + func addTimeObserverIfNotSet() { + if _timeObserver == nil { + addPlayerTimeObserver() } - - func replaceTimeObserverIfSet(_ newUpdateInterval:Float64? = nil) { - if let newUpdateInterval = newUpdateInterval { - _progressUpdateInterval = newUpdateInterval - } - if (_timeObserver != nil) { - addPlayerTimeObserver() - } + } + + func replaceTimeObserverIfSet(_ newUpdateInterval: Float64? 
= nil) { + if let newUpdateInterval = newUpdateInterval { + _progressUpdateInterval = newUpdateInterval } - - func attachPlayerEventListeners() { - guard let _handlers = _handlers else {return} - NotificationCenter.default.removeObserver(_handlers, - name:NSNotification.Name.AVPlayerItemDidPlayToEndTime, - object:player?.currentItem) - - NotificationCenter.default.addObserver(_handlers, - selector:#selector(RCTPlayerObserverHandler.handlePlayerItemDidReachEnd(notification:)), - name:NSNotification.Name.AVPlayerItemDidPlayToEndTime, - object:player?.currentItem) - - NotificationCenter.default.removeObserver(_handlers, - name:NSNotification.Name.AVPlayerItemPlaybackStalled, - object:nil) - - NotificationCenter.default.addObserver(_handlers, - selector:#selector(RCTPlayerObserverHandler.handlePlaybackStalled(notification:)), - name:NSNotification.Name.AVPlayerItemPlaybackStalled, - object:nil) - - NotificationCenter.default.removeObserver(_handlers, - name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, - object:nil) - - NotificationCenter.default.addObserver(_handlers, - selector:#selector(RCTPlayerObserverHandler.handleDidFailToFinishPlaying(notification:)), - name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, - object:nil) - - NotificationCenter.default.removeObserver(_handlers, name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, object: player?.currentItem) - - NotificationCenter.default.addObserver(_handlers, - selector:#selector(RCTPlayerObserverHandlerObjc.handleAVPlayerAccess(notification:)), - name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, - object: player?.currentItem) + if _timeObserver != nil { + addPlayerTimeObserver() } - - func clearPlayer() { - player = nil - playerItem = nil - if let _handlers = _handlers { - NotificationCenter.default.removeObserver(_handlers) - } + } + + func attachPlayerEventListeners() { + guard let _handlers = _handlers else { return } + NotificationCenter.default.removeObserver(_handlers, + name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, + object: player?.currentItem) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandler.handlePlayerItemDidReachEnd(notification:)), + name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, + object: player?.currentItem) + + NotificationCenter.default.removeObserver(_handlers, + name: NSNotification.Name.AVPlayerItemPlaybackStalled, + object: nil) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandler.handlePlaybackStalled(notification:)), + name: NSNotification.Name.AVPlayerItemPlaybackStalled, + object: nil) + + NotificationCenter.default.removeObserver(_handlers, + name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, + object: nil) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandler.handleDidFailToFinishPlaying(notification:)), + name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, + object: nil) + + NotificationCenter.default.removeObserver(_handlers, name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, object: player?.currentItem) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandlerObjc.handleAVPlayerAccess(notification:)), + name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, + object: player?.currentItem) + } + + func clearPlayer() { + player = nil + playerItem = nil + if let _handlers = _handlers { + NotificationCenter.default.removeObserver(_handlers) } + } } 
diff --git a/ios/Video/Features/RCTPlayerOperations.swift b/ios/Video/Features/RCTPlayerOperations.swift index 6e82e2e76b..8da49eeba3 100644 --- a/ios/Video/Features/RCTPlayerOperations.swift +++ b/ios/Video/Features/RCTPlayerOperations.swift @@ -4,242 +4,242 @@ import Promises let RCTVideoUnset = -1 +// MARK: - RCTPlayerOperations + /*! * Collection of mutating functions */ enum RCTPlayerOperations { + static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack]?, criteria: SelectedTrackCriteria?) { + let type = criteria?.type + let textTracks: [TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player) + let trackCount: Int! = player?.currentItem?.tracks.count ?? 0 + + // The first few tracks will be audio & video track + var firstTextIndex = 0 + for i in 0 ..< trackCount { + if player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? false { + firstTextIndex = i + break + } + } - static func setSideloadedText(player:AVPlayer?, textTracks:[TextTrack]?, criteria:SelectedTrackCriteria?) { - let type = criteria?.type - let textTracks:[TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player) - let trackCount:Int! = player?.currentItem?.tracks.count ?? 0 - - // The first few tracks will be audio & video track - var firstTextIndex:Int = 0 - for i in 0..<(trackCount) { - if player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? false { - firstTextIndex = i - break - } + var selectedTrackIndex: Int = RCTVideoUnset + + if type == "disabled" { + // Select the last text index which is the disabled text track + selectedTrackIndex = trackCount - firstTextIndex + } else if type == "language" { + let selectedValue = criteria?.value as? String + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if selectedValue == currentTextTrack.language { + selectedTrackIndex = i + break } - - var selectedTrackIndex:Int = RCTVideoUnset - - if (type == "disabled") { - // Select the last text index which is the disabled text track - selectedTrackIndex = trackCount - firstTextIndex - } else if (type == "language") { - let selectedValue = criteria?.value as? String - for i in 0.. index { - selectedTrackIndex = index - } - } + } + } else if type == "title" { + let selectedValue = criteria?.value as? String + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if selectedValue == currentTextTrack.title { + selectedTrackIndex = i + break } - - // in the situation that a selected text track is not available (eg. specifies a textTrack not available) - if (type != "disabled") && selectedTrackIndex == RCTVideoUnset { - let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user) - let captionSettings = captioningMediaCharacteristics as? [AnyHashable] - if ((captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil) { - selectedTrackIndex = 0 // If we can't find a match, use the first available track - let systemLanguage = NSLocale.preferredLanguages.first - for i in 0.. index { + selectedTrackIndex = index } + } + } - for i in firstTextIndex..<(trackCount) { - var isEnabled = false - if selectedTrackIndex != RCTVideoUnset { - isEnabled = i == selectedTrackIndex + firstTextIndex - } - player?.currentItem?.tracks[i].isEnabled = isEnabled + // in the situation that a selected text track is not available (eg. 
specifies a textTrack not available) + if (type != "disabled") && selectedTrackIndex == RCTVideoUnset { + let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user) + let captionSettings = captioningMediaCharacteristics as? [AnyHashable] + if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil { + selectedTrackIndex = 0 // If we can't find a match, use the first available track + let systemLanguage = NSLocale.preferredLanguages.first + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if systemLanguage == currentTextTrack.language { + selectedTrackIndex = i + break + } } + } } - // UNUSED - static func setStreamingText(player:AVPlayer?, criteria:SelectedTrackCriteria?) { - let type = criteria?.type - let group:AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible) - var mediaOption:AVMediaSelectionOption! - - if (type == "disabled") { - // Do nothing. We want to ensure option is nil - } else if (type == "language") || (type == "title") { - let value = criteria?.value as? String - for i in 0.. index { - mediaOption = group.options[index] - } - } - } else { // default. invalid type or "system" - #if os(tvOS) - // Do noting. Fix for tvOS native audio menu language selector - #else - player?.currentItem?.selectMediaOptionAutomatically(in: group) - return - #endif + for i in firstTextIndex ..< trackCount { + var isEnabled = false + if selectedTrackIndex != RCTVideoUnset { + isEnabled = i == selectedTrackIndex + firstTextIndex + } + player?.currentItem?.tracks[i].isEnabled = isEnabled + } + } + + // UNUSED + static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) { + let type = criteria?.type + let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible) + var mediaOption: AVMediaSelectionOption! + + if type == "disabled" { + // Do nothing. We want to ensure option is nil + } else if (type == "language") || (type == "title") { + let value = criteria?.value as? String + for i in 0 ..< group.options.count { + let currentOption: AVMediaSelectionOption! = group.options[i] + var optionValue: String! + if type == "language" { + optionValue = currentOption.extendedLanguageTag + } else { + optionValue = currentOption.commonMetadata.map(\.value)[0] as! String } - - #if os(tvOS) - // Do noting. Fix for tvOS native audio menu language selector - #else - // If a match isn't found, option will be nil and text tracks will be disabled - player?.currentItem?.select(mediaOption, in:group) - #endif + if value == optionValue { + mediaOption = currentOption + break + } + } + // } else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if group.options.count > index { + mediaOption = group.options[index] + } + } + } else { // default. invalid type or "system" + #if os(tvOS) + // Do noting. Fix for tvOS native audio menu language selector + #else + player?.currentItem?.selectMediaOptionAutomatically(in: group) + return + #endif } - static func setMediaSelectionTrackForCharacteristic(player:AVPlayer?, characteristic:AVMediaCharacteristic, criteria:SelectedTrackCriteria?) { - let type = criteria?.type - let group:AVMediaSelectionGroup! 
= player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic) - var mediaOption:AVMediaSelectionOption! - - guard group != nil else { return } - - if (type == "disabled") { - // Do nothing. We want to ensure option is nil - } else if (type == "language") || (type == "title") { - let value = criteria?.value as? String - for i in 0.. index { - mediaOption = group.options[index] - } - } - } else if let group = group { // default. invalid type or "system" - player?.currentItem?.selectMediaOptionAutomatically(in: group) - return + #if os(tvOS) + // Do noting. Fix for tvOS native audio menu language selector + #else + // If a match isn't found, option will be nil and text tracks will be disabled + player?.currentItem?.select(mediaOption, in: group) + #endif + } + + static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) { + let type = criteria?.type + let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic) + var mediaOption: AVMediaSelectionOption! + + guard group != nil else { return } + + if type == "disabled" { + // Do nothing. We want to ensure option is nil + } else if (type == "language") || (type == "title") { + let value = criteria?.value as? String + for i in 0 ..< group.options.count { + let currentOption: AVMediaSelectionOption! = group.options[i] + var optionValue: String! + if type == "language" { + optionValue = currentOption.extendedLanguageTag + } else { + optionValue = currentOption.commonMetadata.map(\.value)[0] as? String } - - if let group = group { - // If a match isn't found, option will be nil and text tracks will be disabled - player?.currentItem?.select(mediaOption, in:group) + if value == optionValue { + mediaOption = currentOption + break } - + } + // } else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if group.options.count > index { + mediaOption = group.options[index] + } + } + } else if let group = group { // default. 
invalid type or "system" + player?.currentItem?.selectMediaOptionAutomatically(in: group) + return } - static func seek(player: AVPlayer, playerItem:AVPlayerItem, paused:Bool, seekTime:Float, seekTolerance:Float) -> Promise { - let timeScale:Int = 1000 - let cmSeekTime:CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale)) - let current:CMTime = playerItem.currentTime() - let tolerance:CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale)) - - return Promise(on: .global()) { fulfill, reject in - guard CMTimeCompare(current, cmSeekTime) != 0 else { - reject(NSError(domain: "", code: 0, userInfo: nil)) - return - } - if !paused { player.pause() } - - player.seek(to: cmSeekTime, toleranceBefore:tolerance, toleranceAfter:tolerance, completionHandler:{ (finished:Bool) in - fulfill(finished) - }) - } + if let group = group { + // If a match isn't found, option will be nil and text tracks will be disabled + player?.currentItem?.select(mediaOption, in: group) + } + } + + static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float) -> Promise { + let timeScale = 1000 + let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale)) + let current: CMTime = playerItem.currentTime() + let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale)) + + return Promise(on: .global()) { fulfill, reject in + guard CMTimeCompare(current, cmSeekTime) != 0 else { + reject(NSError(domain: "", code: 0, userInfo: nil)) + return + } + if !paused { player.pause() } + + player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in + fulfill(finished) + }) } + } - static func configureAudio(ignoreSilentSwitch:String, mixWithOthers:String, audioOutput:String) { - let audioSession:AVAudioSession! = AVAudioSession.sharedInstance() - var category:AVAudioSession.Category? = nil - var options:AVAudioSession.CategoryOptions? = nil + static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) { + let audioSession: AVAudioSession! = AVAudioSession.sharedInstance() + var category: AVAudioSession.Category? + var options: AVAudioSession.CategoryOptions? - if (ignoreSilentSwitch == "ignore") { - category = audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback - } else if (ignoreSilentSwitch == "obey") { - category = AVAudioSession.Category.ambient - } + if ignoreSilentSwitch == "ignore" { + category = audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback + } else if ignoreSilentSwitch == "obey" { + category = AVAudioSession.Category.ambient + } - if (mixWithOthers == "mix") { - options = .mixWithOthers - } else if (mixWithOthers == "duck") { - options = .duckOthers - } + if mixWithOthers == "mix" { + options = .mixWithOthers + } else if mixWithOthers == "duck" { + options = .duckOthers + } - if let category = category, let options = options { - do { - try audioSession.setCategory(category, options: options) - } catch { - debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category and options. 
Error: \(error).") - #if !os(tvOS) - // Handle specific set category and option combination error - // setCategory:AVAudioSessionCategoryPlayback withOptions:mixWithOthers || duckOthers - // Failed to set category, error: 'what' Error Domain=NSOSStatusErrorDomain - // https://developer.apple.com/forums/thread/714598 - if #available(iOS 16.0, *) { - do { - debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category to playAndRecord with defaultToSpeaker options.") - try audioSession.setCategory(audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback, options: AVAudioSession.CategoryOptions.defaultToSpeaker) - } catch { - debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category and options problem. Error: \(error).") - } - } - #endif - } - } else if let category = category, options == nil { + if let category = category, let options = options { + do { + try audioSession.setCategory(category, options: options) + } catch { + debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category and options. Error: \(error).") + #if !os(tvOS) + // Handle specific set category and option combination error + // setCategory:AVAudioSessionCategoryPlayback withOptions:mixWithOthers || duckOthers + // Failed to set category, error: 'what' Error Domain=NSOSStatusErrorDomain + // https://developer.apple.com/forums/thread/714598 + if #available(iOS 16.0, *) { do { - try audioSession.setCategory(category) + debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category to playAndRecord with defaultToSpeaker options.") + try audioSession.setCategory(audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback, options: AVAudioSession.CategoryOptions.defaultToSpeaker) } catch { - debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category. Error: \(error).") + debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category and options problem. Error: \(error).") } - } else if category == nil, let options = options { - do { - try audioSession.setCategory(audioSession.category, options: options) - } catch { - debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession options. Error: \(error).") - } - } + } + #endif + } + } else if let category = category, options == nil { + do { + try audioSession.setCategory(category) + } catch { + debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category. Error: \(error).") + } + } else if category == nil, let options = options { + do { + try audioSession.setCategory(audioSession.category, options: options) + } catch { + debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession options. Error: \(error).") + } } + } } diff --git a/ios/Video/Features/RCTResourceLoaderDelegate.swift b/ios/Video/Features/RCTResourceLoaderDelegate.swift index 07b8e62af4..24b6fcb8c6 100644 --- a/ios/Video/Features/RCTResourceLoaderDelegate.swift +++ b/ios/Video/Features/RCTResourceLoaderDelegate.swift @@ -2,189 +2,184 @@ import AVFoundation import Promises class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate { - - private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:] - private var _requestingCertificate:Bool = false - private var _requestingCertificateErrored:Bool = false - private var _drm: DRMParams? - private var _localSourceEncryptionKeyScheme: String? - private var _reactTag: NSNumber? - private var _onVideoError: RCTDirectEventBlock? 
- private var _onGetLicense: RCTDirectEventBlock? - - - init( - asset: AVURLAsset, - drm: DRMParams?, - localSourceEncryptionKeyScheme: String?, - onVideoError: RCTDirectEventBlock?, - onGetLicense: RCTDirectEventBlock?, - reactTag: NSNumber - ) { - super.init() - let queue = DispatchQueue(label: "assetQueue") - asset.resourceLoader.setDelegate(self, queue: queue) - _reactTag = reactTag - _onVideoError = onVideoError - _onGetLicense = onGetLicense - _drm = drm - _localSourceEncryptionKeyScheme = localSourceEncryptionKeyScheme + private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:] + private var _requestingCertificate = false + private var _requestingCertificateErrored = false + private var _drm: DRMParams? + private var _localSourceEncryptionKeyScheme: String? + private var _reactTag: NSNumber? + private var _onVideoError: RCTDirectEventBlock? + private var _onGetLicense: RCTDirectEventBlock? + + init( + asset: AVURLAsset, + drm: DRMParams?, + localSourceEncryptionKeyScheme: String?, + onVideoError: RCTDirectEventBlock?, + onGetLicense: RCTDirectEventBlock?, + reactTag: NSNumber + ) { + super.init() + let queue = DispatchQueue(label: "assetQueue") + asset.resourceLoader.setDelegate(self, queue: queue) + _reactTag = reactTag + _onVideoError = onVideoError + _onGetLicense = onGetLicense + _drm = drm + _localSourceEncryptionKeyScheme = localSourceEncryptionKeyScheme + } + + deinit { + for request in _loadingRequests.values { + request?.finishLoading() } - - deinit { - for request in _loadingRequests.values { - request?.finishLoading() - } + } + + func resourceLoader(_: AVAssetResourceLoader, shouldWaitForRenewalOfRequestedResource renewalRequest: AVAssetResourceRenewalRequest) -> Bool { + return loadingRequestHandling(renewalRequest) + } + + func resourceLoader(_: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool { + return loadingRequestHandling(loadingRequest) + } + + func resourceLoader(_: AVAssetResourceLoader, didCancel _: AVAssetResourceLoadingRequest) { + RCTLog("didCancelLoadingRequest") + } + + func setLicenseResult(_ license: String!, _ licenseUrl: String!) { + // Check if the loading request exists in _loadingRequests based on licenseUrl + guard let loadingRequest = _loadingRequests[licenseUrl] else { + setLicenseResultError("Loading request for licenseUrl \(licenseUrl) not found", licenseUrl) + return } - - func resourceLoader(_ resourceLoader:AVAssetResourceLoader, shouldWaitForRenewalOfRequestedResource renewalRequest:AVAssetResourceRenewalRequest) -> Bool { - return loadingRequestHandling(renewalRequest) + + // Check if the license data is valid + guard let respondData = RCTVideoUtils.base64DataFromBase64String(base64String: license) else { + setLicenseResultError("No data from JS license response", licenseUrl) + return } - - func resourceLoader(_ resourceLoader:AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest:AVAssetResourceLoadingRequest) -> Bool { - return loadingRequestHandling(loadingRequest) + + let dataRequest: AVAssetResourceLoadingDataRequest! = loadingRequest?.dataRequest + dataRequest.respond(with: respondData) + loadingRequest!.finishLoading() + _loadingRequests.removeValue(forKey: licenseUrl) + } + + func setLicenseResultError(_ error: String!, _ licenseUrl: String!) 
{ + // Check if the loading request exists in _loadingRequests based on licenseUrl + guard let loadingRequest = _loadingRequests[licenseUrl] else { + print("Loading request for licenseUrl \(licenseUrl) not found. Error: \(error)") + return } - - func resourceLoader(_ resourceLoader:AVAssetResourceLoader, didCancel loadingRequest:AVAssetResourceLoadingRequest) { - RCTLog("didCancelLoadingRequest") + + self.finishLoadingWithError(error: RCTVideoErrorHandler.fromJSPart(error), licenseUrl: licenseUrl) + } + + func finishLoadingWithError(error: Error!, licenseUrl: String!) -> Bool { + // Check if the loading request exists in _loadingRequests based on licenseUrl + guard let loadingRequest = _loadingRequests[licenseUrl], let error = error as NSError? else { + // Handle the case where the loading request is not found or error is nil + return false } - func setLicenseResult(_ license:String!,_ licenseUrl: String!) { - - // Check if the loading request exists in _loadingRequests based on licenseUrl - guard let loadingRequest = _loadingRequests[licenseUrl] else { - setLicenseResultError("Loading request for licenseUrl \(licenseUrl) not found", licenseUrl) - return - } - - // Check if the license data is valid - guard let respondData = RCTVideoUtils.base64DataFromBase64String(base64String: license) else { - setLicenseResultError("No data from JS license response", licenseUrl) - return - } - - let dataRequest: AVAssetResourceLoadingDataRequest! = loadingRequest?.dataRequest - dataRequest.respond(with: respondData) - loadingRequest!.finishLoading() - _loadingRequests.removeValue(forKey: licenseUrl) + loadingRequest!.finishLoading(with: error) + _loadingRequests.removeValue(forKey: licenseUrl) + _onVideoError?([ + "error": [ + "code": NSNumber(value: error.code), + "localizedDescription": error.localizedDescription ?? "", + "localizedFailureReason": error.localizedFailureReason ?? "", + "localizedRecoverySuggestion": error.localizedRecoverySuggestion ?? "", + "domain": error.domain, + ], + "target": _reactTag, + ]) + + return false + } + + func loadingRequestHandling(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { + if handleEmbeddedKey(loadingRequest) { + return true } - - func setLicenseResultError(_ error:String!,_ licenseUrl: String!) { - // Check if the loading request exists in _loadingRequests based on licenseUrl - guard let loadingRequest = _loadingRequests[licenseUrl] else { - print("Loading request for licenseUrl \(licenseUrl) not found. Error: \(error)") - return - } - self.finishLoadingWithError(error: RCTVideoErrorHandler.fromJSPart(error), licenseUrl: licenseUrl) + if _drm != nil { + return handleDrm(loadingRequest) } - - func finishLoadingWithError(error: Error!, licenseUrl: String!) -> Bool { - // Check if the loading request exists in _loadingRequests based on licenseUrl - guard let loadingRequest = _loadingRequests[licenseUrl], let error = error as NSError? else { - // Handle the case where the loading request is not found or error is nil - return false - } - loadingRequest!.finishLoading(with: error) - _loadingRequests.removeValue(forKey: licenseUrl) - _onVideoError?([ - "error": [ - "code": NSNumber(value: error.code), - "localizedDescription": error.localizedDescription ?? "", - "localizedFailureReason": error.localizedFailureReason ?? "", - "localizedRecoverySuggestion": error.localizedRecoverySuggestion ?? 
"", - "domain": error.domain - ], - "target": _reactTag - ]) - - return false + return false + } + + func handleEmbeddedKey(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { + guard let url = loadingRequest.request.url, + let _localSourceEncryptionKeyScheme = _localSourceEncryptionKeyScheme, + let persistentKeyData = RCTVideoUtils.extractDataFromCustomSchemeUrl(from: url, scheme: _localSourceEncryptionKeyScheme) + else { + return false } - - func loadingRequestHandling(_ loadingRequest:AVAssetResourceLoadingRequest!) -> Bool { - if handleEmbeddedKey(loadingRequest) { - return true - } - - if _drm != nil { - return handleDrm(loadingRequest) - } - - return false + loadingRequest.contentInformationRequest?.contentType = AVStreamingKeyDeliveryPersistentContentKeyType + loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true + loadingRequest.contentInformationRequest?.contentLength = Int64(persistentKeyData.count) + loadingRequest.dataRequest?.respond(with: persistentKeyData) + loadingRequest.finishLoading() + + return true + } + + func handleDrm(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { + if _requestingCertificate { + return true + } else if _requestingCertificateErrored { + return false } - - func handleEmbeddedKey(_ loadingRequest:AVAssetResourceLoadingRequest!) -> Bool { - guard let url = loadingRequest.request.url, - let _localSourceEncryptionKeyScheme = _localSourceEncryptionKeyScheme, - let persistentKeyData = RCTVideoUtils.extractDataFromCustomSchemeUrl(from: url, scheme: _localSourceEncryptionKeyScheme) - else { - return false + + var requestKey: String = loadingRequest.request.url?.absoluteString ?? "" + + _loadingRequests[requestKey] = loadingRequest + + guard let _drm = _drm, let drmType = _drm.type, drmType == "fairplay" else { + return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey) + } + + var promise: Promise + if _onGetLicense != nil { + let contentId = _drm.contentId ?? loadingRequest.request.url?.host + promise = RCTVideoDRM.handleWithOnGetLicense( + loadingRequest: loadingRequest, + contentId: contentId, + certificateUrl: _drm.certificateUrl, + base64Certificate: _drm.base64Certificate + ).then { spcData in + self._requestingCertificate = true + self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? loadingRequest.request.url?.absoluteString ?? "", + "contentId": contentId ?? "", + "spcBase64": spcData.base64EncodedString(options: []), + "target": self._reactTag]) + } + } else { + promise = RCTVideoDRM.handleInternalGetLicense( + loadingRequest: loadingRequest, + contentId: _drm.contentId, + licenseServer: _drm.licenseServer, + certificateUrl: _drm.certificateUrl, + base64Certificate: _drm.base64Certificate, + headers: _drm.headers + ).then { data in + guard let dataRequest = loadingRequest.dataRequest else { + throw RCTVideoErrorHandler.noCertificateData } - - loadingRequest.contentInformationRequest?.contentType = AVStreamingKeyDeliveryPersistentContentKeyType - loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true - loadingRequest.contentInformationRequest?.contentLength = Int64(persistentKeyData.count) - loadingRequest.dataRequest?.respond(with: persistentKeyData) + dataRequest.respond(with: data) loadingRequest.finishLoading() - - return true + } } - - func handleDrm(_ loadingRequest:AVAssetResourceLoadingRequest!) 
-> Bool { - if _requestingCertificate { - return true - } else if _requestingCertificateErrored { - return false - } - - var requestKey: String = loadingRequest.request.url?.absoluteString ?? "" - _loadingRequests[requestKey] = loadingRequest - - guard let _drm = _drm, let drmType = _drm.type, drmType == "fairplay" else { - return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey) - } - - var promise: Promise - if _onGetLicense != nil { - let contentId = _drm.contentId ?? loadingRequest.request.url?.host - promise = RCTVideoDRM.handleWithOnGetLicense( - loadingRequest:loadingRequest, - contentId:contentId, - certificateUrl:_drm.certificateUrl, - base64Certificate:_drm.base64Certificate - ) .then{ spcData -> Void in - self._requestingCertificate = true - self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? loadingRequest.request.url?.absoluteString ?? "", - "contentId": contentId ?? "", - "spcBase64": spcData.base64EncodedString(options: []), - "target": self._reactTag]) - } - } else { - promise = RCTVideoDRM.handleInternalGetLicense( - loadingRequest:loadingRequest, - contentId:_drm.contentId, - licenseServer:_drm.licenseServer, - certificateUrl:_drm.certificateUrl, - base64Certificate:_drm.base64Certificate, - headers:_drm.headers - ) .then{ data -> Void in - guard let dataRequest = loadingRequest.dataRequest else { - throw RCTVideoErrorHandler.noCertificateData - } - dataRequest.respond(with:data) - loadingRequest.finishLoading() - } - } - - - promise.catch{ error in - self.finishLoadingWithError(error:error, licenseUrl: requestKey) - self._requestingCertificateErrored = true - } - - return true + promise.catch { error in + self.finishLoadingWithError(error: error, licenseUrl: requestKey) + self._requestingCertificateErrored = true } + + return true + } } diff --git a/ios/Video/Features/RCTVideoDRM.swift b/ios/Video/Features/RCTVideoDRM.swift index d059bbc491..78cc7a0be5 100644 --- a/ios/Video/Features/RCTVideoDRM.swift +++ b/ios/Video/Features/RCTVideoDRM.swift @@ -2,167 +2,165 @@ import AVFoundation import Promises struct RCTVideoDRM { - @available(*, unavailable) private init() {} - - static func fetchLicense( - licenseServer: String, - spcData: Data?, - contentId: String, - headers: [String:Any]? - ) -> Promise { - let request = createLicenseRequest(licenseServer:licenseServer, spcData:spcData, contentId:contentId, headers:headers) - - return Promise(on: .global()) { fulfill, reject in - let postDataTask = URLSession.shared.dataTask(with: request as URLRequest, completionHandler:{ (data:Data!,response:URLResponse!,error:Error!) in - - let httpResponse:HTTPURLResponse! = (response as! HTTPURLResponse) - - guard error == nil else { - print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") - reject(error) - return - } - guard httpResponse.statusCode == 200 else { - print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") - reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode)) - return - } - - guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else { - reject(RCTVideoErrorHandler.noDataFromLicenseRequest) - return - } - - fulfill(decodedData) - }) - postDataTask.resume() + @available(*, unavailable) private init() {} + + static func fetchLicense( + licenseServer: String, + spcData: Data?, + contentId: String, + headers: [String: Any]? 
+ ) -> Promise { + let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers) + + return Promise(on: .global()) { fulfill, reject in + let postDataTask = URLSession.shared.dataTask(with: request as URLRequest, completionHandler: { (data: Data!, response: URLResponse!, error: Error!) in + let httpResponse: HTTPURLResponse! = (response as! HTTPURLResponse) + + guard error == nil else { + print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") + reject(error) + return } - } - - static func createLicenseRequest( - licenseServer: String, - spcData: Data?, - contentId: String, - headers: [String:Any]? - ) -> URLRequest { - var request = URLRequest(url: URL(string: licenseServer)!) - request.httpMethod = "POST" - - if let headers = headers { - for item in headers { - guard let key = item.key as? String, let value = item.value as? String else { - continue - } - request.setValue(value, forHTTPHeaderField: key) - } + guard httpResponse.statusCode == 200 else { + print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") + reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode)) + return } - - let spcEncoded = spcData?.base64EncodedString(options: []) - let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes(kCFAllocatorDefault, spcEncoded as? CFString? as! CFString, nil, "?=&+" as CFString, CFStringBuiltInEncodings.UTF8.rawValue) as? String - let post = String(format:"spc=%@&%@", spcUrlEncoded as! CVarArg, contentId) - let postData = post.data(using: String.Encoding.utf8, allowLossyConversion:true) - request.httpBody = postData - - return request - } - - static func fetchSpcData( - loadingRequest: AVAssetResourceLoadingRequest, - certificateData: Data, - contentIdData: Data - ) -> Promise { - return Promise(on: .global()) { fulfill, reject in - var spcError:NSError! - var spcData: Data? - do { - spcData = try loadingRequest.streamingContentKeyRequestData(forApp: certificateData, contentIdentifier: contentIdData as Data, options: nil) - } catch _ { - print("SPC error") - } - - if spcError != nil { - reject(spcError) - } - - guard let spcData = spcData else { - reject(RCTVideoErrorHandler.noSPC) - return - } - - fulfill(spcData) + + guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else { + reject(RCTVideoErrorHandler.noDataFromLicenseRequest) + return } + + fulfill(decodedData) + }) + postDataTask.resume() } - - static func createCertificateData(certificateStringUrl:String?, base64Certificate:Bool?) -> Promise { - return Promise(on: .global()) { fulfill, reject in - - guard let certificateStringUrl = certificateStringUrl, - let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else { - reject(RCTVideoErrorHandler.noCertificateURL) - return - } - - var certificateData:Data? - do { - certificateData = try Data(contentsOf: certificateURL) - if (base64Certificate != nil) { - certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters) - } - } catch {} - - guard let certificateData = certificateData else { - reject(RCTVideoErrorHandler.noCertificateData) - return - } - - fulfill(certificateData) + } + + static func createLicenseRequest( + licenseServer: String, + spcData: Data?, + contentId: String, + headers: [String: Any]? + ) -> URLRequest { + var request = URLRequest(url: URL(string: licenseServer)!) 
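// Usage sketch of the FairPlay flow in this file: handleInternalGetLicense (defined below)
// chains createCertificateData -> fetchSpcData -> fetchLicense. The URLs, the header value and
// the loadingRequest (normally handed over by RCTResourceLoaderDelegate) are placeholders.
RCTVideoDRM.handleInternalGetLicense(
  loadingRequest: loadingRequest,
  contentId: nil, // falls back to the skd:// URL of the request
  licenseServer: "https://license.example.com/fps",
  certificateUrl: "https://license.example.com/cert.der",
  base64Certificate: false,
  headers: ["Authorization": "Bearer <token>"]
).then { ckcData in
  loadingRequest.dataRequest?.respond(with: ckcData)
  loadingRequest.finishLoading()
}.catch { error in
  loadingRequest.finishLoading(with: error)
}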
+ request.httpMethod = "POST" + + if let headers = headers { + for item in headers { + guard let key = item.key as? String, let value = item.value as? String else { + continue } + request.setValue(value, forHTTPHeaderField: key) + } } - - static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId:String?, certificateUrl:String?, base64Certificate:Bool?) -> Promise { - let contentIdData = contentId?.data(using: .utf8) - - return RCTVideoDRM.createCertificateData(certificateStringUrl:certificateUrl, base64Certificate:base64Certificate) - .then{ certificateData -> Promise in - guard let contentIdData = contentIdData else { - throw RCTVideoError.invalidContentId as! Error - } - - return RCTVideoDRM.fetchSpcData( - loadingRequest:loadingRequest, - certificateData:certificateData, - contentIdData:contentIdData - ) - } + + let spcEncoded = spcData?.base64EncodedString(options: []) + let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes(kCFAllocatorDefault, spcEncoded as? CFString? as! CFString, nil, "?=&+" as CFString, CFStringBuiltInEncodings.UTF8.rawValue) as? String + let post = String(format: "spc=%@&%@", spcUrlEncoded as! CVarArg, contentId) + let postData = post.data(using: String.Encoding.utf8, allowLossyConversion: true) + request.httpBody = postData + + return request + } + + static func fetchSpcData( + loadingRequest: AVAssetResourceLoadingRequest, + certificateData: Data, + contentIdData: Data + ) -> Promise { + return Promise(on: .global()) { fulfill, reject in + var spcError: NSError! + var spcData: Data? + do { + spcData = try loadingRequest.streamingContentKeyRequestData(forApp: certificateData, contentIdentifier: contentIdData as Data, options: nil) + } catch _ { + print("SPC error") + } + + if spcError != nil { + reject(spcError) + } + + guard let spcData = spcData else { + reject(RCTVideoErrorHandler.noSPC) + return + } + + fulfill(spcData) } - - static func handleInternalGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId:String?, licenseServer:String?, certificateUrl:String?, base64Certificate:Bool?, headers: [String:Any]?) -> Promise { - let url = loadingRequest.request.url - - guard let contentId = contentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with:"") else { - return Promise(RCTVideoError.invalidContentId as! Error) + } + + static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) -> Promise { + return Promise(on: .global()) { fulfill, reject in + guard let certificateStringUrl = certificateStringUrl, + let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else { + reject(RCTVideoErrorHandler.noCertificateURL) + return + } + + var certificateData: Data? + do { + certificateData = try Data(contentsOf: certificateURL) + if base64Certificate != nil { + certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters) + } + } catch {} + + guard let certificateData = certificateData else { + reject(RCTVideoErrorHandler.noCertificateData) + return + } + + fulfill(certificateData) + } + } + + static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?, base64Certificate: Bool?) 
-> Promise { + let contentIdData = contentId?.data(using: .utf8) + + return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) + .then { certificateData -> Promise in + guard let contentIdData = contentIdData else { + throw RCTVideoError.invalidContentId as! Error } - - let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length:contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data - - return RCTVideoDRM.createCertificateData(certificateStringUrl:certificateUrl, base64Certificate:base64Certificate) - .then{ certificateData in - return RCTVideoDRM.fetchSpcData( - loadingRequest:loadingRequest, - certificateData:certificateData, - contentIdData:contentIdData - ) - } - .then{ spcData -> Promise in - guard let licenseServer = licenseServer else { - throw RCTVideoError.noLicenseServerURL as! Error - } - return RCTVideoDRM.fetchLicense( - licenseServer: licenseServer, - spcData: spcData, - contentId: contentId, - headers: headers - ) - } + + return RCTVideoDRM.fetchSpcData( + loadingRequest: loadingRequest, + certificateData: certificateData, + contentIdData: contentIdData + ) + } + } + + static func handleInternalGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, licenseServer: String?, certificateUrl: String?, base64Certificate: Bool?, headers: [String: Any]?) -> Promise { + let url = loadingRequest.request.url + + guard let contentId = contentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else { + return Promise(RCTVideoError.invalidContentId as! Error) } + + let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data + + return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) + .then { certificateData in + return RCTVideoDRM.fetchSpcData( + loadingRequest: loadingRequest, + certificateData: certificateData, + contentIdData: contentIdData + ) + } + .then { spcData -> Promise in + guard let licenseServer = licenseServer else { + throw RCTVideoError.noLicenseServerURL as! 
Error + } + return RCTVideoDRM.fetchLicense( + licenseServer: licenseServer, + spcData: spcData, + contentId: contentId, + headers: headers + ) + } + } } diff --git a/ios/Video/Features/RCTVideoErrorHandling.swift b/ios/Video/Features/RCTVideoErrorHandling.swift index e795aa2865..caee88bb10 100644 --- a/ios/Video/Features/RCTVideoErrorHandling.swift +++ b/ios/Video/Features/RCTVideoErrorHandling.swift @@ -1,103 +1,114 @@ -enum RCTVideoError : Int { - case fromJSPart - case noLicenseServerURL - case licenseRequestNotOk - case noDataFromLicenseRequest - case noSPC - case noDataRequest - case noCertificateData - case noCertificateURL - case noFairplayDRM - case noDRMData - case invalidContentId +// MARK: - RCTVideoError + +enum RCTVideoError: Int { + case fromJSPart + case noLicenseServerURL + case licenseRequestNotOk + case noDataFromLicenseRequest + case noSPC + case noDataRequest + case noCertificateData + case noCertificateURL + case noFairplayDRM + case noDRMData + case invalidContentId } +// MARK: - RCTVideoErrorHandler + enum RCTVideoErrorHandler { - - static let noDRMData = NSError( - domain: "RCTVideo", - code: RCTVideoError.noDRMData.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No drm object found.", - NSLocalizedRecoverySuggestionErrorKey: "Have you specified the 'drm' prop?" - ]) - - static let noCertificateURL = NSError( - domain: "RCTVideo", - code: RCTVideoError.noCertificateURL.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM License.", - NSLocalizedFailureReasonErrorKey: "No certificate URL has been found.", - NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop certificateUrl?" - ]) - - static let noCertificateData = NSError( - domain: "RCTVideo", - code: RCTVideoError.noCertificateData.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No certificate data obtained from the specificied url.", - NSLocalizedRecoverySuggestionErrorKey: "Have you specified a valid 'certificateUrl'?" - ]) - - static let noSPC = NSError( - domain: "RCTVideo", - code: RCTVideoError.noSPC.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining license.", - NSLocalizedFailureReasonErrorKey: "No spc received.", - NSLocalizedRecoverySuggestionErrorKey: "Check your DRM config." - ]) - - static let noLicenseServerURL = NSError( - domain: "RCTVideo", - code: RCTVideoError.noLicenseServerURL.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM License.", - NSLocalizedFailureReasonErrorKey: "No license server URL has been found.", - NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop licenseServer?" - ]) - - static let noDataFromLicenseRequest = NSError( - domain: "RCTVideo", - code: RCTVideoError.noDataFromLicenseRequest.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No data received from the license server.", - NSLocalizedRecoverySuggestionErrorKey: "Is the licenseServer ok?" 
- ]) - - static func licenseRequestNotOk(_ statusCode: Int) -> NSError { - return NSError( - domain: "RCTVideo", - code: RCTVideoError.licenseRequestNotOk.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining license.", - NSLocalizedFailureReasonErrorKey: String( - format:"License server responded with status code %li", - (statusCode) - ), - NSLocalizedRecoverySuggestionErrorKey: "Did you send the correct data to the license Server? Is the server ok?" - ]) - } + static let noDRMData = NSError( + domain: "RCTVideo", + code: RCTVideoError.noDRMData.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No drm object found.", + NSLocalizedRecoverySuggestionErrorKey: "Have you specified the 'drm' prop?", + ] + ) + + static let noCertificateURL = NSError( + domain: "RCTVideo", + code: RCTVideoError.noCertificateURL.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM License.", + NSLocalizedFailureReasonErrorKey: "No certificate URL has been found.", + NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop certificateUrl?", + ] + ) + + static let noCertificateData = NSError( + domain: "RCTVideo", + code: RCTVideoError.noCertificateData.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No certificate data obtained from the specificied url.", + NSLocalizedRecoverySuggestionErrorKey: "Have you specified a valid 'certificateUrl'?", + ] + ) + + static let noSPC = NSError( + domain: "RCTVideo", + code: RCTVideoError.noSPC.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining license.", + NSLocalizedFailureReasonErrorKey: "No spc received.", + NSLocalizedRecoverySuggestionErrorKey: "Check your DRM config.", + ] + ) + + static let noLicenseServerURL = NSError( + domain: "RCTVideo", + code: RCTVideoError.noLicenseServerURL.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM License.", + NSLocalizedFailureReasonErrorKey: "No license server URL has been found.", + NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop licenseServer?", + ] + ) + + static let noDataFromLicenseRequest = NSError( + domain: "RCTVideo", + code: RCTVideoError.noDataFromLicenseRequest.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No data received from the license server.", + NSLocalizedRecoverySuggestionErrorKey: "Is the licenseServer ok?", + ] + ) + + static func licenseRequestNotOk(_ statusCode: Int) -> NSError { + return NSError( + domain: "RCTVideo", + code: RCTVideoError.licenseRequestNotOk.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining license.", + NSLocalizedFailureReasonErrorKey: String( + format: "License server responded with status code %li", + statusCode + ), + NSLocalizedRecoverySuggestionErrorKey: "Did you send the correct data to the license Server? 
Is the server ok?", + ] + ) + } + + static func fromJSPart(_ error: String) -> NSError { + return NSError(domain: "RCTVideo", + code: RCTVideoError.fromJSPart.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: error, + NSLocalizedFailureReasonErrorKey: error, + NSLocalizedRecoverySuggestionErrorKey: error, + ]) + } - static func fromJSPart(_ error: String) -> NSError { - return NSError(domain: "RCTVideo", - code: RCTVideoError.fromJSPart.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: error, - NSLocalizedFailureReasonErrorKey: error, - NSLocalizedRecoverySuggestionErrorKey: error - ]) - } - - static let invalidContentId = NSError( - domain: "RCTVideo", - code: RCTVideoError.invalidContentId.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No valide content Id received", - NSLocalizedRecoverySuggestionErrorKey: "Is the contentId and url ok?" - ]) + static let invalidContentId = NSError( + domain: "RCTVideo", + code: RCTVideoError.invalidContentId.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No valide content Id received", + NSLocalizedRecoverySuggestionErrorKey: "Is the contentId and url ok?", + ] + ) } diff --git a/ios/Video/Features/RCTVideoSave.swift b/ios/Video/Features/RCTVideoSave.swift index ff8155ec2a..66b9d0a3d7 100644 --- a/ios/Video/Features/RCTVideoSave.swift +++ b/ios/Video/Features/RCTVideoSave.swift @@ -1,75 +1,69 @@ import AVFoundation enum RCTVideoSave { + static func save( + options _: NSDictionary!, + resolve: @escaping RCTPromiseResolveBlock, + reject: @escaping RCTPromiseRejectBlock, - static func save( - options:NSDictionary!, - resolve: @escaping RCTPromiseResolveBlock, - reject:@escaping RCTPromiseRejectBlock, - - playerItem: AVPlayerItem? - ) { - let asset:AVAsset! = playerItem?.asset - - guard asset != nil else { - reject("ERROR_ASSET_NIL", "Asset is nil", nil) - return - } - - guard let exportSession = AVAssetExportSession(asset: asset, presetName:AVAssetExportPresetHighestQuality) else { - reject("ERROR_COULD_NOT_CREATE_EXPORT_SESSION", "Could not create export session", nil) - return - } - var path:String! = nil - path = RCTVideoSave.generatePathInDirectory( - directory: URL(fileURLWithPath: RCTVideoSave.cacheDirectoryPath() ?? "").appendingPathComponent("Videos").path, - withExtension: ".mp4") - let url:NSURL! = NSURL.fileURL(withPath: path) as NSURL - exportSession.outputFileType = AVFileType.mp4 - exportSession.outputURL = url as URL? - exportSession.videoComposition = playerItem?.videoComposition - exportSession.shouldOptimizeForNetworkUse = true - exportSession.exportAsynchronously(completionHandler: { - - switch (exportSession.status) { - case .failed: - reject("ERROR_COULD_NOT_EXPORT_VIDEO", "Could not export video", exportSession.error) - break - case .cancelled: - reject("ERROR_EXPORT_SESSION_CANCELLED", "Export session was cancelled", exportSession.error) - break - default: - resolve(["uri": url.absoluteString]) - break - } - - }) - } - - static func generatePathInDirectory(directory: String?, withExtension `extension`: String?) -> String? { - let fileName = UUID().uuidString + (`extension` ?? "") - RCTVideoSave.ensureDirExists(withPath: directory) - return URL(fileURLWithPath: directory ?? "").appendingPathComponent(fileName).path + playerItem: AVPlayerItem? + ) { + let asset: AVAsset! 
= playerItem?.asset + + guard asset != nil else { + reject("ERROR_ASSET_NIL", "Asset is nil", nil) + return } - - static func cacheDirectoryPath() -> String? { - let array = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).map(\.path) - return array[0] + + guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else { + reject("ERROR_COULD_NOT_CREATE_EXPORT_SESSION", "Could not create export session", nil) + return } - - static func ensureDirExists(withPath path: String?) -> Bool { - var isDir: ObjCBool = false - var error: Error? - let exists = FileManager.default.fileExists(atPath: path ?? "", isDirectory: &isDir) - if !(exists && isDir.boolValue) { - do { - try FileManager.default.createDirectory(atPath: path ?? "", withIntermediateDirectories: true, attributes: nil) - } catch { - } - if error != nil { - return false - } - } - return true + var path: String! + path = RCTVideoSave.generatePathInDirectory( + directory: URL(fileURLWithPath: RCTVideoSave.cacheDirectoryPath() ?? "").appendingPathComponent("Videos").path, + withExtension: ".mp4" + ) + let url: NSURL! = NSURL.fileURL(withPath: path) as NSURL + exportSession.outputFileType = AVFileType.mp4 + exportSession.outputURL = url as URL? + exportSession.videoComposition = playerItem?.videoComposition + exportSession.shouldOptimizeForNetworkUse = true + exportSession.exportAsynchronously(completionHandler: { + switch exportSession.status { + case .failed: + reject("ERROR_COULD_NOT_EXPORT_VIDEO", "Could not export video", exportSession.error) + case .cancelled: + reject("ERROR_EXPORT_SESSION_CANCELLED", "Export session was cancelled", exportSession.error) + default: + resolve(["uri": url.absoluteString]) + } + }) + } + + static func generatePathInDirectory(directory: String?, withExtension extension: String?) -> String? { + let fileName = UUID().uuidString + (`extension` ?? "") + RCTVideoSave.ensureDirExists(withPath: directory) + return URL(fileURLWithPath: directory ?? "").appendingPathComponent(fileName).path + } + + static func cacheDirectoryPath() -> String? { + let array = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).map(\.path) + return array[0] + } + + static func ensureDirExists(withPath path: String?) -> Bool { + var isDir: ObjCBool = false + var error: Error? + let exists = FileManager.default.fileExists(atPath: path ?? "", isDirectory: &isDir) + if !(exists && isDir.boolValue) { + do { + try FileManager.default.createDirectory(atPath: path ?? "", withIntermediateDirectories: true, attributes: nil) + } catch {} + if error != nil { + return false + } } + return true + } } diff --git a/ios/Video/Features/RCTVideoTVUtils.swift b/ios/Video/Features/RCTVideoTVUtils.swift index 71f2a4a4af..84d8f1e03e 100644 --- a/ios/Video/Features/RCTVideoTVUtils.swift +++ b/ios/Video/Features/RCTVideoTVUtils.swift @@ -1,49 +1,48 @@ -import Foundation import AVFoundation import AVKit +import Foundation /*! * Collection of helper functions for tvOS specific features */ #if os(tvOS) -enum RCTVideoTVUtils { + enum RCTVideoTVUtils { static func makeNavigationMarkerGroups(_ chapters: [Chapter]) -> [AVNavigationMarkersGroup] { - var metadataGroups = [AVTimedMetadataGroup]() + var metadataGroups = [AVTimedMetadataGroup]() - // Iterate over the defined chapters and build a timed metadata group object for each. 
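// Usage sketch for the save helper above (illustrative only; `player` is assumed to be an
// existing AVPlayer, and the resolve/reject blocks here just log):
RCTVideoSave.save(
  options: nil, // the options dictionary is currently unused
  resolve: { result in print("exported:", String(describing: result)) },
  reject: { code, message, error in print("export failed:", code ?? "", message ?? "", String(describing: error)) },
  playerItem: player.currentItem
)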
- chapters.forEach { chapter in - metadataGroups.append(makeTimedMetadataGroup(for: chapter)) - } + // Iterate over the defined chapters and build a timed metadata group object for each. + chapters.forEach { chapter in + metadataGroups.append(makeTimedMetadataGroup(for: chapter)) + } - return [AVNavigationMarkersGroup(title: nil, timedNavigationMarkers: metadataGroups)] + return [AVNavigationMarkersGroup(title: nil, timedNavigationMarkers: metadataGroups)] } static func makeTimedMetadataGroup(for chapter: Chapter) -> AVTimedMetadataGroup { - var metadata = [AVMetadataItem]() - - // Create a metadata item that contains the chapter title. - let titleItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierTitle, value: chapter.title) - metadata.append(titleItem) - - // Create a time range for the metadata group. - let timescale: Int32 = 600 - let startTime = CMTime(seconds: chapter.startTime, preferredTimescale: timescale) - let endTime = CMTime(seconds: chapter.endTime, preferredTimescale: timescale) - let timeRange = CMTimeRangeFromTimeToTime(start: startTime, end: endTime) - - // Image - if let imgUri = chapter.uri, - let uri = URL(string: imgUri), - let imgData = try? Data(contentsOf: uri), - let image = UIImage(data: imgData), - let pngData = image.pngData() - { - let imageItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierArtwork, value: pngData) - metadata.append(imageItem) - } - - return AVTimedMetadataGroup(items: metadata, timeRange: timeRange) + var metadata = [AVMetadataItem]() + + // Create a metadata item that contains the chapter title. + let titleItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierTitle, value: chapter.title) + metadata.append(titleItem) + + // Create a time range for the metadata group. + let timescale: Int32 = 600 + let startTime = CMTime(seconds: chapter.startTime, preferredTimescale: timescale) + let endTime = CMTime(seconds: chapter.endTime, preferredTimescale: timescale) + let timeRange = CMTimeRangeFromTimeToTime(start: startTime, end: endTime) + + // Image + if let imgUri = chapter.uri, + let uri = URL(string: imgUri), + let imgData = try? Data(contentsOf: uri), + let image = UIImage(data: imgData), + let pngData = image.pngData() { + let imageItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierArtwork, value: pngData) + metadata.append(imageItem) + } + + return AVTimedMetadataGroup(items: metadata, timeRange: timeRange) } -} + } #endif diff --git a/ios/Video/Features/RCTVideoUtils.swift b/ios/Video/Features/RCTVideoUtils.swift index 6df65d9a8c..f818b529b5 100644 --- a/ios/Video/Features/RCTVideoUtils.swift +++ b/ios/Video/Features/RCTVideoUtils.swift @@ -1,343 +1,344 @@ import AVFoundation -import Promises import Photos +import Promises /*! * Collection of pure functions */ enum RCTVideoUtils { - - /*! - * Calculates and returns the playable duration of the current player item using its loaded time ranges. - * - * \returns The playable duration of the current player item in seconds. - */ - static func calculatePlayableDuration(_ player:AVPlayer?, withSource source:VideoSource?) -> NSNumber { - guard let player = player, - let video:AVPlayerItem = player.currentItem, - video.status == AVPlayerItem.Status.readyToPlay else { - return 0 - } - - if (source?.cropStart != nil && source?.cropEnd != nil) { - return NSNumber(value: (Float64(source?.cropEnd ?? 0) - Float64(source?.cropStart ?? 0)) / 1000) - } - - var effectiveTimeRange:CMTimeRange? 
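// Usage sketch for the chapter helpers above (tvOS only; `playerItem` and the chapter
// values are illustrative):
#if os(tvOS)
  let chapters = [Chapter([
    "title": "Intro",
    "startTime": 0.0,
    "endTime": 30.0,
  ] as NSDictionary)]
  playerItem.navigationMarkerGroups = RCTVideoTVUtils.makeNavigationMarkerGroups(chapters)
#endif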
- for (_, value) in video.loadedTimeRanges.enumerated() { - let timeRange:CMTimeRange = value.timeRangeValue - if CMTimeRangeContainsTime(timeRange, time: video.currentTime()) { - effectiveTimeRange = timeRange - break - } - } - - if let effectiveTimeRange = effectiveTimeRange { - let playableDuration:Float64 = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)) - if playableDuration > 0 { - if (source?.cropStart != nil) { - return NSNumber(value: (playableDuration - Float64(source?.cropStart ?? 0) / 1000)) - } - - return playableDuration as NSNumber - } - } - - return 0 + /*! + * Calculates and returns the playable duration of the current player item using its loaded time ranges. + * + * \returns The playable duration of the current player item in seconds. + */ + static func calculatePlayableDuration(_ player: AVPlayer?, withSource source: VideoSource?) -> NSNumber { + guard let player = player, + let video: AVPlayerItem = player.currentItem, + video.status == AVPlayerItem.Status.readyToPlay else { + return 0 } - static func urlFilePath(filepath:NSString!, searchPath:FileManager.SearchPathDirectory) -> NSURL! { - if filepath.contains("file://") { - return NSURL(string: filepath as String) - } - - // if no file found, check if the file exists in the Document directory - let paths:[String]! = NSSearchPathForDirectoriesInDomains(searchPath, .userDomainMask, true) - var relativeFilePath:String! = filepath.lastPathComponent - // the file may be multiple levels below the documents directory - let directoryString:String! = searchPath == .cachesDirectory ? "Library/Caches/" : "Documents"; - let fileComponents:[String]! = filepath.components(separatedBy: directoryString) - if fileComponents.count > 1 { - relativeFilePath = fileComponents[1] - } - - let path:String! = (paths.first! as NSString).appendingPathComponent(relativeFilePath) - if FileManager.default.fileExists(atPath: path) { - return NSURL.fileURL(withPath: path) as NSURL - } - return nil - } - - static func playerItemSeekableTimeRange(_ player:AVPlayer?) -> CMTimeRange { - if let playerItem = player?.currentItem, - playerItem.status == .readyToPlay, - let firstItem = playerItem.seekableTimeRanges.first { - return firstItem.timeRangeValue - } - - return (CMTimeRange.zero) + if source?.cropStart != nil && source?.cropEnd != nil { + return NSNumber(value: (Float64(source?.cropEnd ?? 0) - Float64(source?.cropStart ?? 0)) / 1000) } - - static func playerItemDuration(_ player:AVPlayer?) -> CMTime { - if let playerItem = player?.currentItem, - playerItem.status == .readyToPlay { - return(playerItem.duration) - } - - return(CMTime.invalid) + + var effectiveTimeRange: CMTimeRange? + for (_, value) in video.loadedTimeRanges.enumerated() { + let timeRange: CMTimeRange = value.timeRangeValue + if CMTimeRangeContainsTime(timeRange, time: video.currentTime()) { + effectiveTimeRange = timeRange + break + } } - - static func calculateSeekableDuration(_ player:AVPlayer?) -> NSNumber { - let timeRange:CMTimeRange = RCTVideoUtils.playerItemSeekableTimeRange(player) - if CMTIME_IS_NUMERIC(timeRange.duration) - { - return NSNumber(value: CMTimeGetSeconds(timeRange.duration)) + + if let effectiveTimeRange = effectiveTimeRange { + let playableDuration: Float64 = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)) + if playableDuration > 0 { + if source?.cropStart != nil { + return NSNumber(value: playableDuration - Float64(source?.cropStart ?? 
0) / 1000) } - return 0 + + return playableDuration as NSNumber + } } - - static func getAudioTrackInfo(_ player:AVPlayer?) -> [AnyObject]! { - guard let player = player else { - return [] - } - let audioTracks:NSMutableArray! = NSMutableArray() - let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .audible) - for i in 0..<(group?.options.count ?? 0) { - let currentOption = group?.options[i] - var title = "" - let values = currentOption?.commonMetadata.map(\.value) - if (values?.count ?? 0) > 0, let value = values?[0] { - title = value as! String - } - let language:String! = currentOption?.extendedLanguageTag ?? "" - - let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) - - let audioTrack = [ - "index": NSNumber(value: i), - "title": title, - "language": language ?? "", - "selected": currentOption?.displayName == selectedOption?.displayName - ] as [String : Any] - audioTracks.add(audioTrack) - } - return audioTracks as [AnyObject]? + return 0 + } + + static func urlFilePath(filepath: NSString!, searchPath: FileManager.SearchPathDirectory) -> NSURL! { + if filepath.contains("file://") { + return NSURL(string: filepath as String) } - - static func getTextTrackInfo(_ player:AVPlayer?) -> [TextTrack]! { - guard let player = player else { - return [] - } - // if streaming video, we extract the text tracks - var textTracks:[TextTrack] = [] - let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .legible) - for i in 0..<(group?.options.count ?? 0) { - let currentOption = group?.options[i] - var title = "" - let values = currentOption?.commonMetadata.map(\.value) - if (values?.count ?? 0) > 0, let value = values?[0] { - title = value as! String - } - let language:String! = currentOption?.extendedLanguageTag ?? "" - let selectedOpt = player.currentItem?.currentMediaSelection - let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) - let textTrack = TextTrack([ - "index": NSNumber(value: i), - "title": title, - "language": language, - "selected": currentOption?.displayName == selectedOption?.displayName - ]) - textTracks.append(textTrack) - } - return textTracks + // if no file found, check if the file exists in the Document directory + let paths: [String]! = NSSearchPathForDirectoriesInDomains(searchPath, .userDomainMask, true) + var relativeFilePath: String! = filepath.lastPathComponent + // the file may be multiple levels below the documents directory + let directoryString: String! = searchPath == .cachesDirectory ? "Library/Caches/" : "Documents" + let fileComponents: [String]! = filepath.components(separatedBy: directoryString) + if fileComponents.count > 1 { + relativeFilePath = fileComponents[1] } - - // UNUSED - static func getCurrentTime(playerItem:AVPlayerItem?) -> Float { - return Float(CMTimeGetSeconds(playerItem?.currentTime() ?? .zero)) + + let path: String! = (paths.first! as NSString).appendingPathComponent(relativeFilePath) + if FileManager.default.fileExists(atPath: path) { + return NSURL.fileURL(withPath: path) as NSURL } - - static func base64DataFromBase64String(base64String:String?) -> Data? { - if let base64String = base64String { - return Data(base64Encoded:base64String) - } - return nil + return nil + } + + static func playerItemSeekableTimeRange(_ player: AVPlayer?) 
-> CMTimeRange { + if let playerItem = player?.currentItem, + playerItem.status == .readyToPlay, + let firstItem = playerItem.seekableTimeRanges.first { + return firstItem.timeRangeValue } - static func replaceURLScheme(url: URL, scheme: String?) -> URL? { - var urlComponents = URLComponents(url: url, resolvingAgainstBaseURL: false) - urlComponents?.scheme = scheme + return CMTimeRange.zero + } - return urlComponents?.url + static func playerItemDuration(_ player: AVPlayer?) -> CMTime { + if let playerItem = player?.currentItem, + playerItem.status == .readyToPlay { + return playerItem.duration } - static func extractDataFromCustomSchemeUrl(from url: URL, scheme: String) -> Data? { - guard url.scheme == scheme, - let adoptURL = RCTVideoUtils.replaceURLScheme(url:url, scheme: nil) else { return nil } + return CMTime.invalid + } - return Data(base64Encoded: adoptURL.absoluteString) + static func calculateSeekableDuration(_ player: AVPlayer?) -> NSNumber { + let timeRange: CMTimeRange = RCTVideoUtils.playerItemSeekableTimeRange(player) + if CMTIME_IS_NUMERIC(timeRange.duration) { + return NSNumber(value: CMTimeGetSeconds(timeRange.duration)) } - - static func generateMixComposition(_ asset:AVAsset) -> AVMutableComposition { - let mixComposition:AVMutableComposition = AVMutableComposition() - - let videoAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first - - // we need videoAsset asset to be not null to get durration later - if videoAsset == nil { - return mixComposition - } - - let videoCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID:kCMPersistentTrackID_Invalid) - try? videoCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), - of: videoAsset, - at: .zero) - - let audioAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first - let audioCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID:kCMPersistentTrackID_Invalid) - try? audioCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), - of: audioAsset, - at: .zero) - - return mixComposition + return 0 + } + + static func getAudioTrackInfo(_ player: AVPlayer?) -> [AnyObject]! { + guard let player = player else { + return [] } - - static func getValidTextTracks(asset:AVAsset, assetOptions:NSDictionary?, mixComposition:AVMutableComposition, textTracks:[TextTrack]?) -> [TextTrack] { - let videoAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first - var validTextTracks:[TextTrack] = [] - - if let textTracks = textTracks, textTracks.count > 0 { - for i in 0.. 0, let value = values?[0] { + title = value as! String + } + let language: String! = currentOption?.extendedLanguageTag ?? "" + + let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) + + let audioTrack = [ + "index": NSNumber(value: i), + "title": title, + "language": language ?? "", + "selected": currentOption?.displayName == selectedOption?.displayName, + ] as [String: Any] + audioTracks.add(audioTrack) } + return audioTracks as [AnyObject]? + } - /* - * Create an useless / almost empty VTT file in the list with available tracks. This track gets selected when you give type: "disabled" as the selectedTextTrack - * This is needed because there is a bug where sideloaded texttracks cannot be disabled in the AVPlayer. 
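// Usage sketch for the sideloaded-subtitle path described above (illustrative only; `asset`
// and the subtitle URL are placeholders):
let sideloaded = [TextTrack([
  "type": "text/vtt",
  "language": "en",
  "title": "English",
  "uri": "https://example.com/subtitles/en.vtt",
] as NSDictionary)]
let mixComposition = RCTVideoUtils.generateMixComposition(asset)
let validTracks = RCTVideoUtils.getValidTextTracks(
  asset: asset,
  assetOptions: [:], // request headers for remote subtitles would go here
  mixComposition: mixComposition,
  textTracks: sideloaded
)
// validTracks ends with the synthetic "disabled" entry produced by createEmptyVttFile(),
// which is what gets selected when the JS side asks for the "disabled" text track type.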
Loading this VTT file instead solves that problem. - * For more info see: https://github.com/react-native-community/react-native-video/issues/1144 - */ - static func createEmptyVttFile() -> TextTrack? { - let fileManager = FileManager.default - let cachesDirectoryUrl = fileManager.urls(for: .cachesDirectory, in: .userDomainMask)[0] - let filePath = cachesDirectoryUrl.appendingPathComponent("empty.vtt").path - - if !fileManager.fileExists(atPath: filePath) { - let stringToWrite = "WEBVTT\n\n1\n99:59:59.000 --> 99:59:59.001\n." - - do { - try stringToWrite.write(to: URL(fileURLWithPath: filePath), atomically: true, encoding: String.Encoding.utf8) - } catch { - return nil - } - } - - return TextTrack([ - "language": "disabled", - "title": "EmptyVttFile", - "type": "text/vtt", - "uri": filePath, - ]) + static func getTextTrackInfo(_ player: AVPlayer?) -> [TextTrack]! { + guard let player = player else { + return [] } - - static func delay(seconds: Int = 0) -> Promise { - return Promise(on: .global()) { fulfill, reject in - DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(seconds)) / Double(NSEC_PER_SEC), execute: { - fulfill(()) - }) - } + + // if streaming video, we extract the text tracks + var textTracks: [TextTrack] = [] + let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .legible) + for i in 0 ..< (group?.options.count ?? 0) { + let currentOption = group?.options[i] + var title = "" + let values = currentOption?.commonMetadata.map(\.value) + if (values?.count ?? 0) > 0, let value = values?[0] { + title = value as! String + } + let language: String! = currentOption?.extendedLanguageTag ?? "" + let selectedOpt = player.currentItem?.currentMediaSelection + let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) + let textTrack = TextTrack([ + "index": NSNumber(value: i), + "title": title, + "language": language, + "selected": currentOption?.displayName == selectedOption?.displayName, + ]) + textTracks.append(textTrack) } - - static func preparePHAsset(uri: String) -> Promise { - return Promise(on: .global()) { fulfill, reject in - let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...]) - guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else { - reject(NSError(domain: "", code: 0, userInfo: nil)) - return - } - let options = PHVideoRequestOptions() - options.isNetworkAccessAllowed = true - PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in - fulfill(data) - } - } + return textTracks + } + + // UNUSED + static func getCurrentTime(playerItem: AVPlayerItem?) -> Float { + return Float(CMTimeGetSeconds(playerItem?.currentTime() ?? .zero)) + } + + static func base64DataFromBase64String(base64String: String?) -> Data? { + if let base64String = base64String { + return Data(base64Encoded: base64String) + } + return nil + } + + static func replaceURLScheme(url: URL, scheme: String?) -> URL? { + var urlComponents = URLComponents(url: url, resolvingAgainstBaseURL: false) + urlComponents?.scheme = scheme + + return urlComponents?.url + } + + static func extractDataFromCustomSchemeUrl(from url: URL, scheme: String) -> Data? 
{ + guard url.scheme == scheme, + let adoptURL = RCTVideoUtils.replaceURLScheme(url: url, scheme: nil) else { return nil } + + return Data(base64Encoded: adoptURL.absoluteString) + } + + static func generateMixComposition(_ asset: AVAsset) -> AVMutableComposition { + let mixComposition = AVMutableComposition() + + let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first + + // we need videoAsset asset to be not null to get durration later + if videoAsset == nil { + return mixComposition } - - static func prepareAsset(source:VideoSource) -> (asset:AVURLAsset?, assetOptions:NSMutableDictionary?)? { - guard let sourceUri = source.uri, sourceUri != "" else { return nil } - var asset:AVURLAsset! - let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? "" - let url = source.isNetwork || source.isAsset - ? URL(string: source.uri?.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? "") - : URL(fileURLWithPath: bundlePath) - let assetOptions:NSMutableDictionary! = NSMutableDictionary() - - if source.isNetwork { - if let headers = source.requestHeaders, headers.count > 0 { - assetOptions.setObject(headers, forKey:"AVURLAssetHTTPHeaderFieldsKey" as NSCopying) - } - let cookies:[AnyObject]! = HTTPCookieStorage.shared.cookies - assetOptions.setObject(cookies, forKey:AVURLAssetHTTPCookiesKey as NSCopying) - asset = AVURLAsset(url: url!, options:assetOptions as! [String : Any]) + + let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid) + try? videoCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), + of: videoAsset, + at: .zero + ) + + let audioAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first + let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid) + try? audioCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), + of: audioAsset, + at: .zero + ) + + return mixComposition + } + + static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, textTracks: [TextTrack]?) -> [TextTrack] { + let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first + var validTextTracks: [TextTrack] = [] + + if let textTracks = textTracks, !textTracks.isEmpty { + for i in 0 ..< textTracks.count { + var textURLAsset: AVURLAsset! + let textUri: String = textTracks[i].uri + if textUri.lowercased().hasPrefix("http") { + textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any])) } else { - asset = AVURLAsset(url: url!) + let isDisabledTrack: Bool! = textTracks[i].type == "disabled" + let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory + textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, options: nil) } - return (asset, assetOptions) - } - - static func createMetadataItems(for mapping: [AVMetadataIdentifier: Any]) -> [AVMetadataItem] { - return mapping.compactMap { createMetadataItem(for:$0, value:$1) } + let textTrackAsset: AVAssetTrack! 
= textURLAsset.tracks(withMediaType: AVMediaType.text).first + if textTrackAsset == nil { continue } // fix when there's no textTrackAsset + validTextTracks.append(textTracks[i]) + let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, + preferredTrackID: kCMPersistentTrackID_Invalid) + if videoAsset != nil { + try? textCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration), + of: textTrackAsset, + at: .zero + ) + } + } } - static func createMetadataItem(for identifier: AVMetadataIdentifier, - value: Any) -> AVMetadataItem { - let item = AVMutableMetadataItem() - item.identifier = identifier - item.value = value as? NSCopying & NSObjectProtocol - // Specify "und" to indicate an undefined language. - item.extendedLanguageTag = "und" - return item.copy() as! AVMetadataItem + let emptyVttFile: TextTrack? = self.createEmptyVttFile() + if emptyVttFile != nil { + validTextTracks.append(emptyVttFile!) } - - static func createImageMetadataItem(imageUri: String) -> Data? { - if let uri = URL(string: imageUri), - let imgData = try? Data(contentsOf: uri), - let image = UIImage(data: imgData), - let pngData = image.pngData() { - return pngData - } - + + return validTextTracks + } + + /* + * Create an useless / almost empty VTT file in the list with available tracks. This track gets selected when you give type: "disabled" as the selectedTextTrack + * This is needed because there is a bug where sideloaded texttracks cannot be disabled in the AVPlayer. Loading this VTT file instead solves that problem. + * For more info see: https://github.com/react-native-community/react-native-video/issues/1144 + */ + static func createEmptyVttFile() -> TextTrack? { + let fileManager = FileManager.default + let cachesDirectoryUrl = fileManager.urls(for: .cachesDirectory, in: .userDomainMask)[0] + let filePath = cachesDirectoryUrl.appendingPathComponent("empty.vtt").path + + if !fileManager.fileExists(atPath: filePath) { + let stringToWrite = "WEBVTT\n\n1\n99:59:59.000 --> 99:59:59.001\n." + + do { + try stringToWrite.write(to: URL(fileURLWithPath: filePath), atomically: true, encoding: String.Encoding.utf8) + } catch { return nil + } + } + + return TextTrack([ + "language": "disabled", + "title": "EmptyVttFile", + "type": "text/vtt", + "uri": filePath, + ]) + } + + static func delay(seconds: Int = 0) -> Promise { + return Promise(on: .global()) { fulfill, _ in + DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(seconds)) / Double(NSEC_PER_SEC)) { + fulfill(()) + } + } + } + + static func preparePHAsset(uri: String) -> Promise { + return Promise(on: .global()) { fulfill, reject in + let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...]) + guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else { + reject(NSError(domain: "", code: 0, userInfo: nil)) + return + } + let options = PHVideoRequestOptions() + options.isNetworkAccessAllowed = true + PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in + fulfill(data) + } } + } + + static func prepareAsset(source: VideoSource) -> (asset: AVURLAsset?, assetOptions: NSMutableDictionary?)? { + guard let sourceUri = source.uri, sourceUri != "" else { return nil } + var asset: AVURLAsset! + let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? "" + let url = source.isNetwork || source.isAsset + ? 
URL(string: source.uri?.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? "") + : URL(fileURLWithPath: bundlePath) + let assetOptions: NSMutableDictionary! = NSMutableDictionary() + + if source.isNetwork { + if let headers = source.requestHeaders, !headers.isEmpty { + assetOptions.setObject(headers, forKey: "AVURLAssetHTTPHeaderFieldsKey" as NSCopying) + } + let cookies: [AnyObject]! = HTTPCookieStorage.shared.cookies + assetOptions.setObject(cookies, forKey: AVURLAssetHTTPCookiesKey as NSCopying) + asset = AVURLAsset(url: url!, options: assetOptions as! [String: Any]) + } else { + asset = AVURLAsset(url: url!) + } + return (asset, assetOptions) + } + + static func createMetadataItems(for mapping: [AVMetadataIdentifier: Any]) -> [AVMetadataItem] { + return mapping.compactMap { createMetadataItem(for: $0, value: $1) } + } + + static func createMetadataItem(for identifier: AVMetadataIdentifier, + value: Any) -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.identifier = identifier + item.value = value as? NSCopying & NSObjectProtocol + // Specify "und" to indicate an undefined language. + item.extendedLanguageTag = "und" + return item.copy() as! AVMetadataItem + } + + static func createImageMetadataItem(imageUri: String) -> Data? { + if let uri = URL(string: imageUri), + let imgData = try? Data(contentsOf: uri), + let image = UIImage(data: imgData), + let pngData = image.pngData() { + return pngData + } + + return nil + } } diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift index 530fe1fd84..1c46a8c0d2 100644 --- a/ios/Video/RCTVideo.swift +++ b/ios/Video/RCTVideo.swift @@ -2,1355 +2,1348 @@ import AVFoundation import AVKit import Foundation #if USE_GOOGLE_IMA -import GoogleInteractiveMediaAds + import GoogleInteractiveMediaAds #endif -import React import Promises +import React -class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverHandler { +// MARK: - RCTVideo - private var _player:AVPlayer? - private var _playerItem:AVPlayerItem? - private var _source:VideoSource? - private var _playerBufferEmpty:Bool = true - private var _playerLayer:AVPlayerLayer? - private var _chapters:[Chapter]? - - private var _playerViewController:RCTVideoPlayerViewController? - private var _videoURL:NSURL? - - /* DRM */ - private var _drm:DRMParams? - - private var _localSourceEncryptionKeyScheme:String? - - /* Required to publish events */ - private var _eventDispatcher:RCTEventDispatcher? - private var _videoLoadStarted:Bool = false - - private var _pendingSeek:Bool = false - private var _pendingSeekTime:Float = 0.0 - private var _lastSeekTime:Float = 0.0 - - /* For sending videoProgress events */ - private var _controls:Bool = false - - /* Keep track of any modifiers, need to be applied after each play */ - private var _audioOutput: String = "speaker" - private var _volume:Float = 1.0 - private var _rate:Float = 1.0 - private var _maxBitRate:Float? - - private var _automaticallyWaitsToMinimizeStalling:Bool = true - private var _muted:Bool = false - private var _paused:Bool = false - private var _repeat:Bool = false - private var _allowsExternalPlayback:Bool = true - private var _textTracks:[TextTrack]? - private var _selectedTextTrackCriteria:SelectedTrackCriteria? - private var _selectedAudioTrackCriteria:SelectedTrackCriteria? 
- private var _playbackStalled:Bool = false - private var _playInBackground:Bool = false - private var _preventsDisplaySleepDuringVideoPlayback:Bool = true - private var _preferredForwardBufferDuration:Float = 0.0 - private var _playWhenInactive:Bool = false - private var _ignoreSilentSwitch:String! = "inherit" // inherit, ignore, obey - private var _mixWithOthers:String! = "inherit" // inherit, mix, duck - private var _resizeMode:String! = "cover" - private var _fullscreen:Bool = false - private var _fullscreenAutorotate:Bool = true - private var _fullscreenOrientation:String! = "all" - private var _fullscreenPlayerPresented:Bool = false - private var _fullscreenUncontrolPlayerPresented:Bool = false // to call events switching full screen mode from player controls - private var _filterName:String! - private var _filterEnabled:Bool = false - private var _presentingViewController:UIViewController? - private var _pictureInPictureEnabled = false - private var _startPosition:Float64 = -1 - - /* IMA Ads */ - private var _adTagUrl:String? -#if USE_GOOGLE_IMA +class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverHandler { + private var _player: AVPlayer? + private var _playerItem: AVPlayerItem? + private var _source: VideoSource? + private var _playerBufferEmpty = true + private var _playerLayer: AVPlayerLayer? + private var _chapters: [Chapter]? + + private var _playerViewController: RCTVideoPlayerViewController? + private var _videoURL: NSURL? + + /* DRM */ + private var _drm: DRMParams? + + private var _localSourceEncryptionKeyScheme: String? + + /* Required to publish events */ + private var _eventDispatcher: RCTEventDispatcher? + private var _videoLoadStarted = false + + private var _pendingSeek = false + private var _pendingSeekTime: Float = 0.0 + private var _lastSeekTime: Float = 0.0 + + /* For sending videoProgress events */ + private var _controls = false + + /* Keep track of any modifiers, need to be applied after each play */ + private var _audioOutput: String = "speaker" + private var _volume: Float = 1.0 + private var _rate: Float = 1.0 + private var _maxBitRate: Float? + + private var _automaticallyWaitsToMinimizeStalling = true + private var _muted = false + private var _paused = false + private var _repeat = false + private var _allowsExternalPlayback = true + private var _textTracks: [TextTrack]? + private var _selectedTextTrackCriteria: SelectedTrackCriteria? + private var _selectedAudioTrackCriteria: SelectedTrackCriteria? + private var _playbackStalled = false + private var _playInBackground = false + private var _preventsDisplaySleepDuringVideoPlayback = true + private var _preferredForwardBufferDuration: Float = 0.0 + private var _playWhenInactive = false + private var _ignoreSilentSwitch: String! = "inherit" // inherit, ignore, obey + private var _mixWithOthers: String! = "inherit" // inherit, mix, duck + private var _resizeMode: String! = "cover" + private var _fullscreen = false + private var _fullscreenAutorotate = true + private var _fullscreenOrientation: String! = "all" + private var _fullscreenPlayerPresented = false + private var _fullscreenUncontrolPlayerPresented = false // to call events switching full screen mode from player controls + private var _filterName: String! + private var _filterEnabled = false + private var _presentingViewController: UIViewController? + private var _pictureInPictureEnabled = false + private var _startPosition: Float64 = -1 + + /* IMA Ads */ + private var _adTagUrl: String? 
+ #if USE_GOOGLE_IMA private var _imaAdsManager: RCTIMAAdsManager! /* Playhead used by the SDK to track content video progress and insert mid-rolls. */ private var _contentPlayhead: IMAAVPlayerContentPlayhead? -#endif - private var _didRequestAds:Bool = false - private var _adPlaying:Bool = false - - private var _resouceLoaderDelegate: RCTResourceLoaderDelegate? - private var _playerObserver: RCTPlayerObserver = RCTPlayerObserver() - -#if USE_VIDEO_CACHING - private let _videoCache:RCTVideoCachingHandler = RCTVideoCachingHandler() -#endif - -#if os(iOS) - private var _pip:RCTPictureInPicture? = nil -#endif - - // Events - @objc var onVideoLoadStart: RCTDirectEventBlock? - @objc var onVideoLoad: RCTDirectEventBlock? - @objc var onVideoBuffer: RCTDirectEventBlock? - @objc var onVideoError: RCTDirectEventBlock? - @objc var onVideoProgress: RCTDirectEventBlock? - @objc var onVideoBandwidthUpdate: RCTDirectEventBlock? - @objc var onVideoSeek: RCTDirectEventBlock? - @objc var onVideoEnd: RCTDirectEventBlock? - @objc var onTimedMetadata: RCTDirectEventBlock? - @objc var onVideoAudioBecomingNoisy: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerWillPresent: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerDidPresent: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerWillDismiss: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerDidDismiss: RCTDirectEventBlock? - @objc var onReadyForDisplay: RCTDirectEventBlock? - @objc var onPlaybackStalled: RCTDirectEventBlock? - @objc var onPlaybackResume: RCTDirectEventBlock? - @objc var onPlaybackRateChange: RCTDirectEventBlock? - @objc var onVolumeChange: RCTDirectEventBlock? - @objc var onVideoPlaybackStateChanged: RCTDirectEventBlock? - @objc var onVideoExternalPlaybackChange: RCTDirectEventBlock? - @objc var onPictureInPictureStatusChanged: RCTDirectEventBlock? - @objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock? - @objc var onGetLicense: RCTDirectEventBlock? - @objc var onReceiveAdEvent: RCTDirectEventBlock? - - @objc func _onPictureInPictureStatusChanged() { - onPictureInPictureStatusChanged?([ "isActive": NSNumber(value: true)]) - } - - @objc func _onRestoreUserInterfaceForPictureInPictureStop() { - onPictureInPictureStatusChanged?([ "isActive": NSNumber(value: false)]) - } - - func isPipEnabled () -> Bool { - return _pictureInPictureEnabled - } - - init(eventDispatcher:RCTEventDispatcher!) 
{ - super.init(frame: CGRect(x: 0, y: 0, width: 100, height: 100)) -#if USE_GOOGLE_IMA - _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) -#endif - - _eventDispatcher = eventDispatcher - -#if os(iOS) - _pip = RCTPictureInPicture(self._onPictureInPictureStatusChanged, self._onRestoreUserInterfaceForPictureInPictureStop) -#endif - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationWillResignActive(notification:)), - name: UIApplication.willResignActiveNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationDidBecomeActive(notification:)), - name: UIApplication.didBecomeActiveNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationDidEnterBackground(notification:)), - name: UIApplication.didEnterBackgroundNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationWillEnterForeground(notification:)), - name: UIApplication.willEnterForegroundNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(audioRouteChanged(notification:)), - name: AVAudioSession.routeChangeNotification, - object: nil - ) - _playerObserver._handlers = self -#if USE_VIDEO_CACHING - _videoCache.playerItemPrepareText = playerItemPrepareText -#endif - } - - required init?(coder aDecoder: NSCoder) { - super.init(coder: aDecoder) -#if USE_GOOGLE_IMA - _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) -#endif - } - - deinit { - NotificationCenter.default.removeObserver(self) - self.removePlayerLayer() - _playerObserver.clearPlayer() - } - - // MARK: - App lifecycle handlers - - @objc func applicationWillResignActive(notification:NSNotification!) { - if _playInBackground || _playWhenInactive || _paused {return} - - _player?.pause() - _player?.rate = 0.0 - } - - @objc func applicationDidBecomeActive(notification: NSNotification!) { - if _playInBackground || _playWhenInactive || _paused { return } - - // Resume the player or any other tasks that should continue when the app becomes active. - _player?.play() - _player?.rate = _rate - } - - @objc func applicationDidEnterBackground(notification:NSNotification!) { - if !_playInBackground { - // Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html - _playerLayer?.player = nil - _playerViewController?.player = nil - } - } - - @objc func applicationWillEnterForeground(notification:NSNotification!) { - self.applyModifiers() - if !_playInBackground { - _playerLayer?.player = _player - _playerViewController?.player = _player - } + #endif + private var _didRequestAds = false + private var _adPlaying = false + + private var _resouceLoaderDelegate: RCTResourceLoaderDelegate? + private var _playerObserver: RCTPlayerObserver = .init() + + #if USE_VIDEO_CACHING + private let _videoCache: RCTVideoCachingHandler = .init() + #endif + + #if os(iOS) + private var _pip: RCTPictureInPicture? + #endif + + // Events + @objc var onVideoLoadStart: RCTDirectEventBlock? + @objc var onVideoLoad: RCTDirectEventBlock? + @objc var onVideoBuffer: RCTDirectEventBlock? + @objc var onVideoError: RCTDirectEventBlock? + @objc var onVideoProgress: RCTDirectEventBlock? + @objc var onVideoBandwidthUpdate: RCTDirectEventBlock? + @objc var onVideoSeek: RCTDirectEventBlock? + @objc var onVideoEnd: RCTDirectEventBlock? + @objc var onTimedMetadata: RCTDirectEventBlock? 
+ @objc var onVideoAudioBecomingNoisy: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerWillPresent: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerDidPresent: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerWillDismiss: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerDidDismiss: RCTDirectEventBlock? + @objc var onReadyForDisplay: RCTDirectEventBlock? + @objc var onPlaybackStalled: RCTDirectEventBlock? + @objc var onPlaybackResume: RCTDirectEventBlock? + @objc var onPlaybackRateChange: RCTDirectEventBlock? + @objc var onVolumeChange: RCTDirectEventBlock? + @objc var onVideoPlaybackStateChanged: RCTDirectEventBlock? + @objc var onVideoExternalPlaybackChange: RCTDirectEventBlock? + @objc var onPictureInPictureStatusChanged: RCTDirectEventBlock? + @objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock? + @objc var onGetLicense: RCTDirectEventBlock? + @objc var onReceiveAdEvent: RCTDirectEventBlock? + + @objc func _onPictureInPictureStatusChanged() { + onPictureInPictureStatusChanged?(["isActive": NSNumber(value: true)]) + } + + @objc func _onRestoreUserInterfaceForPictureInPictureStop() { + onPictureInPictureStatusChanged?(["isActive": NSNumber(value: false)]) + } + + func isPipEnabled() -> Bool { + return _pictureInPictureEnabled + } + + init(eventDispatcher: RCTEventDispatcher!) { + super.init(frame: CGRect(x: 0, y: 0, width: 100, height: 100)) + #if USE_GOOGLE_IMA + _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) + #endif + + _eventDispatcher = eventDispatcher + + #if os(iOS) + _pip = RCTPictureInPicture(self._onPictureInPictureStatusChanged, self._onRestoreUserInterfaceForPictureInPictureStop) + #endif + + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationWillResignActive(notification:)), + name: UIApplication.willResignActiveNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationDidBecomeActive(notification:)), + name: UIApplication.didBecomeActiveNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationDidEnterBackground(notification:)), + name: UIApplication.didEnterBackgroundNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationWillEnterForeground(notification:)), + name: UIApplication.willEnterForegroundNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(audioRouteChanged(notification:)), + name: AVAudioSession.routeChangeNotification, + object: nil + ) + _playerObserver._handlers = self + #if USE_VIDEO_CACHING + _videoCache.playerItemPrepareText = playerItemPrepareText + #endif + } + + required init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + #if USE_GOOGLE_IMA + _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) + #endif + } + + deinit { + NotificationCenter.default.removeObserver(self) + self.removePlayerLayer() + _playerObserver.clearPlayer() + } + + // MARK: - App lifecycle handlers + + @objc func applicationWillResignActive(notification _: NSNotification!) { + if _playInBackground || _playWhenInactive || _paused { return } + + _player?.pause() + _player?.rate = 0.0 + } + + @objc func applicationDidBecomeActive(notification _: NSNotification!) 
{ + if _playInBackground || _playWhenInactive || _paused { return } + + // Resume the player or any other tasks that should continue when the app becomes active. + _player?.play() + _player?.rate = _rate + } + + @objc func applicationDidEnterBackground(notification _: NSNotification!) { + if !_playInBackground { + // Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html + _playerLayer?.player = nil + _playerViewController?.player = nil + } + } + + @objc func applicationWillEnterForeground(notification _: NSNotification!) { + self.applyModifiers() + if !_playInBackground { + _playerLayer?.player = _player + _playerViewController?.player = _player + } + } + + // MARK: - Audio events + + @objc func audioRouteChanged(notification: NSNotification!) { + if let userInfo = notification.userInfo { + let reason: AVAudioSession.RouteChangeReason! = userInfo[AVAudioSessionRouteChangeReasonKey] as? AVAudioSession.RouteChangeReason + // let previousRoute:NSNumber! = userInfo[AVAudioSessionRouteChangePreviousRouteKey] as? NSNumber + if reason == .oldDeviceUnavailable, let onVideoAudioBecomingNoisy = onVideoAudioBecomingNoisy { + onVideoAudioBecomingNoisy(["target": reactTag as Any]) + } + } + } + + // MARK: - Progress + + func sendProgressUpdate() { + if let video = _player?.currentItem, + video == nil || video.status != AVPlayerItem.Status.readyToPlay { + return + } + + let playerDuration: CMTime = RCTVideoUtils.playerItemDuration(_player) + if CMTIME_IS_INVALID(playerDuration) { + return + } + + var currentTime = _player?.currentTime() + if currentTime != nil && _source?.cropStart != nil { + currentTime = CMTimeSubtract(currentTime!, CMTimeMake(value: _source?.cropStart ?? 0, timescale: 1000)) + } + let currentPlaybackTime = _player?.currentItem?.currentDate() + let duration = CMTimeGetSeconds(playerDuration) + let currentTimeSecs = CMTimeGetSeconds(currentTime ?? .zero) + + NotificationCenter.default.post(name: NSNotification.Name("RCTVideo_progress"), object: nil, userInfo: [ + "progress": NSNumber(value: currentTimeSecs / duration), + ]) + + if currentTimeSecs >= 0 { + #if USE_GOOGLE_IMA + if !_didRequestAds && currentTimeSecs >= 0.0001 && _adTagUrl != nil { + _imaAdsManager.requestAds() + _didRequestAds = true + } + #endif + onVideoProgress?([ + "currentTime": NSNumber(value: Float(currentTimeSecs)), + "playableDuration": RCTVideoUtils.calculatePlayableDuration(_player, withSource: _source), + "atValue": NSNumber(value: currentTime?.value ?? .zero), + "currentPlaybackTime": NSNumber(value: NSNumber(value: floor(currentPlaybackTime?.timeIntervalSince1970 ?? 0 * 1000)).int64Value), + "target": reactTag, + "seekableDuration": RCTVideoUtils.calculateSeekableDuration(_player), + ]) } + } + + // MARK: - Player and source - // MARK: - Audio events - - @objc func audioRouteChanged(notification:NSNotification!) { - if let userInfo = notification.userInfo { - let reason:AVAudioSession.RouteChangeReason! = userInfo[AVAudioSessionRouteChangeReasonKey] as? AVAudioSession.RouteChangeReason - // let previousRoute:NSNumber! = userInfo[AVAudioSessionRouteChangePreviousRouteKey] as? NSNumber - if reason == .oldDeviceUnavailable, let onVideoAudioBecomingNoisy = onVideoAudioBecomingNoisy { - onVideoAudioBecomingNoisy(["target": reactTag as Any]) + @objc + func setSrc(_ source: NSDictionary!) 
{ + let dispatchClosure = { + self._source = VideoSource(source) + if self._source?.uri == nil || self._source?.uri == "" { + self._player?.replaceCurrentItem(with: nil) + return + } + self.removePlayerLayer() + self._playerObserver.player = nil + self._resouceLoaderDelegate = nil + self._playerObserver.playerItem = nil + + // perform on next run loop, otherwise other passed react-props may not be set + RCTVideoUtils.delay() + .then { [weak self] in + guard let self = self else { throw NSError(domain: "", code: 0, userInfo: nil) } + guard let source = self._source else { + DebugLog("The source not exist") + throw NSError(domain: "", code: 0, userInfo: nil) + } + if let uri = source.uri, uri.starts(with: "ph://") { + return Promise { + RCTVideoUtils.preparePHAsset(uri: uri).then { asset in + return self.playerItemPrepareText(asset: asset, assetOptions: nil, uri: source.uri ?? "") + } } - } - } - - // MARK: - Progress - - func sendProgressUpdate() { - if let video = _player?.currentItem, - video == nil || video.status != AVPlayerItem.Status.readyToPlay { - return - } - - let playerDuration:CMTime = RCTVideoUtils.playerItemDuration(_player) - if CMTIME_IS_INVALID(playerDuration) { - return - } - - var currentTime = _player?.currentTime() - if (currentTime != nil && _source?.cropStart != nil) { - currentTime = CMTimeSubtract(currentTime!, CMTimeMake(value: _source?.cropStart ?? 0, timescale: 1000)) - } - let currentPlaybackTime = _player?.currentItem?.currentDate() - let duration = CMTimeGetSeconds(playerDuration) - let currentTimeSecs = CMTimeGetSeconds(currentTime ?? .zero) - - NotificationCenter.default.post(name: NSNotification.Name("RCTVideo_progress"), object: nil, userInfo: [ - "progress": NSNumber(value: currentTimeSecs / duration) - ]) - - if currentTimeSecs >= 0 { -#if USE_GOOGLE_IMA - if !_didRequestAds && currentTimeSecs >= 0.0001 && _adTagUrl != nil { - _imaAdsManager.requestAds() - _didRequestAds = true + } + guard let assetResult = RCTVideoUtils.prepareAsset(source: source), + let asset = assetResult.asset, + let assetOptions = assetResult.assetOptions else { + DebugLog("Could not find video URL in source '\(String(describing: self._source))'") + throw NSError(domain: "", code: 0, userInfo: nil) + } + + if let startPosition = self._source?.startPosition { + self._startPosition = Float64(startPosition) / 1000 + } + + #if USE_VIDEO_CACHING + if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) { + return self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions) } -#endif - onVideoProgress?([ - "currentTime": NSNumber(value: Float(currentTimeSecs)), - "playableDuration": RCTVideoUtils.calculatePlayableDuration(_player, withSource: _source), - "atValue": NSNumber(value: currentTime?.value ?? .zero), - "currentPlaybackTime": NSNumber(value: NSNumber(value: floor(currentPlaybackTime?.timeIntervalSince1970 ?? 0 * 1000)).int64Value), - "target": reactTag, - "seekableDuration": RCTVideoUtils.calculateSeekableDuration(_player) - ]) - } - } - - // MARK: - Player and source - @objc - func setSrc(_ source:NSDictionary!) 
{ - let dispatchClosure = { - self._source = VideoSource(source) - if (self._source?.uri == nil || self._source?.uri == "") { - self._player?.replaceCurrentItem(with: nil) - return; + #endif + + if self._drm != nil || self._localSourceEncryptionKeyScheme != nil { + self._resouceLoaderDelegate = RCTResourceLoaderDelegate( + asset: asset, + drm: self._drm, + localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme, + onVideoError: self.onVideoError, + onGetLicense: self.onGetLicense, + reactTag: self.reactTag + ) + } + + return Promise { self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") } + }.then { [weak self] (playerItem: AVPlayerItem!) in + guard let self = self else { throw NSError(domain: "", code: 0, userInfo: nil) } + + self._player?.pause() + self._playerItem = playerItem + self._playerObserver.playerItem = self._playerItem + self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration) + self.setPlaybackRange(playerItem, withVideoStart: self._source?.cropStart, withVideoEnd: self._source?.cropEnd) + self.setFilter(self._filterName) + if let maxBitRate = self._maxBitRate { + self._playerItem?.preferredPeakBitRate = Double(maxBitRate) + } + + self._player = self._player ?? AVPlayer() + self._player?.replaceCurrentItem(with: playerItem) + self._playerObserver.player = self._player + self.applyModifiers() + self._player?.actionAtItemEnd = .none + + if #available(iOS 10.0, *) { + self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling) + } + + #if USE_GOOGLE_IMA + if self._adTagUrl != nil { + // Set up your content playhead and contentComplete callback. + self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!) + + self._imaAdsManager.setUpAdsLoader() } - self.removePlayerLayer() - self._playerObserver.player = nil - self._resouceLoaderDelegate = nil - self._playerObserver.playerItem = nil - - // perform on next run loop, otherwise other passed react-props may not be set - RCTVideoUtils.delay() - .then{ [weak self] in - guard let self = self else {throw NSError(domain: "", code: 0, userInfo: nil)} - guard let source = self._source else { - DebugLog("The source not exist") - throw NSError(domain: "", code: 0, userInfo: nil) - } - if let uri = source.uri, uri.starts(with: "ph://") { - return Promise { - RCTVideoUtils.preparePHAsset(uri: uri).then { asset in - return self.playerItemPrepareText(asset:asset, assetOptions:nil, uri: source.uri ?? 
"") - } - } - } - guard let assetResult = RCTVideoUtils.prepareAsset(source: source), - let asset = assetResult.asset, - let assetOptions = assetResult.assetOptions else { - DebugLog("Could not find video URL in source '\(String(describing: self._source))'") - throw NSError(domain: "", code: 0, userInfo: nil) - } - - if let startPosition = self._source?.startPosition { - self._startPosition = Float64(startPosition) / 1000 - } - -#if USE_VIDEO_CACHING - if self._videoCache.shouldCache(source:source, textTracks:self._textTracks) { - return self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions:assetOptions) - } -#endif - - if self._drm != nil || self._localSourceEncryptionKeyScheme != nil { - self._resouceLoaderDelegate = RCTResourceLoaderDelegate( - asset: asset, - drm: self._drm, - localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme, - onVideoError: self.onVideoError, - onGetLicense: self.onGetLicense, - reactTag: self.reactTag - ) - } - - return Promise{self.playerItemPrepareText(asset: asset, assetOptions:assetOptions, uri: source.uri ?? "")} - }.then{[weak self] (playerItem:AVPlayerItem!) in - guard let self = self else {throw NSError(domain: "", code: 0, userInfo: nil)} - - self._player?.pause() - self._playerItem = playerItem - self._playerObserver.playerItem = self._playerItem - self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration) - self.setPlaybackRange(playerItem, withVideoStart: self._source?.cropStart, withVideoEnd: self._source?.cropEnd) - self.setFilter(self._filterName) - if let maxBitRate = self._maxBitRate { - self._playerItem?.preferredPeakBitRate = Double(maxBitRate) - } - - self._player = self._player ?? AVPlayer() - self._player?.replaceCurrentItem(with: playerItem) - self._playerObserver.player = self._player - self.applyModifiers() - self._player?.actionAtItemEnd = .none - - if #available(iOS 10.0, *) { - self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling) - } - -#if USE_GOOGLE_IMA - if self._adTagUrl != nil { - // Set up your content playhead and contentComplete callback. - self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!) - - self._imaAdsManager.setUpAdsLoader() - } -#endif - //Perform on next run loop, otherwise onVideoLoadStart is nil - self.onVideoLoadStart?([ - "src": [ - "uri": self._source?.uri ?? NSNull(), - "type": self._source?.type ?? NSNull(), - "isNetwork": NSNumber(value: self._source?.isNetwork ?? false) - ], - "drm": self._drm?.json ?? 
NSNull(), - "target": self.reactTag - ]) - }.catch{_ in } - self._videoLoadStarted = true - } - DispatchQueue.global(qos: .default).async(execute: dispatchClosure) - } - - @objc - func setDrm(_ drm:NSDictionary) { - _drm = DRMParams(drm) - } - - @objc - func setLocalSourceEncryptionKeyScheme(_ keyScheme:String) { - _localSourceEncryptionKeyScheme = keyScheme - } - - func playerItemPrepareText(asset:AVAsset!, assetOptions:NSDictionary?, uri: String) -> AVPlayerItem { - if (_textTracks == nil) || _textTracks?.count==0 || (uri.hasSuffix(".m3u8")) { - return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)) - } - - // AVPlayer can't airplay AVMutableCompositions - _allowsExternalPlayback = false - let mixComposition = RCTVideoUtils.generateMixComposition(asset) - let validTextTracks = RCTVideoUtils.getValidTextTracks( - asset:asset, - assetOptions:assetOptions, - mixComposition:mixComposition, - textTracks:_textTracks) - if validTextTracks.count != _textTracks?.count { - setTextTracks(validTextTracks) - } - - return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)) - } - - func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem { - var mapping: [AVMetadataIdentifier: Any] = [:] - - if let title = _source?.title { - mapping[.commonIdentifierTitle] = title - } - - if let subtitle = _source?.subtitle { - mapping[.iTunesMetadataTrackSubTitle] = subtitle - } - - if let description = _source?.description { - mapping[.commonIdentifierDescription] = description - } - - if let customImageUri = _source?.customImageUri, - let imageData = RCTVideoUtils.createImageMetadataItem(imageUri: customImageUri) { - mapping[.commonIdentifierArtwork] = imageData - } - - if #available(iOS 12.2, *), !mapping.isEmpty { - playerItem.externalMetadata = RCTVideoUtils.createMetadataItems(for: mapping) - } - -#if os(tvOS) - if let chapters = _chapters { - playerItem.navigationMarkerGroups = RCTVideoTVUtils.makeNavigationMarkerGroups(chapters) - } -#endif - - return playerItem - } - - // MARK: - Prop setters - - @objc - func setResizeMode(_ mode: String) { - var resizeMode: AVLayerVideoGravity = .resizeAspect - - switch mode { - case "contain": - resizeMode = .resizeAspect - break - case "none": - resizeMode = .resizeAspect - break - case "cover": - resizeMode = .resizeAspectFill - break - case "stretch": - resizeMode = .resize - break - default: - resizeMode = .resizeAspect - } - - if _controls { - _playerViewController?.videoGravity = resizeMode - } else { - _playerLayer?.videoGravity = resizeMode - } - - _resizeMode = mode + #endif + // Perform on next run loop, otherwise onVideoLoadStart is nil + self.onVideoLoadStart?([ + "src": [ + "uri": self._source?.uri ?? NSNull(), + "type": self._source?.type ?? NSNull(), + "isNetwork": NSNumber(value: self._source?.isNetwork ?? false), + ], + "drm": self._drm?.json ?? 
NSNull(), + "target": self.reactTag, + ]) + }.catch { _ in } + self._videoLoadStarted = true } + DispatchQueue.global(qos: .default).async(execute: dispatchClosure) + } - @objc - func setPlayInBackground(_ playInBackground:Bool) { - _playInBackground = playInBackground - } - - @objc - func setPreventsDisplaySleepDuringVideoPlayback(_ preventsDisplaySleepDuringVideoPlayback:Bool) { - _preventsDisplaySleepDuringVideoPlayback = preventsDisplaySleepDuringVideoPlayback - self.applyModifiers() - } + @objc + func setDrm(_ drm: NSDictionary) { + _drm = DRMParams(drm) + } - @objc - func setAllowsExternalPlayback(_ allowsExternalPlayback:Bool) { - _allowsExternalPlayback = allowsExternalPlayback - _player?.allowsExternalPlayback = _allowsExternalPlayback - } - - @objc - func setPlayWhenInactive(_ playWhenInactive:Bool) { - _playWhenInactive = playWhenInactive - } + @objc + func setLocalSourceEncryptionKeyScheme(_ keyScheme: String) { + _localSourceEncryptionKeyScheme = keyScheme + } - @objc - func setPictureInPicture(_ pictureInPicture:Bool) { -#if os(iOS) - let audioSession = AVAudioSession.sharedInstance() - do { - try audioSession.setCategory(.playback) - try audioSession.setActive(true, options: []) - } catch { - } - if (pictureInPicture) { - _pictureInPictureEnabled = true - } else { - _pictureInPictureEnabled = false - } - _pip?.setPictureInPicture(pictureInPicture) -#endif + func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> AVPlayerItem { + if (_textTracks == nil) || _textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) { + return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)) } - @objc - func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore:Bool) { -#if os(iOS) - _pip?.setRestoreUserInterfaceForPIPStopCompletionHandler(restore) -#endif - } - - @objc - func setIgnoreSilentSwitch(_ ignoreSilentSwitch:String?) { - _ignoreSilentSwitch = ignoreSilentSwitch - RCTPlayerOperations.configureAudio(ignoreSilentSwitch:_ignoreSilentSwitch, mixWithOthers:_mixWithOthers, audioOutput:_audioOutput) - applyModifiers() - } - - @objc - func setMixWithOthers(_ mixWithOthers:String?) { - _mixWithOthers = mixWithOthers - applyModifiers() - } - - @objc - func setPaused(_ paused:Bool) { - if paused { - if _adPlaying { -#if USE_GOOGLE_IMA - _imaAdsManager.getAdsManager()?.pause() -#endif - } else { - _player?.pause() - _player?.rate = 0.0 - } + // AVPlayer can't airplay AVMutableCompositions + _allowsExternalPlayback = false + let mixComposition = RCTVideoUtils.generateMixComposition(asset) + let validTextTracks = RCTVideoUtils.getValidTextTracks( + asset: asset, + assetOptions: assetOptions, + mixComposition: mixComposition, + textTracks: _textTracks + ) + if validTextTracks.count != _textTracks?.count { + setTextTracks(validTextTracks) + } + + return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)) + } + + func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) 
-> AVPlayerItem { + var mapping: [AVMetadataIdentifier: Any] = [:] + + if let title = _source?.title { + mapping[.commonIdentifierTitle] = title + } + + if let subtitle = _source?.subtitle { + mapping[.iTunesMetadataTrackSubTitle] = subtitle + } + + if let description = _source?.description { + mapping[.commonIdentifierDescription] = description + } + + if let customImageUri = _source?.customImageUri, + let imageData = RCTVideoUtils.createImageMetadataItem(imageUri: customImageUri) { + mapping[.commonIdentifierArtwork] = imageData + } + + if #available(iOS 12.2, *), !mapping.isEmpty { + playerItem.externalMetadata = RCTVideoUtils.createMetadataItems(for: mapping) + } + + #if os(tvOS) + if let chapters = _chapters { + playerItem.navigationMarkerGroups = RCTVideoTVUtils.makeNavigationMarkerGroups(chapters) + } + #endif + + return playerItem + } + + // MARK: - Prop setters + + @objc + func setResizeMode(_ mode: String) { + var resizeMode: AVLayerVideoGravity = .resizeAspect + + switch mode { + case "contain": + resizeMode = .resizeAspect + case "none": + resizeMode = .resizeAspect + case "cover": + resizeMode = .resizeAspectFill + case "stretch": + resizeMode = .resize + default: + resizeMode = .resizeAspect + } + + if _controls { + _playerViewController?.videoGravity = resizeMode + } else { + _playerLayer?.videoGravity = resizeMode + } + + _resizeMode = mode + } + + @objc + func setPlayInBackground(_ playInBackground: Bool) { + _playInBackground = playInBackground + } + + @objc + func setPreventsDisplaySleepDuringVideoPlayback(_ preventsDisplaySleepDuringVideoPlayback: Bool) { + _preventsDisplaySleepDuringVideoPlayback = preventsDisplaySleepDuringVideoPlayback + self.applyModifiers() + } + + @objc + func setAllowsExternalPlayback(_ allowsExternalPlayback: Bool) { + _allowsExternalPlayback = allowsExternalPlayback + _player?.allowsExternalPlayback = _allowsExternalPlayback + } + + @objc + func setPlayWhenInactive(_ playWhenInactive: Bool) { + _playWhenInactive = playWhenInactive + } + + @objc + func setPictureInPicture(_ pictureInPicture: Bool) { + #if os(iOS) + let audioSession = AVAudioSession.sharedInstance() + do { + try audioSession.setCategory(.playback) + try audioSession.setActive(true, options: []) + } catch {} + if pictureInPicture { + _pictureInPictureEnabled = true + } else { + _pictureInPictureEnabled = false + } + _pip?.setPictureInPicture(pictureInPicture) + #endif + } + + @objc + func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore: Bool) { + #if os(iOS) + _pip?.setRestoreUserInterfaceForPIPStopCompletionHandler(restore) + #endif + } + + @objc + func setIgnoreSilentSwitch(_ ignoreSilentSwitch: String?) { + _ignoreSilentSwitch = ignoreSilentSwitch + RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) + applyModifiers() + } + + @objc + func setMixWithOthers(_ mixWithOthers: String?) 
{ + _mixWithOthers = mixWithOthers + applyModifiers() + } + + @objc + func setPaused(_ paused: Bool) { + if paused { + if _adPlaying { + #if USE_GOOGLE_IMA + _imaAdsManager.getAdsManager()?.pause() + #endif + } else { + _player?.pause() + _player?.rate = 0.0 + } + } else { + RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) + + if _adPlaying { + #if USE_GOOGLE_IMA + _imaAdsManager.getAdsManager()?.resume() + #endif + } else { + if #available(iOS 10.0, *), !_automaticallyWaitsToMinimizeStalling { + _player?.playImmediately(atRate: _rate) } else { - RCTPlayerOperations.configureAudio(ignoreSilentSwitch:_ignoreSilentSwitch, mixWithOthers:_mixWithOthers, audioOutput:_audioOutput) - - if _adPlaying { -#if USE_GOOGLE_IMA - _imaAdsManager.getAdsManager()?.resume() -#endif - } else { - if #available(iOS 10.0, *), !_automaticallyWaitsToMinimizeStalling { - _player?.playImmediately(atRate: _rate) - } else { - _player?.play() - _player?.rate = _rate - } - _player?.rate = _rate - } - } - - _paused = paused - } - - @objc - func setSeek(_ info:NSDictionary!) { - let seekTime:NSNumber! = info["time"] as! NSNumber - let seekTolerance:NSNumber! = info["tolerance"] as! NSNumber - let item:AVPlayerItem? = _player?.currentItem - guard item != nil, let player = _player, let item = item, item.status == AVPlayerItem.Status.readyToPlay else { - _pendingSeek = true - _pendingSeekTime = seekTime.floatValue - return + _player?.play() + _player?.rate = _rate } - let wasPaused = _paused - - RCTPlayerOperations.seek( - player:player, - playerItem:item, - paused:wasPaused, - seekTime:seekTime.floatValue, - seekTolerance:seekTolerance.floatValue) - .then{ [weak self] (finished:Bool) in + _player?.rate = _rate + } + } + + _paused = paused + } + + @objc + func setSeek(_ info: NSDictionary!) { + let seekTime: NSNumber! = info["time"] as! NSNumber + let seekTolerance: NSNumber! = info["tolerance"] as! NSNumber + let item: AVPlayerItem? 
= _player?.currentItem + guard item != nil, let player = _player, let item = item, item.status == AVPlayerItem.Status.readyToPlay else { + _pendingSeek = true + _pendingSeekTime = seekTime.floatValue + return + } + let wasPaused = _paused + + RCTPlayerOperations.seek( + player: player, + playerItem: item, + paused: wasPaused, + seekTime: seekTime.floatValue, + seekTolerance: seekTolerance.floatValue + ) + .then { [weak self] (_: Bool) in + guard let self = self else { return } + + self._playerObserver.addTimeObserverIfNotSet() + if !wasPaused { + self.setPaused(false) + } + self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))), + "seekTime": seekTime, + "target": self.reactTag]) + }.catch { _ in } + + _pendingSeek = false + } + + @objc + func setRate(_ rate: Float) { + _rate = rate + applyModifiers() + } + + @objc + func isMuted() -> Bool { + return _muted + } + + @objc + func setMuted(_ muted: Bool) { + _muted = muted + applyModifiers() + } + + @objc + func setAudioOutput(_ audioOutput: String) { + _audioOutput = audioOutput + RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) + do { + if audioOutput == "speaker" { + #if os(iOS) + try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker) + #endif + } else if audioOutput == "earpiece" { + try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none) + } + } catch { + print("Error occurred: \(error.localizedDescription)") + } + } + + @objc + func setVolume(_ volume: Float) { + _volume = volume + applyModifiers() + } + + @objc + func setMaxBitRate(_ maxBitRate: Float) { + _maxBitRate = maxBitRate + _playerItem?.preferredPeakBitRate = Double(maxBitRate) + } + + @objc + func setPreferredForwardBufferDuration(_ preferredForwardBufferDuration: Float) { + _preferredForwardBufferDuration = preferredForwardBufferDuration + if #available(iOS 10.0, *) { + _playerItem?.preferredForwardBufferDuration = TimeInterval(preferredForwardBufferDuration) + } else { + // Fallback on earlier versions + } + } + + @objc + func setAutomaticallyWaitsToMinimizeStalling(_ waits: Bool) { + _automaticallyWaitsToMinimizeStalling = waits + if #available(iOS 10.0, *) { + _player?.automaticallyWaitsToMinimizeStalling = waits + } else { + // Fallback on earlier versions + } + } + + func setPlaybackRange(_ item: AVPlayerItem!, withVideoStart videoStart: Int64?, withVideoEnd videoEnd: Int64?) 
{ + if videoStart != nil { + let start = CMTimeMake(value: videoStart!, timescale: 1000) + item.reversePlaybackEndTime = start + _pendingSeekTime = Float(CMTimeGetSeconds(start)) + _pendingSeek = true + } + if videoEnd != nil { + item.forwardPlaybackEndTime = CMTimeMake(value: videoEnd!, timescale: 1000) + } + } + + func applyModifiers() { + if let video = _player?.currentItem, + video == nil || video.status != AVPlayerItem.Status.readyToPlay { + return + } + if _muted { + if !_controls { + _player?.volume = 0 + } + _player?.isMuted = true + } else { + _player?.volume = _volume + _player?.isMuted = false + } + + if #available(iOS 12.0, tvOS 12.0, *) { + _player?.preventsDisplaySleepDuringVideoPlayback = _preventsDisplaySleepDuringVideoPlayback + } else { + // Fallback on earlier versions + } + + if let _maxBitRate = _maxBitRate { + setMaxBitRate(_maxBitRate) + } + + setAudioOutput(_audioOutput) + setSelectedAudioTrack(_selectedAudioTrackCriteria) + setSelectedTextTrack(_selectedTextTrackCriteria) + setResizeMode(_resizeMode) + setRepeat(_repeat) + setControls(_controls) + setPaused(_paused) + setAllowsExternalPlayback(_allowsExternalPlayback) + } + + @objc + func setRepeat(_ repeat: Bool) { + _repeat = `repeat` + } + + @objc + func setSelectedAudioTrack(_ selectedAudioTrack: NSDictionary?) { + setSelectedAudioTrack(SelectedTrackCriteria(selectedAudioTrack)) + } + + func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) { + _selectedAudioTrackCriteria = selectedAudioTrack + RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible, + criteria: _selectedAudioTrackCriteria) + } + + @objc + func setSelectedTextTrack(_ selectedTextTrack: NSDictionary?) { + setSelectedTextTrack(SelectedTrackCriteria(selectedTextTrack)) + } + + func setSelectedTextTrack(_ selectedTextTrack: SelectedTrackCriteria?) { + _selectedTextTrackCriteria = selectedTextTrack + if _textTracks != nil { // sideloaded text tracks + RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks, criteria: _selectedTextTrackCriteria) + } else { // text tracks included in the HLS playlist + RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible, + criteria: _selectedTextTrackCriteria) + } + } + + @objc + func setTextTracks(_ textTracks: [NSDictionary]?) { + setTextTracks(textTracks?.map { TextTrack($0) }) + } + + func setTextTracks(_ textTracks: [TextTrack]?) { + _textTracks = textTracks + + // in case textTracks was set after selectedTextTrack + if _selectedTextTrackCriteria != nil { setSelectedTextTrack(_selectedTextTrackCriteria) } + } + + @objc + func setChapters(_ chapters: [NSDictionary]?) { + setChapters(chapters?.map { Chapter($0) }) + } + + func setChapters(_ chapters: [Chapter]?) { + _chapters = chapters + } + + @objc + func setFullscreen(_ fullscreen: Bool) { + if fullscreen && !_fullscreenPlayerPresented && _player != nil { + // Ensure player view controller is not null + // Controls will be displayed even if it is disabled in configuration + if _playerViewController == nil { + self.usePlayerViewController() + } + + // Set presentation style to fullscreen + _playerViewController?.modalPresentationStyle = .fullScreen + + // Find the nearest view controller + var viewController: UIViewController! = self.firstAvailableUIViewController() + if viewController == nil { + let keyWindow: UIWindow! 
= UIApplication.shared.keyWindow + viewController = keyWindow.rootViewController + if !viewController.children.isEmpty { + viewController = viewController.children.last + } + } + if viewController != nil { + _presentingViewController = viewController + + self.onVideoFullscreenPlayerWillPresent?(["target": reactTag as Any]) + + if let playerViewController = _playerViewController { + if _controls { + // prevents crash https://github.com/react-native-video/react-native-video/issues/3040 + self._playerViewController?.removeFromParent() + } + + viewController.present(playerViewController, animated: true, completion: { [weak self] in guard let self = self else { return } - - self._playerObserver.addTimeObserverIfNotSet() - if !wasPaused { - self.setPaused(false) - } - self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))), - "seekTime": seekTime, - "target": self.reactTag]) - }.catch{_ in } - - _pendingSeek = false - } - - - @objc - func setRate(_ rate:Float) { - _rate = rate - applyModifiers() - } - - @objc - func isMuted() -> Bool { - return _muted - } - - @objc - func setMuted(_ muted:Bool) { - _muted = muted - applyModifiers() - } - - @objc - func setAudioOutput(_ audioOutput:String) { - _audioOutput = audioOutput - RCTPlayerOperations.configureAudio(ignoreSilentSwitch:_ignoreSilentSwitch, mixWithOthers:_mixWithOthers, audioOutput:_audioOutput) - do { - if audioOutput == "speaker" { -#if os(iOS) - try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker) -#endif - } else if audioOutput == "earpiece" { - try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none) - } - } catch { - print("Error occurred: \(error.localizedDescription)") - } - } - - @objc - func setVolume(_ volume:Float) { - _volume = volume - applyModifiers() - } - - @objc - func setMaxBitRate(_ maxBitRate:Float) { - _maxBitRate = maxBitRate - _playerItem?.preferredPeakBitRate = Double(maxBitRate) - } - - @objc - func setPreferredForwardBufferDuration(_ preferredForwardBufferDuration:Float) { - _preferredForwardBufferDuration = preferredForwardBufferDuration - if #available(iOS 10.0, *) { - _playerItem?.preferredForwardBufferDuration = TimeInterval(preferredForwardBufferDuration) - } else { - // Fallback on earlier versions - } - } - - @objc - func setAutomaticallyWaitsToMinimizeStalling(_ waits:Bool) { - _automaticallyWaitsToMinimizeStalling = waits - if #available(iOS 10.0, *) { - _player?.automaticallyWaitsToMinimizeStalling = waits - } else { - // Fallback on earlier versions - } - } - - func setPlaybackRange(_ item:AVPlayerItem!, withVideoStart videoStart:Int64?, withVideoEnd videoEnd:Int64?) 
{ - if (videoStart != nil) { - let start = CMTimeMake(value: videoStart!, timescale: 1000) - item.reversePlaybackEndTime = start - _pendingSeekTime = Float(CMTimeGetSeconds(start)) - _pendingSeek = true - } - if (videoEnd != nil) { - item.forwardPlaybackEndTime = CMTimeMake(value: videoEnd!, timescale: 1000) - } - } - - func applyModifiers() { - if let video = _player?.currentItem, - video == nil || video.status != AVPlayerItem.Status.readyToPlay { - return - } - if _muted { - if !_controls { - _player?.volume = 0 - } - _player?.isMuted = true - } else { - _player?.volume = _volume - _player?.isMuted = false - } - - if #available(iOS 12.0, tvOS 12.0, *) { - _player?.preventsDisplaySleepDuringVideoPlayback = _preventsDisplaySleepDuringVideoPlayback - } else { - // Fallback on earlier versions - } - - if let _maxBitRate = _maxBitRate { - setMaxBitRate(_maxBitRate) - } - - setAudioOutput(_audioOutput) - setSelectedAudioTrack(_selectedAudioTrackCriteria) - setSelectedTextTrack(_selectedTextTrackCriteria) - setResizeMode(_resizeMode) - setRepeat(_repeat) - setControls(_controls) - setPaused(_paused) - setAllowsExternalPlayback(_allowsExternalPlayback) - } - - @objc - func setRepeat(_ `repeat`: Bool) { - _repeat = `repeat` - } - - @objc - func setSelectedAudioTrack(_ selectedAudioTrack:NSDictionary?) { - setSelectedAudioTrack(SelectedTrackCriteria(selectedAudioTrack)) - } - - func setSelectedAudioTrack(_ selectedAudioTrack:SelectedTrackCriteria?) { - _selectedAudioTrackCriteria = selectedAudioTrack - RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player:_player, characteristic: AVMediaCharacteristic.audible, - criteria:_selectedAudioTrackCriteria) - } - - @objc - func setSelectedTextTrack(_ selectedTextTrack:NSDictionary?) { - setSelectedTextTrack(SelectedTrackCriteria(selectedTextTrack)) - } - - func setSelectedTextTrack(_ selectedTextTrack:SelectedTrackCriteria?) { - _selectedTextTrackCriteria = selectedTextTrack - if (_textTracks != nil) { // sideloaded text tracks - RCTPlayerOperations.setSideloadedText(player:_player, textTracks:_textTracks, criteria:_selectedTextTrackCriteria) - } else { // text tracks included in the HLS playlist - RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player:_player, characteristic: AVMediaCharacteristic.legible, - criteria:_selectedTextTrackCriteria) - } - } - - @objc - func setTextTracks(_ textTracks:[NSDictionary]?) { - setTextTracks(textTracks?.map { TextTrack($0) }) - } - - func setTextTracks(_ textTracks:[TextTrack]?) { - _textTracks = textTracks - - // in case textTracks was set after selectedTextTrack - if (_selectedTextTrackCriteria != nil) {setSelectedTextTrack(_selectedTextTrackCriteria)} - } - - @objc - func setChapters(_ chapters:[NSDictionary]?) { - setChapters(chapters?.map { Chapter($0) }) - } - - func setChapters(_ chapters:[Chapter]?) { - _chapters = chapters - } - - @objc - func setFullscreen(_ fullscreen:Bool) { - if fullscreen && !_fullscreenPlayerPresented && _player != nil { - // Ensure player view controller is not null - // Controls will be displayed even if it is disabled in configuration - if _playerViewController == nil { - self.usePlayerViewController() - } - - // Set presentation style to fullscreen - _playerViewController?.modalPresentationStyle = .fullScreen - - // Find the nearest view controller - var viewController:UIViewController! = self.firstAvailableUIViewController() - if (viewController == nil) { - let keyWindow:UIWindow! 
= UIApplication.shared.keyWindow - viewController = keyWindow.rootViewController - if viewController.children.count > 0 - { - viewController = viewController.children.last - } - } - if viewController != nil { - _presentingViewController = viewController - - self.onVideoFullscreenPlayerWillPresent?(["target": reactTag as Any]) - - if let playerViewController = _playerViewController { - if(_controls) { - // prevents crash https://github.com/react-native-video/react-native-video/issues/3040 - self._playerViewController?.removeFromParent() - } - - viewController.present(playerViewController, animated:true, completion:{ [weak self] in - guard let self = self else {return} - // In fullscreen we must display controls - self._playerViewController?.showsPlaybackControls = true - self._fullscreenPlayerPresented = fullscreen - self._playerViewController?.autorotate = self._fullscreenAutorotate - - self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag]) - - }) - } - } - } else if !fullscreen && _fullscreenPlayerPresented, let _playerViewController = _playerViewController { - self.videoPlayerViewControllerWillDismiss(playerViewController: _playerViewController) - _presentingViewController?.dismiss(animated: true, completion:{[weak self] in - self?.videoPlayerViewControllerDidDismiss(playerViewController: _playerViewController) - }) - } - } - - @objc - func setFullscreenAutorotate(_ autorotate:Bool) { - _fullscreenAutorotate = autorotate - if _fullscreenPlayerPresented { - _playerViewController?.autorotate = autorotate - } - } - - @objc - func setFullscreenOrientation(_ orientation:String?) { - _fullscreenOrientation = orientation - if _fullscreenPlayerPresented { - _playerViewController?.preferredOrientation = orientation - } - } - - func usePlayerViewController() { - guard let _player = _player, let _playerItem = _playerItem else { return } - - if _playerViewController == nil { - _playerViewController = createPlayerViewController(player:_player, withPlayerItem:_playerItem) - } - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before subview is added - setResizeMode(_resizeMode) - - guard let _playerViewController = _playerViewController else { return } - - if _controls { - let viewController:UIViewController! 
= self.reactViewController() - viewController?.addChild(_playerViewController) - self.addSubview(_playerViewController.view) - } - - _playerObserver.playerViewController = _playerViewController - } - - func createPlayerViewController(player:AVPlayer, withPlayerItem playerItem:AVPlayerItem) -> RCTVideoPlayerViewController { - let viewController = RCTVideoPlayerViewController() - viewController.showsPlaybackControls = self._controls - viewController.rctDelegate = self - viewController.preferredOrientation = _fullscreenOrientation - - viewController.view.frame = self.bounds - viewController.player = player - if #available(tvOS 14.0, *) { - viewController.allowsPictureInPicturePlayback = true - } - return viewController - } - - func usePlayerLayer() { - if let _player = _player { - _playerLayer = AVPlayerLayer(player: _player) - _playerLayer?.frame = self.bounds - _playerLayer?.needsDisplayOnBoundsChange = true - - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before layer is added - setResizeMode(_resizeMode) - _playerObserver.playerLayer = _playerLayer - - if let _playerLayer = _playerLayer { - self.layer.addSublayer(_playerLayer) - } - self.layer.needsDisplayOnBoundsChange = true -#if os(iOS) - _pip?.setupPipController(_playerLayer) -#endif - } - } - - @objc - func setControls(_ controls:Bool) { - if _controls != controls || ((_playerLayer == nil) && (_playerViewController == nil)) - { - _controls = controls - if _controls - { - self.removePlayerLayer() - self.usePlayerViewController() - } - else - { - _playerViewController?.view.removeFromSuperview() - _playerViewController?.removeFromParent() - _playerViewController = nil - _playerObserver.playerViewController = nil - self.usePlayerLayer() - } - } - } - - @objc - func setProgressUpdateInterval(_ progressUpdateInterval:Float) { - _playerObserver.replaceTimeObserverIfSet(Float64(progressUpdateInterval)) - } - - func removePlayerLayer() { - _playerLayer?.removeFromSuperlayer() - _playerLayer = nil - _playerObserver.playerLayer = nil - } - - // MARK: - RCTVideoPlayerViewControllerDelegate - - func videoPlayerViewControllerWillDismiss(playerViewController:AVPlayerViewController) { - if _playerViewController == playerViewController && _fullscreenPlayerPresented, let onVideoFullscreenPlayerWillDismiss = onVideoFullscreenPlayerWillDismiss { - _playerObserver.removePlayerViewControllerObservers() - onVideoFullscreenPlayerWillDismiss(["target": reactTag as Any]) - } - } - - func videoPlayerViewControllerDidDismiss(playerViewController:AVPlayerViewController) { - if _playerViewController == playerViewController && _fullscreenPlayerPresented { - _fullscreenPlayerPresented = false - _presentingViewController = nil - _playerViewController = nil - _playerObserver.playerViewController = nil - self.applyModifiers() - - onVideoFullscreenPlayerDidDismiss?(["target": reactTag as Any]) - } - } - - @objc - func setFilter(_ filterName:String!) { - _filterName = filterName - - if !_filterEnabled { - return - } else if let uri = _source?.uri, uri.contains("m3u8") { - return // filters don't work for HLS... return - } else if _playerItem?.asset == nil { - return - } - - let filter:CIFilter! 
= CIFilter(name: filterName) - if #available(iOS 9.0, *), let _playerItem = _playerItem { - self._playerItem?.videoComposition = AVVideoComposition( - asset: _playerItem.asset, - applyingCIFiltersWithHandler: { (request:AVAsynchronousCIImageFilteringRequest) in - if filter == nil { - request.finish(with: request.sourceImage, context:nil) - } else { - let image:CIImage! = request.sourceImage.clampedToExtent() - filter.setValue(image, forKey:kCIInputImageKey) - let output:CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) - request.finish(with: output, context:nil) - } - }) - } else { - // Fallback on earlier versions - } - } - - @objc - func setFilterEnabled(_ filterEnabled:Bool) { - _filterEnabled = filterEnabled - } - - // MARK: - RCTIMAAdsManager - - func getAdTagUrl() -> String? { - return _adTagUrl - } - - @objc - func setAdTagUrl(_ adTagUrl:String!) { - _adTagUrl = adTagUrl - } -#if USE_GOOGLE_IMA - func getContentPlayhead() -> IMAAVPlayerContentPlayhead? { - return _contentPlayhead - } -#endif - func setAdPlaying(_ adPlaying:Bool) { - _adPlaying = adPlaying - } - - // MARK: - React View Management - - func insertReactSubview(view:UIView!, atIndex:Int) { - if _controls { - view.frame = self.bounds - _playerViewController?.contentOverlayView?.insertSubview(view, at:atIndex) - } else { - RCTLogError("video cannot have any subviews") - } - return - } - - func removeReactSubview(subview:UIView!) { - if _controls { - subview.removeFromSuperview() - } else { - RCTLog("video cannot have any subviews") - } - return - } - - override func layoutSubviews() { - super.layoutSubviews() - if _controls, let _playerViewController = _playerViewController { - _playerViewController.view.frame = bounds - - // also adjust all subviews of contentOverlayView - for subview in _playerViewController.contentOverlayView?.subviews ?? [] { - subview.frame = bounds - } - } else { - CATransaction.begin() - CATransaction.setAnimationDuration(0) - _playerLayer?.frame = bounds - CATransaction.commit() - } - } - - // MARK: - Lifecycle - - override func removeFromSuperview() { - _player?.pause() - _player = nil - _resouceLoaderDelegate = nil - _playerObserver.clearPlayer() - + // In fullscreen we must display controls + self._playerViewController?.showsPlaybackControls = true + self._fullscreenPlayerPresented = fullscreen + self._playerViewController?.autorotate = self._fullscreenAutorotate + + self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag]) + }) + } + } + } else if !fullscreen && _fullscreenPlayerPresented, let _playerViewController = _playerViewController { + self.videoPlayerViewControllerWillDismiss(playerViewController: _playerViewController) + _presentingViewController?.dismiss(animated: true, completion: { [weak self] in + self?.videoPlayerViewControllerDidDismiss(playerViewController: _playerViewController) + }) + } + } + + @objc + func setFullscreenAutorotate(_ autorotate: Bool) { + _fullscreenAutorotate = autorotate + if _fullscreenPlayerPresented { + _playerViewController?.autorotate = autorotate + } + } + + @objc + func setFullscreenOrientation(_ orientation: String?) 
{ + _fullscreenOrientation = orientation + if _fullscreenPlayerPresented { + _playerViewController?.preferredOrientation = orientation + } + } + + func usePlayerViewController() { + guard let _player = _player, let _playerItem = _playerItem else { return } + + if _playerViewController == nil { + _playerViewController = createPlayerViewController(player: _player, withPlayerItem: _playerItem) + } + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before subview is added + setResizeMode(_resizeMode) + + guard let _playerViewController = _playerViewController else { return } + + if _controls { + let viewController: UIViewController! = self.reactViewController() + viewController?.addChild(_playerViewController) + self.addSubview(_playerViewController.view) + } + + _playerObserver.playerViewController = _playerViewController + } + + func createPlayerViewController(player: AVPlayer, withPlayerItem _: AVPlayerItem) -> RCTVideoPlayerViewController { + let viewController = RCTVideoPlayerViewController() + viewController.showsPlaybackControls = self._controls + viewController.rctDelegate = self + viewController.preferredOrientation = _fullscreenOrientation + + viewController.view.frame = self.bounds + viewController.player = player + if #available(tvOS 14.0, *) { + viewController.allowsPictureInPicturePlayback = true + } + return viewController + } + + func usePlayerLayer() { + if let _player = _player { + _playerLayer = AVPlayerLayer(player: _player) + _playerLayer?.frame = self.bounds + _playerLayer?.needsDisplayOnBoundsChange = true + + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before layer is added + setResizeMode(_resizeMode) + _playerObserver.playerLayer = _playerLayer + + if let _playerLayer = _playerLayer { + self.layer.addSublayer(_playerLayer) + } + self.layer.needsDisplayOnBoundsChange = true + #if os(iOS) + _pip?.setupPipController(_playerLayer) + #endif + } + } + + @objc + func setControls(_ controls: Bool) { + if _controls != controls || ((_playerLayer == nil) && (_playerViewController == nil)) { + _controls = controls + if _controls { self.removePlayerLayer() - - if let _playerViewController = _playerViewController { - _playerViewController.view.removeFromSuperview() - _playerViewController.removeFromParent() - _playerViewController.rctDelegate = nil - _playerViewController.player = nil - self._playerViewController = nil - _playerObserver.playerViewController = nil - } - - _eventDispatcher = nil - NotificationCenter.default.removeObserver(self) - - super.removeFromSuperview() - } - - // MARK: - Export - - @objc - func save(options:NSDictionary!, resolve: @escaping RCTPromiseResolveBlock, reject:@escaping RCTPromiseRejectBlock) { - RCTVideoSave.save( - options:options, - resolve:resolve, - reject:reject, - playerItem:_playerItem - ) - } - - func setLicenseResult(_ license:String!, _ licenseUrl: String!) { - _resouceLoaderDelegate?.setLicenseResult(license, licenseUrl) - } - - func setLicenseResultError(_ error:String!, _ licenseUrl: String!) 
{ - _resouceLoaderDelegate?.setLicenseResultError(error, licenseUrl) - } - - func dismissFullscreenPlayer() { - setFullscreen(false) - } - - func presentFullscreenPlayer() { - setFullscreen(true) - } - - // MARK: - RCTPlayerObserverHandler - - func handleTimeUpdate(time:CMTime) { - sendProgressUpdate() - } - - func handleReadyForDisplay(changeObject: Any, change:NSKeyValueObservedChange) { - onReadyForDisplay?([ - "target": reactTag - ]) - } - - // When timeMetadata is read the event onTimedMetadata is triggered - func handleTimeMetadataChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<[AVMetadataItem]?>) { - guard let newValue = change.newValue, let _items = newValue, _items.count > 0 else { - return - } - - var metadata: [[String:String?]?] = [] - for item in _items { - let value = item.value as? String - let identifier = item.identifier?.rawValue - - if let value = value { - metadata.append(["value":value, "identifier":identifier]) - } - } - - onTimedMetadata?([ - "target": reactTag, - "metadata": metadata - ]) - } - - // Handle player item status change. - func handlePlayerItemStatusChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange) { - guard let _playerItem = _playerItem else { - return - } - - if _playerItem.status == .readyToPlay { - handleReadyToPlay() - } else if _playerItem.status == .failed { - handlePlaybackFailed() - } - } - - func handleReadyToPlay() { - guard let _playerItem = _playerItem else { return } - var duration:Float = Float(CMTimeGetSeconds(_playerItem.asset.duration)) - - if duration.isNaN { - duration = 0.0 - } - - var width: Float? = nil - var height: Float? = nil - var orientation = "undefined" - - if _playerItem.asset.tracks(withMediaType: AVMediaType.video).count > 0 { - let videoTrack = _playerItem.asset.tracks(withMediaType: .video)[0] - width = Float(videoTrack.naturalSize.width) - height = Float(videoTrack.naturalSize.height) - let preferredTransform = videoTrack.preferredTransform - - if (videoTrack.naturalSize.width == preferredTransform.tx - && videoTrack.naturalSize.height == preferredTransform.ty) - || (preferredTransform.tx == 0 && preferredTransform.ty == 0) - { - orientation = "landscape" - } else { - orientation = "portrait" - } - } else if _playerItem.presentationSize.height != 0.0 { - width = Float(_playerItem.presentationSize.width) - height = Float(_playerItem.presentationSize.height) - orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? 
"landscape" : "portrait" - } - - if _pendingSeek { - setSeek([ - "time": NSNumber(value: _pendingSeekTime), - "tolerance": NSNumber(value: 100) - ]) - _pendingSeek = false - } - - if _startPosition >= 0 { - setSeek([ - "time": NSNumber(value: _startPosition), - "tolerance": NSNumber(value: 100) - ]) - _startPosition = -1 - } - - if _videoLoadStarted { - let audioTracks = RCTVideoUtils.getAudioTrackInfo(_player) - let textTracks = RCTVideoUtils.getTextTrackInfo(_player).map(\.json) - onVideoLoad?(["duration": NSNumber(value: duration), - "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))), - "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse), - "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward), - "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward), - "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse), - "canStepBackward": NSNumber(value: _playerItem.canStepBackward), - "canStepForward": NSNumber(value: _playerItem.canStepForward), - "naturalSize": [ - "width": width != nil ? NSNumber(value: width!) : "undefinded", - "height": width != nil ? NSNumber(value: height!) : "undefinded", - "orientation": orientation - ], - "audioTracks": audioTracks, - "textTracks": textTracks, - "target": reactTag as Any]) - } - _videoLoadStarted = false - _playerObserver.attachPlayerEventListeners() - applyModifiers() - } - - func handlePlaybackFailed() { - guard let _playerItem = _playerItem else { return } - onVideoError?( - [ - "error": [ - "code": NSNumber(value: (_playerItem.error! as NSError).code), - "localizedDescription": _playerItem.error?.localizedDescription == nil ? "" : _playerItem.error?.localizedDescription, - "localizedFailureReason": ((_playerItem.error! as NSError).localizedFailureReason == nil ? "" : (_playerItem.error! as NSError).localizedFailureReason) ?? "", - "localizedRecoverySuggestion": ((_playerItem.error! as NSError).localizedRecoverySuggestion == nil ? "" : (_playerItem.error! as NSError).localizedRecoverySuggestion) ?? "", - "domain": (_playerItem.error as! NSError).domain - ], - "target": reactTag - ]) - } - - func handlePlaybackBufferKeyEmpty(playerItem:AVPlayerItem, change:NSKeyValueObservedChange) { - _playerBufferEmpty = true - onVideoBuffer?(["isBuffering": true, "target": reactTag as Any]) - } - - // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. 
- func handlePlaybackLikelyToKeepUp(playerItem:AVPlayerItem, change:NSKeyValueObservedChange) { - if (!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && ((_playerItem?.isPlaybackLikelyToKeepUp) == true) { - setPaused(_paused) - } - _playerBufferEmpty = false - onVideoBuffer?(["isBuffering": false, "target": reactTag as Any]) + self.usePlayerViewController() + } else { + _playerViewController?.view.removeFromSuperview() + _playerViewController?.removeFromParent() + _playerViewController = nil + _playerObserver.playerViewController = nil + self.usePlayerLayer() + } + } + } + + @objc + func setProgressUpdateInterval(_ progressUpdateInterval: Float) { + _playerObserver.replaceTimeObserverIfSet(Float64(progressUpdateInterval)) + } + + func removePlayerLayer() { + _playerLayer?.removeFromSuperlayer() + _playerLayer = nil + _playerObserver.playerLayer = nil + } + + // MARK: - RCTVideoPlayerViewControllerDelegate + + func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) { + if _playerViewController == playerViewController && _fullscreenPlayerPresented, let onVideoFullscreenPlayerWillDismiss = onVideoFullscreenPlayerWillDismiss { + _playerObserver.removePlayerViewControllerObservers() + onVideoFullscreenPlayerWillDismiss(["target": reactTag as Any]) + } + } + + func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController) { + if _playerViewController == playerViewController && _fullscreenPlayerPresented { + _fullscreenPlayerPresented = false + _presentingViewController = nil + _playerViewController = nil + _playerObserver.playerViewController = nil + self.applyModifiers() + + onVideoFullscreenPlayerDidDismiss?(["target": reactTag as Any]) + } + } + + @objc + func setFilter(_ filterName: String!) { + _filterName = filterName + + if !_filterEnabled { + return + } else if let uri = _source?.uri, uri.contains("m3u8") { + return // filters don't work for HLS... return + } else if _playerItem?.asset == nil { + return + } + + let filter: CIFilter! = CIFilter(name: filterName) + if #available(iOS 9.0, *), let _playerItem = _playerItem { + self._playerItem?.videoComposition = AVVideoComposition( + asset: _playerItem.asset, + applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in + if filter == nil { + request.finish(with: request.sourceImage, context: nil) + } else { + let image: CIImage! = request.sourceImage.clampedToExtent() + filter.setValue(image, forKey: kCIInputImageKey) + let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) + request.finish(with: output, context: nil) + } + } + ) + } else { + // Fallback on earlier versions + } + } + + @objc + func setFilterEnabled(_ filterEnabled: Bool) { + _filterEnabled = filterEnabled + } + + // MARK: - RCTIMAAdsManager + + func getAdTagUrl() -> String? { + return _adTagUrl + } + + @objc + func setAdTagUrl(_ adTagUrl: String!) { + _adTagUrl = adTagUrl + } + + #if USE_GOOGLE_IMA + func getContentPlayhead() -> IMAAVPlayerContentPlayhead? { + return _contentPlayhead + } + #endif + func setAdPlaying(_ adPlaying: Bool) { + _adPlaying = adPlaying + } + + // MARK: - React View Management + + func insertReactSubview(view: UIView!, atIndex: Int) { + if _controls { + view.frame = self.bounds + _playerViewController?.contentOverlayView?.insertSubview(view, at: atIndex) + } else { + RCTLogError("video cannot have any subviews") + } + return + } + + func removeReactSubview(subview: UIView!) 
{ + if _controls { + subview.removeFromSuperview() + } else { + RCTLog("video cannot have any subviews") + } + return + } + + override func layoutSubviews() { + super.layoutSubviews() + if _controls, let _playerViewController = _playerViewController { + _playerViewController.view.frame = bounds + + // also adjust all subviews of contentOverlayView + for subview in _playerViewController.contentOverlayView?.subviews ?? [] { + subview.frame = bounds + } + } else { + CATransaction.begin() + CATransaction.setAnimationDuration(0) + _playerLayer?.frame = bounds + CATransaction.commit() } + } - func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) { - guard let _player = _player else { return } - - if(player.rate == change.oldValue && change.oldValue != nil) { - return - } + // MARK: - Lifecycle - onPlaybackRateChange?(["playbackRate": NSNumber(value: _player.rate), - "target": reactTag as Any]) + override func removeFromSuperview() { + _player?.pause() + _player = nil + _resouceLoaderDelegate = nil + _playerObserver.clearPlayer() - onVideoPlaybackStateChanged?(["isPlaying": _player.rate != 0, - "target": reactTag as Any]) + self.removePlayerLayer() - if _playbackStalled && _player.rate > 0 { - onPlaybackResume?(["playbackRate": NSNumber(value: _player.rate), - "target": reactTag as Any]) - _playbackStalled = false - } + if let _playerViewController = _playerViewController { + _playerViewController.view.removeFromSuperview() + _playerViewController.removeFromParent() + _playerViewController.rctDelegate = nil + _playerViewController.player = nil + self._playerViewController = nil + _playerObserver.playerViewController = nil } - func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) { - guard let _player = _player else { return } + _eventDispatcher = nil + NotificationCenter.default.removeObserver(self) - if(player.rate == change.oldValue && change.oldValue != nil) { - return - } - - onVolumeChange?(["volume": NSNumber(value: _player.volume), + super.removeFromSuperview() + } + + // MARK: - Export + + @objc + func save(options: NSDictionary!, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { + RCTVideoSave.save( + options: options, + resolve: resolve, + reject: reject, + playerItem: _playerItem + ) + } + + func setLicenseResult(_ license: String!, _ licenseUrl: String!) { + _resouceLoaderDelegate?.setLicenseResult(license, licenseUrl) + } + + func setLicenseResultError(_ error: String!, _ licenseUrl: String!) { + _resouceLoaderDelegate?.setLicenseResultError(error, licenseUrl) + } + + func dismissFullscreenPlayer() { + setFullscreen(false) + } + + func presentFullscreenPlayer() { + setFullscreen(true) + } + + // MARK: - RCTPlayerObserverHandler + + func handleTimeUpdate(time _: CMTime) { + sendProgressUpdate() + } + + func handleReadyForDisplay(changeObject _: Any, change _: NSKeyValueObservedChange) { + onReadyForDisplay?([ + "target": reactTag, + ]) + } + + // When timeMetadata is read the event onTimedMetadata is triggered + func handleTimeMetadataChange(playerItem _: AVPlayerItem, change: NSKeyValueObservedChange<[AVMetadataItem]?>) { + guard let newValue = change.newValue, let _items = newValue, !_items.isEmpty else { + return + } + + var metadata: [[String: String?]?] = [] + for item in _items { + let value = item.value as? 
String + let identifier = item.identifier?.rawValue + + if let value = value { + metadata.append(["value": value, "identifier": identifier]) + } + } + + onTimedMetadata?([ + "target": reactTag, + "metadata": metadata, + ]) + } + + // Handle player item status change. + func handlePlayerItemStatusChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { + guard let _playerItem = _playerItem else { + return + } + + if _playerItem.status == .readyToPlay { + handleReadyToPlay() + } else if _playerItem.status == .failed { + handlePlaybackFailed() + } + } + + func handleReadyToPlay() { + guard let _playerItem = _playerItem else { return } + var duration = Float(CMTimeGetSeconds(_playerItem.asset.duration)) + + if duration.isNaN { + duration = 0.0 + } + + var width: Float? + var height: Float? + var orientation = "undefined" + + if !_playerItem.asset.tracks(withMediaType: AVMediaType.video).isEmpty { + let videoTrack = _playerItem.asset.tracks(withMediaType: .video)[0] + width = Float(videoTrack.naturalSize.width) + height = Float(videoTrack.naturalSize.height) + let preferredTransform = videoTrack.preferredTransform + + if (videoTrack.naturalSize.width == preferredTransform.tx + && videoTrack.naturalSize.height == preferredTransform.ty) + || (preferredTransform.tx == 0 && preferredTransform.ty == 0) { + orientation = "landscape" + } else { + orientation = "portrait" + } + } else if _playerItem.presentationSize.height != 0.0 { + width = Float(_playerItem.presentationSize.width) + height = Float(_playerItem.presentationSize.height) + orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? "landscape" : "portrait" + } + + if _pendingSeek { + setSeek([ + "time": NSNumber(value: _pendingSeekTime), + "tolerance": NSNumber(value: 100), + ]) + _pendingSeek = false + } + + if _startPosition >= 0 { + setSeek([ + "time": NSNumber(value: _startPosition), + "tolerance": NSNumber(value: 100), + ]) + _startPosition = -1 + } + + if _videoLoadStarted { + let audioTracks = RCTVideoUtils.getAudioTrackInfo(_player) + let textTracks = RCTVideoUtils.getTextTrackInfo(_player).map(\.json) + onVideoLoad?(["duration": NSNumber(value: duration), + "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))), + "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse), + "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward), + "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward), + "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse), + "canStepBackward": NSNumber(value: _playerItem.canStepBackward), + "canStepForward": NSNumber(value: _playerItem.canStepForward), + "naturalSize": [ + "width": width != nil ? NSNumber(value: width!) : "undefinded", + "height": width != nil ? NSNumber(value: height!) : "undefinded", + "orientation": orientation, + ], + "audioTracks": audioTracks, + "textTracks": textTracks, + "target": reactTag as Any]) + } + _videoLoadStarted = false + _playerObserver.attachPlayerEventListeners() + applyModifiers() + } + + func handlePlaybackFailed() { + guard let _playerItem = _playerItem else { return } + onVideoError?( + [ + "error": [ + "code": NSNumber(value: (_playerItem.error! as NSError).code), + "localizedDescription": _playerItem.error?.localizedDescription == nil ? "" : _playerItem.error?.localizedDescription, + "localizedFailureReason": ((_playerItem.error! as NSError).localizedFailureReason == nil ? "" : (_playerItem.error! as NSError).localizedFailureReason) ?? 
"", + "localizedRecoverySuggestion": ((_playerItem.error! as NSError).localizedRecoverySuggestion == nil ? "" : (_playerItem.error! as NSError).localizedRecoverySuggestion) ?? "", + "domain": (_playerItem.error as! NSError).domain, + ], + "target": reactTag, + ]) + } + + func handlePlaybackBufferKeyEmpty(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { + _playerBufferEmpty = true + onVideoBuffer?(["isBuffering": true, "target": reactTag as Any]) + } + + // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. + func handlePlaybackLikelyToKeepUp(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { + if (!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && ((_playerItem?.isPlaybackLikelyToKeepUp) == true) { + setPaused(_paused) + } + _playerBufferEmpty = false + onVideoBuffer?(["isBuffering": false, "target": reactTag as Any]) + } + + func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) { + guard let _player = _player else { return } + + if player.rate == change.oldValue && change.oldValue != nil { + return + } + + onPlaybackRateChange?(["playbackRate": NSNumber(value: _player.rate), + "target": reactTag as Any]) + + onVideoPlaybackStateChanged?(["isPlaying": _player.rate != 0, + "target": reactTag as Any]) + + if _playbackStalled && _player.rate > 0 { + onPlaybackResume?(["playbackRate": NSNumber(value: _player.rate), "target": reactTag as Any]) - } - - func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange) { - guard let _player = _player else { return } - onVideoExternalPlaybackChange?(["isExternalPlaybackActive": NSNumber(value: _player.isExternalPlaybackActive), - "target": reactTag as Any]) - } - - func handleViewControllerOverlayViewFrameChange(overlayView:UIView, change:NSKeyValueObservedChange) { - let oldRect = change.oldValue - let newRect = change.newValue - if !oldRect!.equalTo(newRect!) { - // https://github.com/react-native-video/react-native-video/issues/3085#issuecomment-1557293391 - if newRect!.equalTo(UIScreen.main.bounds) { - RCTLog("in fullscreen") - if (!_fullscreenUncontrolPlayerPresented) { - _fullscreenUncontrolPlayerPresented = true; - - self.onVideoFullscreenPlayerWillPresent?(["target": self.reactTag as Any]) - self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag as Any]) - } - } else { - NSLog("not fullscreen") - if (_fullscreenUncontrolPlayerPresented) { - _fullscreenUncontrolPlayerPresented = false; - - self.onVideoFullscreenPlayerWillDismiss?(["target": self.reactTag as Any]) - self.onVideoFullscreenPlayerDidDismiss?(["target": self.reactTag as Any]) - } - } - - self.reactViewController().view.frame = UIScreen.main.bounds - self.reactViewController().view.setNeedsLayout() - } - } - - @objc func handleDidFailToFinishPlaying(notification:NSNotification!) { - let error:NSError! = notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError - onVideoError?( - [ - "error": [ - "code": NSNumber(value: (error as NSError).code), - "localizedDescription": error.localizedDescription ?? "", - "localizedFailureReason": (error as NSError).localizedFailureReason ?? "", - "localizedRecoverySuggestion": (error as NSError).localizedRecoverySuggestion ?? "", - "domain": (error as NSError).domain - ], - "target": reactTag - ]) - } - - @objc func handlePlaybackStalled(notification:NSNotification!) 
{ - onPlaybackStalled?(["target": reactTag as Any]) - _playbackStalled = true - } - - @objc func handlePlayerItemDidReachEnd(notification:NSNotification!) { - onVideoEnd?(["target": reactTag as Any]) -#if USE_GOOGLE_IMA - if notification.object as? AVPlayerItem == _player?.currentItem { - _imaAdsManager.getAdsLoader()?.contentComplete() - } -#endif - if _repeat { - let item:AVPlayerItem! = notification.object as? AVPlayerItem - item.seek(to: CMTime.zero, completionHandler: nil) - self.applyModifiers() - } else { - self.setPaused(true); - _playerObserver.removePlayerTimeObserver() - } - } - - @objc func handleAVPlayerAccess(notification:NSNotification!) { - let accessLog:AVPlayerItemAccessLog! = (notification.object as! AVPlayerItem).accessLog() - let lastEvent:AVPlayerItemAccessLogEvent! = accessLog.events.last - - onVideoBandwidthUpdate?(["bitrate": lastEvent.observedBitrate, "target": reactTag]) - } + _playbackStalled = false + } + } + + func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) { + guard let _player = _player else { return } + + if player.rate == change.oldValue && change.oldValue != nil { + return + } + + onVolumeChange?(["volume": NSNumber(value: _player.volume), + "target": reactTag as Any]) + } + + func handleExternalPlaybackActiveChange(player _: AVPlayer, change _: NSKeyValueObservedChange) { + guard let _player = _player else { return } + onVideoExternalPlaybackChange?(["isExternalPlaybackActive": NSNumber(value: _player.isExternalPlaybackActive), + "target": reactTag as Any]) + } + + func handleViewControllerOverlayViewFrameChange(overlayView _: UIView, change: NSKeyValueObservedChange) { + let oldRect = change.oldValue + let newRect = change.newValue + if !oldRect!.equalTo(newRect!) { + // https://github.com/react-native-video/react-native-video/issues/3085#issuecomment-1557293391 + if newRect!.equalTo(UIScreen.main.bounds) { + RCTLog("in fullscreen") + if !_fullscreenUncontrolPlayerPresented { + _fullscreenUncontrolPlayerPresented = true + + self.onVideoFullscreenPlayerWillPresent?(["target": self.reactTag as Any]) + self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag as Any]) + } + } else { + NSLog("not fullscreen") + if _fullscreenUncontrolPlayerPresented { + _fullscreenUncontrolPlayerPresented = false + + self.onVideoFullscreenPlayerWillDismiss?(["target": self.reactTag as Any]) + self.onVideoFullscreenPlayerDidDismiss?(["target": self.reactTag as Any]) + } + } + + self.reactViewController().view.frame = UIScreen.main.bounds + self.reactViewController().view.setNeedsLayout() + } + } + + @objc func handleDidFailToFinishPlaying(notification: NSNotification!) { + let error: NSError! = notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError + onVideoError?( + [ + "error": [ + "code": NSNumber(value: (error as NSError).code), + "localizedDescription": error.localizedDescription ?? "", + "localizedFailureReason": (error as NSError).localizedFailureReason ?? "", + "localizedRecoverySuggestion": (error as NSError).localizedRecoverySuggestion ?? "", + "domain": (error as NSError).domain, + ], + "target": reactTag, + ]) + } + + @objc func handlePlaybackStalled(notification _: NSNotification!) { + onPlaybackStalled?(["target": reactTag as Any]) + _playbackStalled = true + } + + @objc func handlePlayerItemDidReachEnd(notification: NSNotification!) { + onVideoEnd?(["target": reactTag as Any]) + #if USE_GOOGLE_IMA + if notification.object as? 
AVPlayerItem == _player?.currentItem { + _imaAdsManager.getAdsLoader()?.contentComplete() + } + #endif + if _repeat { + let item: AVPlayerItem! = notification.object as? AVPlayerItem + item.seek(to: CMTime.zero, completionHandler: nil) + self.applyModifiers() + } else { + self.setPaused(true) + _playerObserver.removePlayerTimeObserver() + } + } + + @objc func handleAVPlayerAccess(notification: NSNotification!) { + let accessLog: AVPlayerItemAccessLog! = (notification.object as! AVPlayerItem).accessLog() + let lastEvent: AVPlayerItemAccessLogEvent! = accessLog.events.last + + onVideoBandwidthUpdate?(["bitrate": lastEvent.observedBitrate, "target": reactTag]) + } } diff --git a/ios/Video/RCTVideoManager.swift b/ios/Video/RCTVideoManager.swift index 82851a1c7e..2839e61d00 100644 --- a/ios/Video/RCTVideoManager.swift +++ b/ios/Video/RCTVideoManager.swift @@ -3,88 +3,88 @@ import React @objc(RCTVideoManager) class RCTVideoManager: RCTViewManager { - - override func view() -> UIView { - return RCTVideo(eventDispatcher: bridge.eventDispatcher() as! RCTEventDispatcher) - } - - func methodQueue() -> DispatchQueue { - return bridge.uiManager.methodQueue - } - - @objc(save:reactTag:resolver:rejecter:) - func save(options: NSDictionary, reactTag: NSNumber, resolve: @escaping RCTPromiseResolveBlock,reject: @escaping RCTPromiseRejectBlock) -> Void { - bridge.uiManager.prependUIBlock({_ , viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.save(options: options, resolve: resolve, reject: reject) - } - }) - } - - @objc(setLicenseResult:licenseUrl:reactTag:) - func setLicenseResult(license: NSString, licenseUrl:NSString, reactTag: NSNumber) -> Void { - bridge.uiManager.prependUIBlock({_ , viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.setLicenseResult(license as String, licenseUrl as String) - } - }) + override func view() -> UIView { + return RCTVideo(eventDispatcher: bridge.eventDispatcher() as! RCTEventDispatcher) + } + + func methodQueue() -> DispatchQueue { + return bridge.uiManager.methodQueue + } + + @objc(save:reactTag:resolver:rejecter:) + func save(options: NSDictionary, reactTag: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.save(options: options, resolve: resolve, reject: reject) + } } - - @objc(setLicenseResultError:licenseUrl:reactTag:) - func setLicenseResultError(error: NSString, licenseUrl:NSString, reactTag: NSNumber) -> Void { - bridge.uiManager.prependUIBlock({_ , viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? 
RCTVideo { - view.setLicenseResultError(error as String, licenseUrl as String) - } - }) + } + + @objc(setLicenseResult:licenseUrl:reactTag:) + func setLicenseResult(license: NSString, licenseUrl: NSString, reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.setLicenseResult(license as String, licenseUrl as String) + } } - - @objc(dismissFullscreenPlayer:) - func dismissFullscreenPlayer(_ reactTag: NSNumber) -> Void { - bridge.uiManager.prependUIBlock({_ , viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.dismissFullscreenPlayer() - } - }) + } + + @objc(setLicenseResultError:licenseUrl:reactTag:) + func setLicenseResultError(error: NSString, licenseUrl: NSString, reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.setLicenseResultError(error as String, licenseUrl as String) + } } - @objc(presentFullscreenPlayer:) - func presentFullscreenPlayer(_ reactTag: NSNumber) -> Void { - bridge.uiManager.prependUIBlock({_ , viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.presentFullscreenPlayer() - } - }) + } + + @objc(dismissFullscreenPlayer:) + func dismissFullscreenPlayer(_ reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.dismissFullscreenPlayer() + } } + } - @objc(setPlayerPauseState:reactTag:) - func setPlayerPauseState(paused: NSNumber, reactTag: NSNumber) -> Void { - bridge.uiManager.prependUIBlock({_ , viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - let paused = paused.boolValue - view.setPaused(paused) - } - }) + @objc(presentFullscreenPlayer:) + func presentFullscreenPlayer(_ reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? 
RCTVideo { + view.presentFullscreenPlayer() + } } + } - override class func requiresMainQueueSetup() -> Bool { - return true + @objc(setPlayerPauseState:reactTag:) + func setPlayerPauseState(paused: NSNumber, reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + let paused = paused.boolValue + view.setPaused(paused) + } } + } + + override class func requiresMainQueueSetup() -> Bool { + return true + } } diff --git a/ios/Video/RCTVideoPlayerViewController.swift b/ios/Video/RCTVideoPlayerViewController.swift index 8d4324b793..4cc175710a 100644 --- a/ios/Video/RCTVideoPlayerViewController.swift +++ b/ios/Video/RCTVideoPlayerViewController.swift @@ -1,46 +1,44 @@ import AVKit class RCTVideoPlayerViewController: AVPlayerViewController { + weak var rctDelegate: RCTVideoPlayerViewControllerDelegate? - weak var rctDelegate: RCTVideoPlayerViewControllerDelegate? + // Optional paramters + var preferredOrientation: String? + var autorotate: Bool? - // Optional paramters - var preferredOrientation:String? - var autorotate:Bool? - - func shouldAutorotate() -> Bool { - - if autorotate! || preferredOrientation == nil || (preferredOrientation!.lowercased() == "all") { - return true - } - - return false + func shouldAutorotate() -> Bool { + if autorotate! || preferredOrientation == nil || (preferredOrientation!.lowercased() == "all") { + return true } - override func viewDidDisappear(_ animated: Bool) { - super.viewDidDisappear(animated) + return false + } - rctDelegate?.videoPlayerViewControllerWillDismiss(playerViewController: self) - rctDelegate?.videoPlayerViewControllerDidDismiss(playerViewController: self) - } + override func viewDidDisappear(_ animated: Bool) { + super.viewDidDisappear(animated) + + rctDelegate?.videoPlayerViewControllerWillDismiss(playerViewController: self) + rctDelegate?.videoPlayerViewControllerDidDismiss(playerViewController: self) + } - #if !os(tvOS) + #if !os(tvOS) func supportedInterfaceOrientations() -> UIInterfaceOrientationMask { - return .all + return .all } func preferredInterfaceOrientationForPresentation() -> UIInterfaceOrientation { - if preferredOrientation?.lowercased() == "landscape" { - return .landscapeRight - } else if preferredOrientation?.lowercased() == "portrait" { - return .portrait - } else { - // default case - let orientation = UIApplication.shared.statusBarOrientation - return orientation - } + if preferredOrientation?.lowercased() == "landscape" { + return .landscapeRight + } else if preferredOrientation?.lowercased() == "portrait" { + return .portrait + } else { + // default case + let orientation = UIApplication.shared.statusBarOrientation + return orientation + } } - - #endif + + #endif } diff --git a/ios/Video/RCTVideoPlayerViewControllerDelegate.swift b/ios/Video/RCTVideoPlayerViewControllerDelegate.swift index 6635975f7b..d2bc0006a7 100644 --- a/ios/Video/RCTVideoPlayerViewControllerDelegate.swift +++ b/ios/Video/RCTVideoPlayerViewControllerDelegate.swift @@ -1,7 +1,7 @@ -import Foundation import AVKit +import Foundation -protocol RCTVideoPlayerViewControllerDelegate : NSObject { - func videoPlayerViewControllerWillDismiss(playerViewController:AVPlayerViewController) - func videoPlayerViewControllerDidDismiss(playerViewController:AVPlayerViewController) +protocol RCTVideoPlayerViewControllerDelegate: NSObject 
{ + func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) + func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController) } diff --git a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift index 0e0e169750..361ee7d722 100644 --- a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift +++ b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift @@ -1,5 +1,5 @@ // -// RCTLog.swift +// RCTVideoSwiftLog.swift // WebViewExample // // Created by Jimmy Dee on 4/5/17. @@ -27,29 +27,28 @@ let logHeader: String = "RNV:" -func RCTLogError(_ message: String, _ file: String=#file, _ line: UInt=#line) { - RCTVideoSwiftLog.error(logHeader + message, file: file, line: line) +func RCTLogError(_ message: String, _ file: String = #file, _ line: UInt = #line) { + RCTVideoSwiftLog.error(logHeader + message, file: file, line: line) } -func RCTLogWarn(_ message: String, _ file: String=#file, _ line: UInt=#line) { - RCTVideoSwiftLog.warn(logHeader + message, file: file, line: line) +func RCTLogWarn(_ message: String, _ file: String = #file, _ line: UInt = #line) { + RCTVideoSwiftLog.warn(logHeader + message, file: file, line: line) } -func RCTLogInfo(_ message: String, _ file: String=#file, _ line: UInt=#line) { - RCTVideoSwiftLog.info(logHeader + message, file: file, line: line) +func RCTLogInfo(_ message: String, _ file: String = #file, _ line: UInt = #line) { + RCTVideoSwiftLog.info(logHeader + message, file: file, line: line) } -func RCTLog(_ message: String, _ file: String=#file, _ line: UInt=#line) { - RCTVideoSwiftLog.log(logHeader + message, file: file, line: line) +func RCTLog(_ message: String, _ file: String = #file, _ line: UInt = #line) { + RCTVideoSwiftLog.log(logHeader + message, file: file, line: line) } -func RCTLogTrace(_ message: String, _ file: String=#file, _ line: UInt=#line) { - RCTVideoSwiftLog.trace(logHeader + message, file: file, line: line) +func RCTLogTrace(_ message: String, _ file: String = #file, _ line: UInt = #line) { + RCTVideoSwiftLog.trace(logHeader + message, file: file, line: line) } func DebugLog(_ message: String) { -#if DEBUG + #if DEBUG print(logHeader + message) -#endif + #endif } - diff --git a/ios/Video/UIView+FindUIViewController.swift b/ios/Video/UIView+FindUIViewController.swift index 2947e1e384..3adc298a11 100644 --- a/ios/Video/UIView+FindUIViewController.swift +++ b/ios/Video/UIView+FindUIViewController.swift @@ -1,18 +1,18 @@ // Source: http://stackoverflow.com/a/3732812/1123156 extension UIView { - func firstAvailableUIViewController() -> UIViewController? { - // convenience function for casting and to "mask" the recursive function - return traverseResponderChainForUIViewController() - } + func firstAvailableUIViewController() -> UIViewController? { + // convenience function for casting and to "mask" the recursive function + return traverseResponderChainForUIViewController() + } - func traverseResponderChainForUIViewController() -> UIViewController? { - if let nextUIViewController = next as? UIViewController { - return nextUIViewController - } else if let nextUIView = next as? UIView { - return nextUIView.traverseResponderChainForUIViewController() - } else { - return nil - } + func traverseResponderChainForUIViewController() -> UIViewController? { + if let nextUIViewController = next as? UIViewController { + return nextUIViewController + } else if let nextUIView = next as? 
UIView { + return nextUIView.traverseResponderChainForUIViewController() + } else { + return nil } + } } diff --git a/ios/VideoCaching/RCTVideoCachingHandler.swift b/ios/VideoCaching/RCTVideoCachingHandler.swift index db3dc4cd06..6957c0c056 100644 --- a/ios/VideoCaching/RCTVideoCachingHandler.swift +++ b/ios/VideoCaching/RCTVideoCachingHandler.swift @@ -1,87 +1,84 @@ -import Foundation import AVFoundation import DVAssetLoaderDelegate +import Foundation import Promises class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate { - - private var _videoCache:RCTVideoCache! = RCTVideoCache.sharedInstance() - var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)? - - override init() { - super.init() - } - - func shouldCache(source: VideoSource, textTracks:[TextTrack]?) -> Bool { - if source.isNetwork && source.shouldCache && ((textTracks == nil) || (textTracks!.count == 0)) { - /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying - * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. - * Until this is fixed, we need to bypass caching when text tracks are specified. - */ - DebugLog("Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") - return true - } - return false - } - - func playerItemForSourceUsingCache(uri:String!, assetOptions options:NSDictionary!) -> Promise { - let url = URL(string: uri) - return getItemForUri(uri) - .then{ [weak self] (videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?) -> AVPlayerItem in - guard let self = self, let playerItemPrepareText = self.playerItemPrepareText else {throw NSError(domain: "", code: 0, userInfo: nil)} - switch (videoCacheStatus) { - case .missingFileExtension: - DebugLog("Could not generate cache key for uri '\(uri)'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") - let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any]) - return playerItemPrepareText(asset, options, "") - - case .unsupportedFileExtension: - DebugLog("Could not generate cache key for uri '\(uri)'. The file extension of that uri is currently not supported. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") - let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any]) - return playerItemPrepareText(asset, options, "") - - default: - if let cachedAsset = cachedAsset { - DebugLog("Playing back uri '\(uri)' from cache") - // See note in playerItemForSource about not being able to support text tracks & caching - return AVPlayerItem(asset: cachedAsset) - } - } - - let asset:DVURLAsset! = DVURLAsset(url:url, options:options as! [String : Any], networkTimeout:10000) - asset.loaderDelegate = self - - /* More granular code to have control over the DVURLAsset - let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url) - resourceLoaderDelegate.delegate = self - let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false) - components?.scheme = DVAssetLoaderDelegate.scheme() - var asset: AVURLAsset? 
= nil - if let url = components?.url { - asset = AVURLAsset(url: url, options: options) - } - asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main) - */ - - return AVPlayerItem(asset: asset) - } + private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance() + var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)? + + override init() { + super.init() + } + + func shouldCache(source: VideoSource, textTracks: [TextTrack]?) -> Bool { + if source.isNetwork && source.shouldCache && ((textTracks == nil) || (textTracks!.isEmpty)) { + /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying + * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. + * Until this is fixed, we need to bypass caching when text tracks are specified. + */ + DebugLog("Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") + return true } + return false + } + + func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise { + let url = URL(string: uri) + return getItemForUri(uri) + .then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> AVPlayerItem in + guard let self = self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) } + switch videoCacheStatus { + case .missingFileExtension: + DebugLog("Could not generate cache key for uri '\(uri)'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") + let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) + return playerItemPrepareText(asset, options, "") - func getItemForUri(_ uri:String) -> Promise<(videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?)> { - return Promise<(videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?)> { fulfill, reject in - self._videoCache.getItemForUri(uri, withCallback:{ (videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?) in - fulfill((videoCacheStatus, cachedAsset)) - }) + case .unsupportedFileExtension: + DebugLog("Could not generate cache key for uri '\(uri)'. The file extension of that uri is currently not supported. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") + let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) + return playerItemPrepareText(asset, options, "") + + default: + if let cachedAsset = cachedAsset { + DebugLog("Playing back uri '\(uri)' from cache") + // See note in playerItemForSource about not being able to support text tracks & caching + return AVPlayerItem(asset: cachedAsset) + } } + + let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000) + asset.loaderDelegate = self + + /* More granular code to have control over the DVURLAsset + let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url) + resourceLoaderDelegate.delegate = self + let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false) + components?.scheme = DVAssetLoaderDelegate.scheme() + var asset: AVURLAsset? 
= nil + if let url = components?.url { + asset = AVURLAsset(url: url, options: options) + } + asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main) + */ + + return AVPlayerItem(asset: asset) + } + } + + func getItemForUri(_ uri: String) -> Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { + return Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { fulfill, _ in + self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in + fulfill((videoCacheStatus, cachedAsset)) + }) } - - // MARK: - DVAssetLoaderDelegate - - func dvAssetLoaderDelegate(_ loaderDelegate: DVAssetLoaderDelegate!, didLoad data: Data!, for url: URL!) { - _videoCache.storeItem(data as Data?, forUri:url.absoluteString, withCallback:{ (success:Bool) in - DebugLog("Cache data stored successfully 🎉") - }) - } - -} + } + // MARK: - DVAssetLoaderDelegate + + func dvAssetLoaderDelegate(_: DVAssetLoaderDelegate!, didLoad data: Data!, for url: URL!) { + _videoCache.storeItem(data as Data?, forUri: url.absoluteString, withCallback: { (_: Bool) in + DebugLog("Cache data stored successfully 🎉") + }) + } +} From b51dbece90f404bbc26d385193d24e9456d66d94 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Sat, 2 Dec 2023 16:40:20 +0100 Subject: [PATCH 02/10] chore: format clang code --- ios/Video/RCTVideo-Bridging-Header.h | 5 +- ios/Video/RCTVideoManager.m | 31 ++-- ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.h | 10 +- ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.m | 25 ++-- ios/VideoCaching/RCTVideoCache.h | 37 +++-- ios/VideoCaching/RCTVideoCache.m | 135 +++++++++--------- 6 files changed, 116 insertions(+), 127 deletions(-) diff --git a/ios/Video/RCTVideo-Bridging-Header.h b/ios/Video/RCTVideo-Bridging-Header.h index 586eec1110..8afe7e3ebb 100644 --- a/ios/Video/RCTVideo-Bridging-Header.h +++ b/ios/Video/RCTVideo-Bridging-Header.h @@ -1,8 +1,7 @@ -#import -#import "RCTVideoSwiftLog.h" #import "RCTEventDispatcher.h" +#import "RCTVideoSwiftLog.h" +#import #if __has_include() #import "RCTVideoCache.h" #endif - diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m index 291868eb71..c64db775b5 100644 --- a/ios/Video/RCTVideoManager.m +++ b/ios/Video/RCTVideoManager.m @@ -1,7 +1,7 @@ -#import #import "React/RCTViewManager.h" +#import -@interface RCT_EXTERN_MODULE(RCTVideoManager, RCTViewManager) +@interface RCT_EXTERN_MODULE (RCTVideoManager, RCTViewManager) RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary); RCT_EXPORT_VIEW_PROPERTY(drm, NSDictionary); @@ -65,27 +65,22 @@ @interface RCT_EXTERN_MODULE(RCTVideoManager, RCTViewManager) RCT_EXPORT_VIEW_PROPERTY(onRestoreUserInterfaceForPictureInPictureStop, RCTDirectEventBlock); RCT_EXPORT_VIEW_PROPERTY(onReceiveAdEvent, RCTDirectEventBlock); -RCT_EXTERN_METHOD(save:(NSDictionary *)options - reactTag:(nonnull NSNumber *)reactTag - resolver:(RCTPromiseResolveBlock)resolve - rejecter:(RCTPromiseRejectBlock)reject) +RCT_EXTERN_METHOD(save + : (NSDictionary*)options reactTag + : (nonnull NSNumber*)reactTag resolver + : (RCTPromiseResolveBlock)resolve rejecter + : (RCTPromiseRejectBlock)reject) -RCT_EXTERN_METHOD(setLicenseResult:(NSString *)license - licenseUrl:(NSString *)licenseUrl - reactTag:(nonnull NSNumber *)reactTag) +RCT_EXTERN_METHOD(setLicenseResult : (NSString*)license licenseUrl : (NSString*)licenseUrl reactTag : (nonnull NSNumber*)reactTag) -RCT_EXTERN_METHOD(setLicenseResultError:(NSString *)error - licenseUrl:(NSString *)licenseUrl - 
reactTag:(nonnull NSNumber *)reactTag) +RCT_EXTERN_METHOD(setLicenseResultError : (NSString*)error licenseUrl : (NSString*)licenseUrl reactTag : (nonnull NSNumber*)reactTag) -RCT_EXTERN_METHOD(setPlayerPauseState:(nonnull NSNumber *)paused - reactTag:(nonnull NSNumber *)reactTag) +RCT_EXTERN_METHOD(setPlayerPauseState : (nonnull NSNumber*)paused reactTag : (nonnull NSNumber*)reactTag) -RCT_EXTERN_METHOD(presentFullscreenPlayer:(nonnull NSNumber *)reactTag) +RCT_EXTERN_METHOD(presentFullscreenPlayer : (nonnull NSNumber*)reactTag) -RCT_EXTERN_METHOD(dismissFullscreenPlayer:(nonnull NSNumber *)reactTag) +RCT_EXTERN_METHOD(dismissFullscreenPlayer : (nonnull NSNumber*)reactTag) -RCT_EXTERN_METHOD(dismissFullscreenPlayer - reactTag:(nonnull NSNumber *)reactTag) +RCT_EXTERN_METHOD(dismissFullscreenPlayer reactTag : (nonnull NSNumber*)reactTag) @end diff --git a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.h b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.h index f9978dc821..2664b7274f 100644 --- a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.h +++ b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.h @@ -2,10 +2,10 @@ @interface RCTVideoSwiftLog : NSObject -+ (void)error:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line; -+ (void)warn:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line; -+ (void)info:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line; -+ (void)log:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line; -+ (void)trace:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line; ++ (void)error:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line; ++ (void)warn:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line; ++ (void)info:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line; ++ (void)log:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line; ++ (void)trace:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line; @end diff --git a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.m b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.m index 61a38b19e3..1d8a670f74 100644 --- a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.m +++ b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.m @@ -4,29 +4,24 @@ @implementation RCTVideoSwiftLog -+ (void)info:(NSString *)message file:(NSString *)file line:(NSUInteger)line -{ - _RCTLogNativeInternal(RCTLogLevelInfo, file.UTF8String, (int)line, @"%@", message); ++ (void)info:(NSString*)message file:(NSString*)file line:(NSUInteger)line { + _RCTLogNativeInternal(RCTLogLevelInfo, file.UTF8String, (int)line, @"%@", message); } -+ (void)warn:(NSString *)message file:(NSString *)file line:(NSUInteger)line -{ - _RCTLogNativeInternal(RCTLogLevelWarning, file.UTF8String, (int)line, @"%@", message); ++ (void)warn:(NSString*)message file:(NSString*)file line:(NSUInteger)line { + _RCTLogNativeInternal(RCTLogLevelWarning, file.UTF8String, (int)line, @"%@", message); } -+ (void)error:(NSString *)message file:(NSString *)file line:(NSUInteger)line -{ - _RCTLogNativeInternal(RCTLogLevelError, file.UTF8String, (int)line, @"%@", message); ++ (void)error:(NSString*)message file:(NSString*)file line:(NSUInteger)line { + _RCTLogNativeInternal(RCTLogLevelError, file.UTF8String, (int)line, @"%@", message); } -+ (void)log:(NSString *)message file:(NSString *)file line:(NSUInteger)line -{ - _RCTLogNativeInternal(RCTLogLevelInfo, 
file.UTF8String, (int)line, @"%@", message); ++ (void)log:(NSString*)message file:(NSString*)file line:(NSUInteger)line { + _RCTLogNativeInternal(RCTLogLevelInfo, file.UTF8String, (int)line, @"%@", message); } -+ (void)trace:(NSString *)message file:(NSString *)file line:(NSUInteger)line -{ - _RCTLogNativeInternal(RCTLogLevelTrace, file.UTF8String, (int)line, @"%@", message); ++ (void)trace:(NSString*)message file:(NSString*)file line:(NSUInteger)line { + _RCTLogNativeInternal(RCTLogLevelTrace, file.UTF8String, (int)line, @"%@", message); } @end diff --git a/ios/VideoCaching/RCTVideoCache.h b/ios/VideoCaching/RCTVideoCache.h index bad999ff08..0681cc3146 100644 --- a/ios/VideoCaching/RCTVideoCache.h +++ b/ios/VideoCaching/RCTVideoCache.h @@ -1,8 +1,8 @@ -#import #import +#import +#import #import #import -#import typedef NS_ENUM(NSUInteger, RCTVideoCacheStatus) { RCTVideoCacheStatusMissingFileExtension, @@ -14,25 +14,24 @@ typedef NS_ENUM(NSUInteger, RCTVideoCacheStatus) { @class SPTPersistentCache; @class SPTPersistentCacheOptions; -@interface RCTVideoCache : NSObject -{ - SPTPersistentCache *videoCache; - NSString * _Nullable cachePath; - NSString * temporaryCachePath; - NSString * _Nullable cacheIdentifier; +@interface RCTVideoCache : NSObject { + SPTPersistentCache* videoCache; + NSString* _Nullable cachePath; + NSString* temporaryCachePath; + NSString* _Nullable cacheIdentifier; } -@property(nonatomic, strong) SPTPersistentCache * _Nullable videoCache; -@property(nonatomic, strong) NSString * cachePath; -@property(nonatomic, strong) NSString * cacheIdentifier; -@property(nonatomic, strong) NSString * temporaryCachePath; +@property(nonatomic, strong) SPTPersistentCache* _Nullable videoCache; +@property(nonatomic, strong) NSString* cachePath; +@property(nonatomic, strong) NSString* cacheIdentifier; +@property(nonatomic, strong) NSString* temporaryCachePath; -+ (RCTVideoCache *)sharedInstance; -- (void)storeItem:(NSData *)data forUri:(NSString *)uri withCallback:(void(^)(BOOL))handler; -- (void)getItemForUri:(NSString *)url withCallback:(void(^)(RCTVideoCacheStatus, AVAsset * _Nullable)) handler; -- (NSURL *)createUniqueTemporaryFileUrl:(NSString * _Nonnull)url withExtension:(NSString * _Nonnull) extension; -- (AVURLAsset *)getItemFromTemporaryStorage:(NSString *)key; -- (BOOL)saveDataToTemporaryStorage:(NSData *)data key:(NSString *)key; -- (void) createTemporaryPath; ++ (RCTVideoCache*)sharedInstance; +- (void)storeItem:(NSData*)data forUri:(NSString*)uri withCallback:(void (^)(BOOL))handler; +- (void)getItemForUri:(NSString*)url withCallback:(void (^)(RCTVideoCacheStatus, AVAsset* _Nullable))handler; +- (NSURL*)createUniqueTemporaryFileUrl:(NSString* _Nonnull)url withExtension:(NSString* _Nonnull)extension; +- (AVURLAsset*)getItemFromTemporaryStorage:(NSString*)key; +- (BOOL)saveDataToTemporaryStorage:(NSData*)data key:(NSString*)key; +- (void)createTemporaryPath; @end diff --git a/ios/VideoCaching/RCTVideoCache.m b/ios/VideoCaching/RCTVideoCache.m index 985b54c8a4..b59589a024 100644 --- a/ios/VideoCaching/RCTVideoCache.m +++ b/ios/VideoCaching/RCTVideoCache.m @@ -7,8 +7,8 @@ @implementation RCTVideoCache @synthesize cacheIdentifier; @synthesize temporaryCachePath; -+ (RCTVideoCache *)sharedInstance { - static RCTVideoCache *sharedInstance = nil; ++ (RCTVideoCache*)sharedInstance { + static RCTVideoCache* sharedInstance = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ sharedInstance = [[self alloc] init]; @@ -20,8 +20,9 @@ - (id)init { if (self = [super init]) { 
self.cacheIdentifier = @"rct.video.cache"; self.temporaryCachePath = [NSTemporaryDirectory() stringByAppendingPathComponent:self.cacheIdentifier]; - self.cachePath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject stringByAppendingPathComponent:self.cacheIdentifier]; - SPTPersistentCacheOptions *options = [SPTPersistentCacheOptions new]; + self.cachePath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject + stringByAppendingPathComponent:self.cacheIdentifier]; + SPTPersistentCacheOptions* options = [SPTPersistentCacheOptions new]; options.cachePath = self.cachePath; options.cacheIdentifier = self.cacheIdentifier; options.defaultExpirationPeriod = 60 * 60 * 24 * 30; @@ -29,7 +30,7 @@ - (id)init { options.sizeConstraintBytes = 1024 * 1024 * 100; options.useDirectorySeparation = NO; #ifdef DEBUG - options.debugOutput = ^(NSString *string) { + options.debugOutput = ^(NSString* string) { NSLog(@"VideoCache: debug %@", string); }; #endif @@ -40,8 +41,8 @@ - (id)init { return self; } -- (void) createTemporaryPath { - NSError *error = nil; +- (void)createTemporaryPath { + NSError* error = nil; BOOL success = [[NSFileManager defaultManager] createDirectoryAtPath:self.temporaryCachePath withIntermediateDirectories:YES attributes:nil @@ -53,97 +54,101 @@ - (void) createTemporaryPath { #endif } -- (void)storeItem:(NSData *)data forUri:(NSString *)uri withCallback:(void(^)(BOOL))handler; +- (void)storeItem:(NSData*)data forUri:(NSString*)uri withCallback:(void (^)(BOOL))handler; { - NSString *key = [self generateCacheKeyForUri:uri]; + NSString* key = [self generateCacheKeyForUri:uri]; if (key == nil) { handler(NO); return; } [self saveDataToTemporaryStorage:data key:key]; - [self.videoCache storeData:data forKey:key locked:NO withCallback:^(SPTPersistentCacheResponse * _Nonnull response) { - if (response.error) { + [self.videoCache storeData:data + forKey:key + locked:NO + withCallback:^(SPTPersistentCacheResponse* _Nonnull response) { + if (response.error) { #ifdef DEBUG - NSLog(@"VideoCache: An error occured while saving the video into the cache: %@", [response.error localizedDescription]); + NSLog(@"VideoCache: An error occured while saving the video into the cache: %@", [response.error localizedDescription]); #endif - handler(NO); - return; - } - handler(YES); - } onQueue:dispatch_get_main_queue()]; + handler(NO); + return; + } + handler(YES); + } + onQueue:dispatch_get_main_queue()]; return; } -- (AVURLAsset *)getItemFromTemporaryStorage:(NSString *)key { - NSString * temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key]; - +- (AVURLAsset*)getItemFromTemporaryStorage:(NSString*)key { + NSString* temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key]; + BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:temporaryFilePath]; if (!fileExists) { return nil; } - NSURL *assetUrl = [[NSURL alloc] initFileURLWithPath:temporaryFilePath]; - AVURLAsset *asset = [AVURLAsset URLAssetWithURL:assetUrl options:nil]; + NSURL* assetUrl = [[NSURL alloc] initFileURLWithPath:temporaryFilePath]; + AVURLAsset* asset = [AVURLAsset URLAssetWithURL:assetUrl options:nil]; return asset; } -- (BOOL)saveDataToTemporaryStorage:(NSData *)data key:(NSString *)key { - NSString *temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key]; +- (BOOL)saveDataToTemporaryStorage:(NSData*)data key:(NSString*)key { + NSString* temporaryFilePath = 
[self.temporaryCachePath stringByAppendingPathComponent:key]; [data writeToFile:temporaryFilePath atomically:YES]; return YES; } -- (NSString *)generateCacheKeyForUri:(NSString *)uri { - NSString *uriWithoutQueryParams = uri; +- (NSString*)generateCacheKeyForUri:(NSString*)uri { + NSString* uriWithoutQueryParams = uri; // parse file extension if ([uri rangeOfString:@"?"].location != NSNotFound) { - NSArray * components = [uri componentsSeparatedByString:@"?"]; + NSArray* components = [uri componentsSeparatedByString:@"?"]; uriWithoutQueryParams = [components objectAtIndex:0]; } - NSString * pathExtension = [uriWithoutQueryParams pathExtension]; - NSArray * supportedExtensions = @[@"m4v", @"mp4", @"mov"]; + NSString* pathExtension = [uriWithoutQueryParams pathExtension]; + NSArray* supportedExtensions = @[ @"m4v", @"mp4", @"mov" ]; if ([pathExtension isEqualToString:@""]) { - NSDictionary *userInfo = @{ - NSLocalizedDescriptionKey: NSLocalizedString(@"Missing file extension.", nil), - NSLocalizedFailureReasonErrorKey: NSLocalizedString(@"Missing file extension.", nil), - NSLocalizedRecoverySuggestionErrorKey: NSLocalizedString(@"Missing file extension.", nil) - }; - NSError *error = [NSError errorWithDomain:@"RCTVideoCache" - code:RCTVideoCacheStatusMissingFileExtension userInfo:userInfo]; + NSDictionary* userInfo = @{ + NSLocalizedDescriptionKey : NSLocalizedString(@"Missing file extension.", nil), + NSLocalizedFailureReasonErrorKey : NSLocalizedString(@"Missing file extension.", nil), + NSLocalizedRecoverySuggestionErrorKey : NSLocalizedString(@"Missing file extension.", nil) + }; + NSError* error = [NSError errorWithDomain:@"RCTVideoCache" code:RCTVideoCacheStatusMissingFileExtension userInfo:userInfo]; @throw error; } else if (![supportedExtensions containsObject:pathExtension]) { // Notably, we don't currently support m3u8 (HLS playlists) - NSDictionary *userInfo = @{ - NSLocalizedDescriptionKey: NSLocalizedString(@"Unsupported file extension.", nil), - NSLocalizedFailureReasonErrorKey: NSLocalizedString(@"Unsupported file extension.", nil), - NSLocalizedRecoverySuggestionErrorKey: NSLocalizedString(@"Unsupported file extension.", nil) - }; - NSError *error = [NSError errorWithDomain:@"RCTVideoCache" - code:RCTVideoCacheStatusUnsupportedFileExtension userInfo:userInfo]; + NSDictionary* userInfo = @{ + NSLocalizedDescriptionKey : NSLocalizedString(@"Unsupported file extension.", nil), + NSLocalizedFailureReasonErrorKey : NSLocalizedString(@"Unsupported file extension.", nil), + NSLocalizedRecoverySuggestionErrorKey : NSLocalizedString(@"Unsupported file extension.", nil) + }; + NSError* error = [NSError errorWithDomain:@"RCTVideoCache" code:RCTVideoCacheStatusUnsupportedFileExtension userInfo:userInfo]; @throw error; } return [[self generateHashForUrl:uri] stringByAppendingPathExtension:pathExtension]; } -- (void)getItemForUri:(NSString *)uri withCallback:(void(^)(RCTVideoCacheStatus, AVAsset * _Nullable)) handler { +- (void)getItemForUri:(NSString*)uri withCallback:(void (^)(RCTVideoCacheStatus, AVAsset* _Nullable))handler { @try { - NSString *key = [self generateCacheKeyForUri:uri]; - AVURLAsset * temporaryAsset = [self getItemFromTemporaryStorage:key]; + NSString* key = [self generateCacheKeyForUri:uri]; + AVURLAsset* temporaryAsset = [self getItemFromTemporaryStorage:key]; if (temporaryAsset != nil) { handler(RCTVideoCacheStatusAvailable, temporaryAsset); return; } - - [self.videoCache loadDataForKey:key withCallback:^(SPTPersistentCacheResponse * _Nonnull response) { - if 
(response.record == nil || response.record.data == nil) { - handler(RCTVideoCacheStatusNotAvailable, nil); - return; - } - [self saveDataToTemporaryStorage:response.record.data key:key]; - handler(RCTVideoCacheStatusAvailable, [self getItemFromTemporaryStorage:key]); - } onQueue:dispatch_get_main_queue()]; - } @catch (NSError * err) { + + [self.videoCache loadDataForKey:key + withCallback:^(SPTPersistentCacheResponse* _Nonnull response) { + if (response.record == nil || response.record.data == nil) { + handler(RCTVideoCacheStatusNotAvailable, nil); + return; + } + [self saveDataToTemporaryStorage:response.record.data key:key]; + handler(RCTVideoCacheStatusAvailable, [self getItemFromTemporaryStorage:key]); + } + onQueue:dispatch_get_main_queue()]; + } @catch (NSError* err) { switch (err.code) { case RCTVideoCacheStatusMissingFileExtension: handler(RCTVideoCacheStatusMissingFileExtension, nil); @@ -157,18 +162,14 @@ - (void)getItemForUri:(NSString *)uri withCallback:(void(^)(RCTVideoCacheStatus, } } -- (NSString *)generateHashForUrl:(NSString *)string { - const char *cStr = [string UTF8String]; +- (NSString*)generateHashForUrl:(NSString*)string { + const char* cStr = [string UTF8String]; unsigned char result[CC_MD5_DIGEST_LENGTH]; - CC_MD5( cStr, (CC_LONG)strlen(cStr), result ); - - return [NSString stringWithFormat: - @"%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X", - result[0], result[1], result[2], result[3], - result[4], result[5], result[6], result[7], - result[8], result[9], result[10], result[11], - result[12], result[13], result[14], result[15] - ]; + CC_MD5(cStr, (CC_LONG)strlen(cStr), result); + + return [NSString stringWithFormat:@"%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X", result[0], result[1], result[2], + result[3], result[4], result[5], result[6], result[7], result[8], result[9], result[10], result[11], + result[12], result[13], result[14], result[15]]; } @end From d306154363542bd660a6cd5ef3102c077f32cd67 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Sat, 2 Dec 2023 16:54:15 +0100 Subject: [PATCH 03/10] chore: format kotlin code --- .../com/brentvatne/common/API/ResizeMode.kt | 92 +++---- .../brentvatne/common/API/SubtitleStyle.kt | 57 ++-- .../brentvatne/common/API/TimedMetadata.kt | 8 +- .../java/com/brentvatne/common/API/Track.kt | 19 +- .../com/brentvatne/common/API/VideoTrack.kt | 18 +- .../common/react/VideoEventEmitter.java | 6 +- .../com/brentvatne/common/toolbox/DebugLog.kt | 135 ++++----- .../common/toolbox/ReactBridgeUtils.kt | 259 +++++++++--------- .../exoplayer/AspectRatioFrameLayout.java | 2 +- .../brentvatne/exoplayer/ExoPlayerView.java | 4 +- .../exoplayer/ReactExoplayerView.java | 10 +- .../exoplayer/ReactExoplayerViewManager.java | 4 +- 12 files changed, 309 insertions(+), 305 deletions(-) diff --git a/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt b/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt index bc0b95227f..fd77ea7bcf 100644 --- a/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt +++ b/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt @@ -1,54 +1,54 @@ -package com.brentvatne.common.API +package com.brentvatne.common.api import androidx.annotation.IntDef import java.lang.annotation.Retention import java.lang.annotation.RetentionPolicy internal object ResizeMode { - /** - * Either the width or height is decreased to obtain the desired aspect ratio. 
- */ - const val RESIZE_MODE_FIT = 0 - - /** - * The width is fixed and the height is increased or decreased to obtain the desired aspect ratio. - */ - const val RESIZE_MODE_FIXED_WIDTH = 1 - - /** - * The height is fixed and the width is increased or decreased to obtain the desired aspect ratio. - */ - const val RESIZE_MODE_FIXED_HEIGHT = 2 - - /** - * The height and the width is increased or decreased to fit the size of the view. - */ - const val RESIZE_MODE_FILL = 3 - - /** - * Keeps the aspect ratio but takes up the view's size. - */ - const val RESIZE_MODE_CENTER_CROP = 4 - @JvmStatic - @Mode - fun toResizeMode(ordinal: Int): Int { - return when (ordinal) { - RESIZE_MODE_FIXED_WIDTH -> RESIZE_MODE_FIXED_WIDTH - RESIZE_MODE_FIXED_HEIGHT -> RESIZE_MODE_FIXED_HEIGHT - RESIZE_MODE_FILL -> RESIZE_MODE_FILL - RESIZE_MODE_CENTER_CROP -> RESIZE_MODE_CENTER_CROP - RESIZE_MODE_FIT -> RESIZE_MODE_FIT - else -> RESIZE_MODE_FIT - } + /** + * Either the width or height is decreased to obtain the desired aspect ratio. + */ + const val RESIZE_MODE_FIT = 0 + + /** + * The width is fixed and the height is increased or decreased to obtain the desired aspect ratio. + */ + const val RESIZE_MODE_FIXED_WIDTH = 1 + + /** + * The height is fixed and the width is increased or decreased to obtain the desired aspect ratio. + */ + const val RESIZE_MODE_FIXED_HEIGHT = 2 + + /** + * The height and the width is increased or decreased to fit the size of the view. + */ + const val RESIZE_MODE_FILL = 3 + + /** + * Keeps the aspect ratio but takes up the view's size. + */ + const val RESIZE_MODE_CENTER_CROP = 4 + + @JvmStatic + @Mode + fun toResizeMode(ordinal: Int): Int = + when (ordinal) { + RESIZE_MODE_FIXED_WIDTH -> RESIZE_MODE_FIXED_WIDTH + RESIZE_MODE_FIXED_HEIGHT -> RESIZE_MODE_FIXED_HEIGHT + RESIZE_MODE_FILL -> RESIZE_MODE_FILL + RESIZE_MODE_CENTER_CROP -> RESIZE_MODE_CENTER_CROP + RESIZE_MODE_FIT -> RESIZE_MODE_FIT + else -> RESIZE_MODE_FIT } - @Retention(RetentionPolicy.SOURCE) - @IntDef( - RESIZE_MODE_FIT, - RESIZE_MODE_FIXED_WIDTH, - RESIZE_MODE_FIXED_HEIGHT, - RESIZE_MODE_FILL, - RESIZE_MODE_CENTER_CROP - ) - annotation class Mode -} \ No newline at end of file + @Retention(RetentionPolicy.SOURCE) + @IntDef( + RESIZE_MODE_FIT, + RESIZE_MODE_FIXED_WIDTH, + RESIZE_MODE_FIXED_HEIGHT, + RESIZE_MODE_FILL, + RESIZE_MODE_CENTER_CROP + ) + annotation class Mode +} diff --git a/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt b/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt index 6e9c16f2df..28b971d7dc 100644 --- a/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt +++ b/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt @@ -1,4 +1,4 @@ -package com.brentvatne.common.API +package com.brentvatne.common.api import com.brentvatne.common.toolbox.ReactBridgeUtils import com.facebook.react.bridge.ReadableMap @@ -7,32 +7,33 @@ import com.facebook.react.bridge.ReadableMap * Helper file to parse SubtitleStyle prop and build a dedicated class */ class SubtitleStyle private constructor() { - var fontSize = -1 - private set - var paddingLeft = 0 - private set - var paddingRight = 0 - private set - var paddingTop = 0 - private set - var paddingBottom = 0 - private set + var fontSize = -1 + private set + var paddingLeft = 0 + private set + var paddingRight = 0 + private set + var paddingTop = 0 + private set + var paddingBottom = 0 + private set - companion object { - private const val PROP_FONT_SIZE_TRACK = "fontSize" - private const val PROP_PADDING_BOTTOM = 
"paddingBottom" - private const val PROP_PADDING_TOP = "paddingTop" - private const val PROP_PADDING_LEFT = "paddingLeft" - private const val PROP_PADDING_RIGHT = "paddingRight" - @JvmStatic - fun parse(src: ReadableMap?): SubtitleStyle { - val subtitleStyle = SubtitleStyle() - subtitleStyle.fontSize = ReactBridgeUtils.safeGetInt(src, PROP_FONT_SIZE_TRACK, -1) - subtitleStyle.paddingBottom = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_BOTTOM, 0) - subtitleStyle.paddingTop = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_TOP, 0) - subtitleStyle.paddingLeft = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_LEFT, 0) - subtitleStyle.paddingRight = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_RIGHT, 0) - return subtitleStyle - } + companion object { + private const val PROP_FONT_SIZE_TRACK = "fontSize" + private const val PROP_PADDING_BOTTOM = "paddingBottom" + private const val PROP_PADDING_TOP = "paddingTop" + private const val PROP_PADDING_LEFT = "paddingLeft" + private const val PROP_PADDING_RIGHT = "paddingRight" + + @JvmStatic + fun parse(src: ReadableMap?): SubtitleStyle { + val subtitleStyle = SubtitleStyle() + subtitleStyle.fontSize = ReactBridgeUtils.safeGetInt(src, PROP_FONT_SIZE_TRACK, -1) + subtitleStyle.paddingBottom = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_BOTTOM, 0) + subtitleStyle.paddingTop = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_TOP, 0) + subtitleStyle.paddingLeft = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_LEFT, 0) + subtitleStyle.paddingRight = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_RIGHT, 0) + return subtitleStyle } -} \ No newline at end of file + } +} diff --git a/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt b/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt index 66868f36af..130eb21f16 100644 --- a/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt +++ b/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt @@ -1,10 +1,10 @@ -package com.brentvatne.common.API +package com.brentvatne.common.api /* * class to handle timedEvent retrieved from the stream */ class TimedMetadata(_identifier: String? = null, _value: String? = null) { - var identifier: String? = _identifier - var value: String? = _value -} \ No newline at end of file + var identifier: String? = _identifier + var value: String? = _value +} diff --git a/android/src/main/java/com/brentvatne/common/API/Track.kt b/android/src/main/java/com/brentvatne/common/API/Track.kt index 5bcd03518e..a5a5950ae2 100644 --- a/android/src/main/java/com/brentvatne/common/API/Track.kt +++ b/android/src/main/java/com/brentvatne/common/API/Track.kt @@ -1,14 +1,15 @@ -package com.brentvatne.common.API +package com.brentvatne.common.api /* * internal representation of audio & text tracks */ class Track { - var title: String? = null - var mimeType: String? = null - var language: String? = null - var isSelected = false - // in bps available only on audio tracks - var bitrate = 0 - var index = 0 -} \ No newline at end of file + var title: String? = null + var mimeType: String? = null + var language: String? 
= null + var isSelected = false + + // in bps available only on audio tracks + var bitrate = 0 + var index = 0 +} diff --git a/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt b/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt index 8251467e5f..379a987ec0 100644 --- a/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt +++ b/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt @@ -1,15 +1,15 @@ -package com.brentvatne.common.API +package com.brentvatne.common.api /* * internal representation of audio & text tracks */ class VideoTrack { - var width = 0 - var height = 0 - var bitrate = 0 - var codecs = "" - var id = -1 - var trackId = "" - var isSelected = false -} \ No newline at end of file + var width = 0 + var height = 0 + var bitrate = 0 + var codecs = "" + var id = -1 + var trackId = "" + var isSelected = false +} diff --git a/android/src/main/java/com/brentvatne/common/react/VideoEventEmitter.java b/android/src/main/java/com/brentvatne/common/react/VideoEventEmitter.java index f119cb2f78..31cbc27f61 100644 --- a/android/src/main/java/com/brentvatne/common/react/VideoEventEmitter.java +++ b/android/src/main/java/com/brentvatne/common/react/VideoEventEmitter.java @@ -4,9 +4,9 @@ import android.view.View; -import com.brentvatne.common.API.TimedMetadata; -import com.brentvatne.common.API.Track; -import com.brentvatne.common.API.VideoTrack; +import com.brentvatne.common.api.TimedMetadata; +import com.brentvatne.common.api.Track; +import com.brentvatne.common.api.VideoTrack; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.WritableArray; diff --git a/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt b/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt index a1250f8d7b..be66ede5a4 100644 --- a/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt +++ b/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt @@ -10,86 +10,87 @@ import java.lang.Exception */ object DebugLog { - // log level to display - private var level = Log.WARN - // enable thread display in logs - private var displayThread = true - // add a common prefix for easy filtering - private const val TAG_PREFIX = "RNV" - - @JvmStatic - fun setConfig(_level: Int, _displayThread: Boolean) { - level = _level - displayThread = _displayThread - } + // log level to display + private var level = Log.WARN - @JvmStatic - private fun getTag(tag: String): String { - return TAG_PREFIX + tag - } + // enable thread display in logs + private var displayThread = true - @JvmStatic - private fun getMsg(msg: String): String { - return if (displayThread) { - "[" + Thread.currentThread().name + "] " + msg - } else msg - } + // add a common prefix for easy filtering + private const val TAG_PREFIX = "RNV" - @JvmStatic - fun v(tag: String, msg: String) { - if (level <= Log.VERBOSE) Log.v(getTag(tag), getMsg(msg)) - } + @JvmStatic + fun setConfig(_level: Int, _displayThread: Boolean) { + level = _level + displayThread = _displayThread + } - @JvmStatic - fun d(tag: String, msg: String) { - if (level <= Log.DEBUG) Log.d(getTag(tag), getMsg(msg)) - } + @JvmStatic + private fun getTag(tag: String): String = TAG_PREFIX + tag - @JvmStatic - fun i(tag: String, msg: String) { - if (level <= Log.INFO) Log.i(getTag(tag), getMsg(msg)) + @JvmStatic + private fun getMsg(msg: String): String = + if (displayThread) { + "[" + Thread.currentThread().name + "] " + msg + } else { + msg } - @JvmStatic - fun 
w(tag: String, msg: String) { - if (level <= Log.WARN) Log.w(getTag(tag), getMsg(msg)) - } + @JvmStatic + fun v(tag: String, msg: String) { + if (level <= Log.VERBOSE) Log.v(getTag(tag), getMsg(msg)) + } - @JvmStatic - fun e(tag: String, msg: String) { - if (level <= Log.ERROR) Log.e(getTag(tag), getMsg(msg)) - } + @JvmStatic + fun d(tag: String, msg: String) { + if (level <= Log.DEBUG) Log.d(getTag(tag), getMsg(msg)) + } + + @JvmStatic + fun i(tag: String, msg: String) { + if (level <= Log.INFO) Log.i(getTag(tag), getMsg(msg)) + } + + @JvmStatic + fun w(tag: String, msg: String) { + if (level <= Log.WARN) Log.w(getTag(tag), getMsg(msg)) + } + + @JvmStatic + fun e(tag: String, msg: String) { + if (level <= Log.ERROR) Log.e(getTag(tag), getMsg(msg)) + } - @JvmStatic - fun wtf(tag: String, msg: String) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) { - Log.wtf(getTag(tag), "--------------->" + getMsg(msg)) - } else { - Log.e(getTag(tag), "--------------->" + getMsg(msg)) - } - printCallStack() + @JvmStatic + fun wtf(tag: String, msg: String) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) { + Log.wtf(getTag(tag), "--------------->" + getMsg(msg)) + } else { + Log.e(getTag(tag), "--------------->" + getMsg(msg)) } + printCallStack() + } - @JvmStatic - fun printCallStack() { - if (level <= Log.VERBOSE) { - val e = Exception() - e.printStackTrace() - } + @JvmStatic + fun printCallStack() { + if (level <= Log.VERBOSE) { + val e = Exception() + e.printStackTrace() } + } - // Additionnal thread safety checkers - @JvmStatic - fun checkUIThread(tag: String, msg: String) { - if (Thread.currentThread().name != "main") { - wtf(tag, "------------------------>" + getMsg(msg)) - } + // Additionnal thread safety checkers + @JvmStatic + fun checkUIThread(tag: String, msg: String) { + if (Thread.currentThread().name != "main") { + wtf(tag, "------------------------>" + getMsg(msg)) } + } - @JvmStatic - fun checkNotUIThread(tag: String, msg: String) { - if (Thread.currentThread().name == "main") { - wtf(tag, "------------------------>" + getMsg(msg)) - } + @JvmStatic + fun checkNotUIThread(tag: String, msg: String) { + if (Thread.currentThread().name == "main") { + wtf(tag, "------------------------>" + getMsg(msg)) } -} \ No newline at end of file + } +} diff --git a/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt b/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt index 1424bbae1a..4df0835538 100644 --- a/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt +++ b/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt @@ -1,8 +1,8 @@ package com.brentvatne.common.toolbox import com.facebook.react.bridge.Dynamic -import com.facebook.react.bridge.ReadableMap import com.facebook.react.bridge.ReadableArray +import com.facebook.react.bridge.ReadableMap import java.util.HashMap /* @@ -11,132 +11,133 @@ import java.util.HashMap */ object ReactBridgeUtils { - @JvmStatic - fun safeGetString(map: ReadableMap?, key: String?, fallback: String?): String? { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getString(key) else fallback - } - - @JvmStatic - fun safeGetString(map: ReadableMap?, key: String?): String? { - return safeGetString(map, key, null) - } - - @JvmStatic - fun safeGetDynamic(map: ReadableMap?, key: String?, fallback: Dynamic?): Dynamic? { - return if (map != null && map.hasKey(key!!) 
&& !map.isNull(key)) map.getDynamic(key) else fallback - } - - @JvmStatic - fun safeGetDynamic(map: ReadableMap?, key: String?): Dynamic? { - return safeGetDynamic(map, key, null) - } - - @JvmStatic - fun safeGetBool(map: ReadableMap?, key: String?, fallback: Boolean): Boolean { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getBoolean(key) else fallback - } - - @JvmStatic - fun safeGetMap(map: ReadableMap?, key: String?): ReadableMap? { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getMap(key) else null - } - - @JvmStatic - fun safeGetArray(map: ReadableMap?, key: String?): ReadableArray? { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getArray(key) else null - } - - @JvmStatic - fun safeGetInt(map: ReadableMap?, key: String?, fallback: Int): Int { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getInt(key) else fallback - } - - @JvmStatic - fun safeGetInt(map: ReadableMap?, key: String?): Int { - return safeGetInt(map, key, 0); - } - - @JvmStatic - fun safeGetDouble(map: ReadableMap?, key: String?, fallback: Double): Double { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDouble(key) else fallback - } - @JvmStatic - fun safeGetDouble(map: ReadableMap?, key: String?): Double { - return safeGetDouble(map, key, 0.0); - } - /** - * toStringMap converts a [ReadableMap] into a HashMap. - * - * @param readableMap The ReadableMap to be conveted. - * @return A HashMap containing the data that was in the ReadableMap. - * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' - */ - @JvmStatic - fun toStringMap(readableMap: ReadableMap?): Map? { - if (readableMap == null) return null - val iterator = readableMap.keySetIterator() - if (!iterator.hasNextKey()) return null - val result: MutableMap = HashMap() - while (iterator.hasNextKey()) { - val key = iterator.nextKey() - result[key] = readableMap.getString(key) - } - return result - } - - /** - * toIntMap converts a [ReadableMap] into a HashMap. - * - * @param readableMap The ReadableMap to be conveted. - * @return A HashMap containing the data that was in the ReadableMap. - * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' - */ - @JvmStatic - fun toIntMap(readableMap: ReadableMap?): Map? { - if (readableMap == null) return null - val iterator = readableMap.keySetIterator() - if (!iterator.hasNextKey()) return null - val result: MutableMap = HashMap() - while (iterator.hasNextKey()) { - val key = iterator.nextKey() - result[key] = readableMap.getInt(key) - } - return result - } - - @JvmStatic - fun safeStringEquals(str1: String?, str2: String?): Boolean { - if (str1 == null && str2 == null) return true // both are null - return if (str1 == null || str2 == null) false else str1 == str2 // only 1 is null - } - - @JvmStatic - fun safeStringArrayEquals(str1: Array?, str2: Array?): Boolean { - if (str1 == null && str2 == null) return true // both are null - if (str1 == null || str2 == null) return false // only 1 is null - if (str1.size != str2.size) return false // only 1 is null - for (i in str1.indices) { - if (str1[i] == str2[i]) // standard check - return false - } - return true - } - - @JvmStatic - fun safeStringMapEquals( - first: Map?, - second: Map? 
- ): Boolean { - if (first == null && second == null) return true // both are null - if (first == null || second == null) return false // only 1 is null - if (first.size != second.size) { - return false - } - for (key in first.keys) { - if (!safeStringEquals(first[key], second[key])) { - return false - } - } - return true - } + @JvmStatic + fun safeGetString(map: ReadableMap?, key: String?, fallback: String?): String? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getString(key) else fallback + } + + @JvmStatic + fun safeGetString(map: ReadableMap?, key: String?): String? { + return safeGetString(map, key, null) + } + + @JvmStatic + fun safeGetDynamic(map: ReadableMap?, key: String?, fallback: Dynamic?): Dynamic? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDynamic(key) else fallback + } + + @JvmStatic + fun safeGetDynamic(map: ReadableMap?, key: String?): Dynamic? { + return safeGetDynamic(map, key, null) + } + + @JvmStatic + fun safeGetBool(map: ReadableMap?, key: String?, fallback: Boolean): Boolean { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getBoolean(key) else fallback + } + + @JvmStatic + fun safeGetMap(map: ReadableMap?, key: String?): ReadableMap? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getMap(key) else null + } + + @JvmStatic + fun safeGetArray(map: ReadableMap?, key: String?): ReadableArray? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getArray(key) else null + } + + @JvmStatic + fun safeGetInt(map: ReadableMap?, key: String?, fallback: Int): Int { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getInt(key) else fallback + } + + @JvmStatic + fun safeGetInt(map: ReadableMap?, key: String?): Int { + return safeGetInt(map, key, 0) + } + + @JvmStatic + fun safeGetDouble(map: ReadableMap?, key: String?, fallback: Double): Double { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDouble(key) else fallback + } + + @JvmStatic + fun safeGetDouble(map: ReadableMap?, key: String?): Double { + return safeGetDouble(map, key, 0.0) + } + + /** + * toStringMap converts a [ReadableMap] into a HashMap. + * + * @param readableMap The ReadableMap to be conveted. + * @return A HashMap containing the data that was in the ReadableMap. + * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' + */ + @JvmStatic + fun toStringMap(readableMap: ReadableMap?): Map? { + if (readableMap == null) return null + val iterator = readableMap.keySetIterator() + if (!iterator.hasNextKey()) return null + val result: MutableMap = HashMap() + while (iterator.hasNextKey()) { + val key = iterator.nextKey() + result[key] = readableMap.getString(key) + } + return result + } + + /** + * toIntMap converts a [ReadableMap] into a HashMap. + * + * @param readableMap The ReadableMap to be conveted. + * @return A HashMap containing the data that was in the ReadableMap. + * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' + */ + @JvmStatic + fun toIntMap(readableMap: ReadableMap?): Map? 
{ + if (readableMap == null) return null + val iterator = readableMap.keySetIterator() + if (!iterator.hasNextKey()) return null + val result: MutableMap = HashMap() + while (iterator.hasNextKey()) { + val key = iterator.nextKey() + result[key] = readableMap.getInt(key) + } + return result + } + + @JvmStatic + fun safeStringEquals(str1: String?, str2: String?): Boolean { + if (str1 == null && str2 == null) return true // both are null + return if (str1 == null || str2 == null) false else str1 == str2 // only 1 is null + } + + @JvmStatic + fun safeStringArrayEquals(str1: Array?, str2: Array?): Boolean { + if (str1 == null && str2 == null) return true // both are null + if (str1 == null || str2 == null) return false // only 1 is null + if (str1.size != str2.size) return false // only 1 is null + for (i in str1.indices) { + if (str1[i] == str2[i]) { + // standard check + return false + } + } + return true + } + + @JvmStatic + fun safeStringMapEquals(first: Map?, second: Map?): Boolean { + if (first == null && second == null) return true // both are null + if (first == null || second == null) return false // only 1 is null + if (first.size != second.size) { + return false + } + for (key in first.keys) { + if (!safeStringEquals(first[key], second[key])) { + return false + } + } + return true + } } diff --git a/android/src/main/java/com/brentvatne/exoplayer/AspectRatioFrameLayout.java b/android/src/main/java/com/brentvatne/exoplayer/AspectRatioFrameLayout.java index e408531987..261f185ddb 100644 --- a/android/src/main/java/com/brentvatne/exoplayer/AspectRatioFrameLayout.java +++ b/android/src/main/java/com/brentvatne/exoplayer/AspectRatioFrameLayout.java @@ -19,7 +19,7 @@ import android.util.AttributeSet; import android.widget.FrameLayout; -import com.brentvatne.common.API.ResizeMode; +import com.brentvatne.common.api.ResizeMode; /** * A {@link FrameLayout} that resizes itself to match a specified aspect ratio. 
diff --git a/android/src/main/java/com/brentvatne/exoplayer/ExoPlayerView.java b/android/src/main/java/com/brentvatne/exoplayer/ExoPlayerView.java index 895b472f27..8d9a7071d0 100644 --- a/android/src/main/java/com/brentvatne/exoplayer/ExoPlayerView.java +++ b/android/src/main/java/com/brentvatne/exoplayer/ExoPlayerView.java @@ -25,8 +25,8 @@ import android.view.ViewGroup; import android.widget.FrameLayout; -import com.brentvatne.common.API.ResizeMode; -import com.brentvatne.common.API.SubtitleStyle; +import com.brentvatne.common.api.ResizeMode; +import com.brentvatne.common.api.SubtitleStyle; import java.util.List; diff --git a/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java b/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java index f8c2cc957f..fef6d2479a 100644 --- a/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java +++ b/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java @@ -91,11 +91,11 @@ import androidx.media3.extractor.metadata.id3.TextInformationFrame; import androidx.media3.ui.LegacyPlayerControlView; -import com.brentvatne.common.API.ResizeMode; -import com.brentvatne.common.API.SubtitleStyle; -import com.brentvatne.common.API.TimedMetadata; -import com.brentvatne.common.API.Track; -import com.brentvatne.common.API.VideoTrack; +import com.brentvatne.common.api.ResizeMode; +import com.brentvatne.common.api.SubtitleStyle; +import com.brentvatne.common.api.TimedMetadata; +import com.brentvatne.common.api.Track; +import com.brentvatne.common.api.VideoTrack; import com.brentvatne.common.react.VideoEventEmitter; import com.brentvatne.common.toolbox.DebugLog; import com.brentvatne.react.R; diff --git a/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java b/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java index 7cf6cbb24d..1bd2398c8f 100644 --- a/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java +++ b/android/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java @@ -10,8 +10,8 @@ import androidx.media3.datasource.RawResourceDataSource; import androidx.media3.exoplayer.DefaultLoadControl; -import com.brentvatne.common.API.ResizeMode; -import com.brentvatne.common.API.SubtitleStyle; +import com.brentvatne.common.api.ResizeMode; +import com.brentvatne.common.api.SubtitleStyle; import com.brentvatne.common.react.VideoEventEmitter; import com.brentvatne.common.toolbox.DebugLog; import com.brentvatne.common.toolbox.ReactBridgeUtils; From d15dcb9b29377604c2f8b2b4cfaf08768c6e4847 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Sat, 2 Dec 2023 17:35:31 +0100 Subject: [PATCH 04/10] chore: update swift linters config --- ios/.swiftformat | 4 +++- ios/.swiftlint.yml | 3 +++ ios/Video/Features/RCTVideoDRM.swift | 3 ++- ios/Video/RCTVideo.swift | 33 ++++++++++++++++++---------- 4 files changed, 30 insertions(+), 13 deletions(-) diff --git a/ios/.swiftformat b/ios/.swiftformat index 0bdcb6b7cf..40538bc81a 100644 --- a/ios/.swiftformat +++ b/ios/.swiftformat @@ -10,4 +10,6 @@ --enable markTypes ---enable isEmpty \ No newline at end of file +--enable isEmpty + +--funcattributes "prev-line" \ No newline at end of file diff --git a/ios/.swiftlint.yml b/ios/.swiftlint.yml index 7d51a163b3..a9a7640100 100644 --- a/ios/.swiftlint.yml +++ b/ios/.swiftlint.yml @@ -6,6 +6,9 @@ disabled_rules: - file_length - cyclomatic_complexity - function_body_length + # TODO: Remove this once all force casts are removed + - force_cast + 
opt_in_rules: - contains_over_filter_count - contains_over_filter_is_empty diff --git a/ios/Video/Features/RCTVideoDRM.swift b/ios/Video/Features/RCTVideoDRM.swift index 78cc7a0be5..5b19aa1e81 100644 --- a/ios/Video/Features/RCTVideoDRM.swift +++ b/ios/Video/Features/RCTVideoDRM.swift @@ -2,7 +2,8 @@ import AVFoundation import Promises struct RCTVideoDRM { - @available(*, unavailable) private init() {} + @available(*, unavailable) + private init() {} static func fetchLicense( licenseServer: String, diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift index 1c46a8c0d2..5e4bc9fac7 100644 --- a/ios/Video/RCTVideo.swift +++ b/ios/Video/RCTVideo.swift @@ -117,11 +117,13 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH @objc var onGetLicense: RCTDirectEventBlock? @objc var onReceiveAdEvent: RCTDirectEventBlock? - @objc func _onPictureInPictureStatusChanged() { + @objc + func _onPictureInPictureStatusChanged() { onPictureInPictureStatusChanged?(["isActive": NSNumber(value: true)]) } - @objc func _onRestoreUserInterfaceForPictureInPictureStop() { + @objc + func _onRestoreUserInterfaceForPictureInPictureStop() { onPictureInPictureStatusChanged?(["isActive": NSNumber(value: false)]) } @@ -196,14 +198,16 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH // MARK: - App lifecycle handlers - @objc func applicationWillResignActive(notification _: NSNotification!) { + @objc + func applicationWillResignActive(notification _: NSNotification!) { if _playInBackground || _playWhenInactive || _paused { return } _player?.pause() _player?.rate = 0.0 } - @objc func applicationDidBecomeActive(notification _: NSNotification!) { + @objc + func applicationDidBecomeActive(notification _: NSNotification!) { if _playInBackground || _playWhenInactive || _paused { return } // Resume the player or any other tasks that should continue when the app becomes active. @@ -211,7 +215,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH _player?.rate = _rate } - @objc func applicationDidEnterBackground(notification _: NSNotification!) { + @objc + func applicationDidEnterBackground(notification _: NSNotification!) { if !_playInBackground { // Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html _playerLayer?.player = nil @@ -219,7 +224,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } } - @objc func applicationWillEnterForeground(notification _: NSNotification!) { + @objc + func applicationWillEnterForeground(notification _: NSNotification!) { self.applyModifiers() if !_playInBackground { _playerLayer?.player = _player @@ -229,7 +235,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH // MARK: - Audio events - @objc func audioRouteChanged(notification: NSNotification!) { + @objc + func audioRouteChanged(notification: NSNotification!) { if let userInfo = notification.userInfo { let reason: AVAudioSession.RouteChangeReason! = userInfo[AVAudioSessionRouteChangeReasonKey] as? AVAudioSession.RouteChangeReason // let previousRoute:NSNumber! = userInfo[AVAudioSessionRouteChangePreviousRouteKey] as? NSNumber @@ -1303,7 +1310,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } } - @objc func handleDidFailToFinishPlaying(notification: NSNotification!) { + @objc + func handleDidFailToFinishPlaying(notification: NSNotification!) { let error: NSError! 
= notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError onVideoError?( [ @@ -1318,12 +1326,14 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH ]) } - @objc func handlePlaybackStalled(notification _: NSNotification!) { + @objc + func handlePlaybackStalled(notification _: NSNotification!) { onPlaybackStalled?(["target": reactTag as Any]) _playbackStalled = true } - @objc func handlePlayerItemDidReachEnd(notification: NSNotification!) { + @objc + func handlePlayerItemDidReachEnd(notification: NSNotification!) { onVideoEnd?(["target": reactTag as Any]) #if USE_GOOGLE_IMA if notification.object as? AVPlayerItem == _player?.currentItem { @@ -1340,7 +1350,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } } - @objc func handleAVPlayerAccess(notification: NSNotification!) { + @objc + func handleAVPlayerAccess(notification: NSNotification!) { let accessLog: AVPlayerItemAccessLog! = (notification.object as! AVPlayerItem).accessLog() let lastEvent: AVPlayerItemAccessLogEvent! = accessLog.events.last From 14d4c8db3a68d184540b1572659c1dd180fea289 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Sun, 3 Dec 2023 13:48:57 +0100 Subject: [PATCH 05/10] chore(ios/swift): format line length --- ios/.swiftformat | 3 ++- ios/Video/Features/RCTPictureInPicture.swift | 5 +++- ios/Video/Features/RCTPlayerObserver.swift | 24 +++++++++++++++---- ios/Video/Features/RCTPlayerOperations.swift | 5 +++- ios/Video/Features/RCTVideoDRM.swift | 20 +++++++++++++--- ios/Video/Features/RCTVideoUtils.swift | 16 +++++++++---- ios/Video/RCTVideo.swift | 19 ++++++++++----- ios/VideoCaching/RCTVideoCachingHandler.swift | 19 ++++++++++++--- 8 files changed, 87 insertions(+), 24 deletions(-) diff --git a/ios/.swiftformat b/ios/.swiftformat index 40538bc81a..6934ac985a 100644 --- a/ios/.swiftformat +++ b/ios/.swiftformat @@ -12,4 +12,5 @@ --enable isEmpty ---funcattributes "prev-line" \ No newline at end of file +--funcattributes "prev-line" +--maxwidth 160 \ No newline at end of file diff --git a/ios/Video/Features/RCTPictureInPicture.swift b/ios/Video/Features/RCTPictureInPicture.swift index 1af10965bc..1cf2661ff1 100644 --- a/ios/Video/Features/RCTPictureInPicture.swift +++ b/ios/Video/Features/RCTPictureInPicture.swift @@ -29,7 +29,10 @@ import React _onPictureInPictureStatusChanged() } - func pictureInPictureController(_: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) { + func pictureInPictureController( + _: AVPictureInPictureController, + restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void + ) { guard let _onRestoreUserInterfaceForPictureInPictureStop = _onRestoreUserInterfaceForPictureInPictureStop else { return } _onRestoreUserInterfaceForPictureInPictureStop() diff --git a/ios/Video/Features/RCTPlayerObserver.swift b/ios/Video/Features/RCTPlayerObserver.swift index aa1280fbf8..3edad3bc67 100644 --- a/ios/Video/Features/RCTPlayerObserver.swift +++ b/ios/Video/Features/RCTPlayerObserver.swift @@ -116,8 +116,16 @@ class RCTPlayerObserver: NSObject { func addPlayerItemObservers() { guard let playerItem = playerItem, let _handlers = _handlers else { return } _playerItemStatusObserver = playerItem.observe(\.status, options: [.new, .old], changeHandler: _handlers.handlePlayerItemStatusChange) - _playerPlaybackBufferEmptyObserver = playerItem.observe(\.isPlaybackBufferEmpty, options: 
[.new, .old], changeHandler: _handlers.handlePlaybackBufferKeyEmpty) - _playerPlaybackLikelyToKeepUpObserver = playerItem.observe(\.isPlaybackLikelyToKeepUp, options: [.new, .old], changeHandler: _handlers.handlePlaybackLikelyToKeepUp) + _playerPlaybackBufferEmptyObserver = playerItem.observe( + \.isPlaybackBufferEmpty, + options: [.new, .old], + changeHandler: _handlers.handlePlaybackBufferKeyEmpty + ) + _playerPlaybackLikelyToKeepUpObserver = playerItem.observe( + \.isPlaybackLikelyToKeepUp, + options: [.new, .old], + changeHandler: _handlers.handlePlaybackLikelyToKeepUp + ) _playerTimedMetadataObserver = playerItem.observe(\.timedMetadata, options: [.new], changeHandler: _handlers.handleTimeMetadataChange) } @@ -131,9 +139,17 @@ class RCTPlayerObserver: NSObject { func addPlayerViewControllerObservers() { guard let playerViewController = playerViewController, let _handlers = _handlers else { return } - _playerViewControllerReadyForDisplayObserver = playerViewController.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay) + _playerViewControllerReadyForDisplayObserver = playerViewController.observe( + \.isReadyForDisplay, + options: [.new], + changeHandler: _handlers.handleReadyForDisplay + ) - _playerViewControllerOverlayFrameObserver = playerViewController.contentOverlayView?.observe(\.frame, options: [.new, .old], changeHandler: _handlers.handleViewControllerOverlayViewFrameChange) + _playerViewControllerOverlayFrameObserver = playerViewController.contentOverlayView?.observe( + \.frame, + options: [.new, .old], + changeHandler: _handlers.handleViewControllerOverlayViewFrameChange + ) } func removePlayerViewControllerObservers() { diff --git a/ios/Video/Features/RCTPlayerOperations.swift b/ios/Video/Features/RCTPlayerOperations.swift index 8da49eeba3..14080f3574 100644 --- a/ios/Video/Features/RCTPlayerOperations.swift +++ b/ios/Video/Features/RCTPlayerOperations.swift @@ -221,7 +221,10 @@ enum RCTPlayerOperations { if #available(iOS 16.0, *) { do { debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category to playAndRecord with defaultToSpeaker options.") - try audioSession.setCategory(audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback, options: AVAudioSession.CategoryOptions.defaultToSpeaker) + try audioSession.setCategory( + audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback, + options: AVAudioSession.CategoryOptions.defaultToSpeaker + ) } catch { debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category and options problem. Error: \(error).") } diff --git a/ios/Video/Features/RCTVideoDRM.swift b/ios/Video/Features/RCTVideoDRM.swift index 5b19aa1e81..1373159b9d 100644 --- a/ios/Video/Features/RCTVideoDRM.swift +++ b/ios/Video/Features/RCTVideoDRM.swift @@ -58,7 +58,13 @@ struct RCTVideoDRM { } let spcEncoded = spcData?.base64EncodedString(options: []) - let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes(kCFAllocatorDefault, spcEncoded as? CFString? as! CFString, nil, "?=&+" as CFString, CFStringBuiltInEncodings.UTF8.rawValue) as? String + let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes( + kCFAllocatorDefault, + spcEncoded as? CFString? as! CFString, + nil, + "?=&+" as CFString, + CFStringBuiltInEncodings.UTF8.rawValue + ) as? String let post = String(format: "spc=%@&%@", spcUrlEncoded as! 
CVarArg, contentId) let postData = post.data(using: String.Encoding.utf8, allowLossyConversion: true) request.httpBody = postData @@ -118,7 +124,8 @@ struct RCTVideoDRM { } } - static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?, base64Certificate: Bool?) -> Promise { + static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?, + base64Certificate: Bool?) -> Promise { let contentIdData = contentId?.data(using: .utf8) return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) @@ -135,7 +142,14 @@ struct RCTVideoDRM { } } - static func handleInternalGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, licenseServer: String?, certificateUrl: String?, base64Certificate: Bool?, headers: [String: Any]?) -> Promise { + static func handleInternalGetLicense( + loadingRequest: AVAssetResourceLoadingRequest, + contentId: String?, + licenseServer: String?, + certificateUrl: String?, + base64Certificate: Bool?, + headers: [String: Any]? + ) -> Promise { let url = loadingRequest.request.url guard let contentId = contentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else { diff --git a/ios/Video/Features/RCTVideoUtils.swift b/ios/Video/Features/RCTVideoUtils.swift index f818b529b5..a42dadf950 100644 --- a/ios/Video/Features/RCTVideoUtils.swift +++ b/ios/Video/Features/RCTVideoUtils.swift @@ -23,7 +23,7 @@ enum RCTVideoUtils { } var effectiveTimeRange: CMTimeRange? - for (_, value) in video.loadedTimeRanges.enumerated() { + for value in video.loadedTimeRanges { let timeRange: CMTimeRange = value.timeRangeValue if CMTimeRangeContainsTime(timeRange, time: video.currentTime()) { effectiveTimeRange = timeRange @@ -188,7 +188,10 @@ enum RCTVideoUtils { return mixComposition } - let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid) + let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.video, + preferredTrackID: kCMPersistentTrackID_Invalid + ) try? videoCompTrack.insertTimeRange( CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), of: videoAsset, @@ -196,7 +199,10 @@ enum RCTVideoUtils { ) let audioAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first - let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid) + let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.audio, + preferredTrackID: kCMPersistentTrackID_Invalid + ) try? audioCompTrack.insertTimeRange( CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), of: audioAsset, @@ -245,7 +251,7 @@ enum RCTVideoUtils { } /* - * Create an useless / almost empty VTT file in the list with available tracks. This track gets selected when you give type: "disabled" as the selectedTextTrack + * Create an useless/almost empty VTT file in the list with available tracks. This track gets selected when you give type: "disabled" as the selectedTextTrack * This is needed because there is a bug where sideloaded texttracks cannot be disabled in the AVPlayer. Loading this VTT file instead solves that problem. 
* For more info see: https://github.com/react-native-community/react-native-video/issues/1144 */ @@ -296,7 +302,7 @@ enum RCTVideoUtils { } static func prepareAsset(source: VideoSource) -> (asset: AVURLAsset?, assetOptions: NSMutableDictionary?)? { - guard let sourceUri = source.uri, sourceUri != "" else { return nil } + guard let sourceUri = source.uri, sourceUri.isEmpty else { return nil } var asset: AVURLAsset! let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? "" let url = source.isNetwork || source.isAsset diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift index 5e4bc9fac7..bb62ba40a8 100644 --- a/ios/Video/RCTVideo.swift +++ b/ios/Video/RCTVideo.swift @@ -295,7 +295,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH func setSrc(_ source: NSDictionary!) { let dispatchClosure = { self._source = VideoSource(source) - if self._source?.uri == nil || self._source?.uri == "" { + if self._source?.uri == nil || self._source?.uri.isEmpty { self._player?.replaceCurrentItem(with: nil) return } @@ -931,7 +931,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH // MARK: - RCTVideoPlayerViewControllerDelegate func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) { - if _playerViewController == playerViewController && _fullscreenPlayerPresented, let onVideoFullscreenPlayerWillDismiss = onVideoFullscreenPlayerWillDismiss { + if _playerViewController == playerViewController + && _fullscreenPlayerPresented, + let onVideoFullscreenPlayerWillDismiss = onVideoFullscreenPlayerWillDismiss { _playerObserver.removePlayerViewControllerObservers() onVideoFullscreenPlayerWillDismiss(["target": reactTag as Any]) } @@ -1064,6 +1066,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } _eventDispatcher = nil + // swiftlint:disable:next notification_center_detachment NotificationCenter.default.removeObserver(self) super.removeFromSuperview() @@ -1223,12 +1226,15 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH "error": [ "code": NSNumber(value: (_playerItem.error! as NSError).code), "localizedDescription": _playerItem.error?.localizedDescription == nil ? "" : _playerItem.error?.localizedDescription, - "localizedFailureReason": ((_playerItem.error! as NSError).localizedFailureReason == nil ? "" : (_playerItem.error! as NSError).localizedFailureReason) ?? "", - "localizedRecoverySuggestion": ((_playerItem.error! as NSError).localizedRecoverySuggestion == nil ? "" : (_playerItem.error! as NSError).localizedRecoverySuggestion) ?? "", + "localizedFailureReason": ((_playerItem.error! as NSError).localizedFailureReason == nil ? + "" : (_playerItem.error! as NSError).localizedFailureReason) ?? "", + "localizedRecoverySuggestion": ((_playerItem.error! as NSError).localizedRecoverySuggestion == nil ? + "" : (_playerItem.error! as NSError).localizedRecoverySuggestion) ?? "", "domain": (_playerItem.error as! 
NSError).domain, ], "target": reactTag, - ]) + ] + ) } func handlePlaybackBufferKeyEmpty(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { @@ -1323,7 +1329,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH "domain": (error as NSError).domain, ], "target": reactTag, - ]) + ] + ) } @objc diff --git a/ios/VideoCaching/RCTVideoCachingHandler.swift b/ios/VideoCaching/RCTVideoCachingHandler.swift index 6957c0c056..7a795468f3 100644 --- a/ios/VideoCaching/RCTVideoCachingHandler.swift +++ b/ios/VideoCaching/RCTVideoCachingHandler.swift @@ -17,7 +17,10 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate { * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. * Until this is fixed, we need to bypass caching when text tracks are specified. */ - DebugLog("Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") + DebugLog(""" + Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) return true } return false @@ -30,12 +33,22 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate { guard let self = self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) } switch videoCacheStatus { case .missingFileExtension: - DebugLog("Could not generate cache key for uri '\(uri)'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") + DebugLog(""" + Could not generate cache key for uri '\(uri)'. + It is currently not supported to cache urls that do not include a file extension. + The video file will not be cached. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) return playerItemPrepareText(asset, options, "") case .unsupportedFileExtension: - DebugLog("Could not generate cache key for uri '\(uri)'. The file extension of that uri is currently not supported. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md") + DebugLog(""" + Could not generate cache key for uri '\(uri)'. + The file extension of that uri is currently not supported. + The video file will not be cached. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! 
[String: Any]) return playerItemPrepareText(asset, options, "") From 51a329e358130bbfdbd10a2acffef48dbece18f7 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Sun, 3 Dec 2023 14:39:59 +0100 Subject: [PATCH 06/10] chore(ios): fix linter warnings --- ios/.swiftlint.yml | 3 ++- ios/Video/Features/RCTPlayerOperations.swift | 8 +++----- ios/Video/Features/RCTVideoDRM.swift | 5 +---- ios/Video/Features/RCTVideoUtils.swift | 2 +- ios/Video/RCTVideo.swift | 2 +- ios/Video/RCTVideoPlayerViewControllerDelegate.swift | 2 +- 6 files changed, 9 insertions(+), 13 deletions(-) diff --git a/ios/.swiftlint.yml b/ios/.swiftlint.yml index a9a7640100..bddcfb405a 100644 --- a/ios/.swiftlint.yml +++ b/ios/.swiftlint.yml @@ -6,6 +6,8 @@ disabled_rules: - file_length - cyclomatic_complexity - function_body_length + - function_parameter_count + - empty_string # TODO: Remove this once all force casts are removed - force_cast @@ -16,7 +18,6 @@ opt_in_rules: - contains_over_range_nil_comparison - empty_collection_literal - empty_count - - empty_string - first_where - flatmap_over_map_reduce - last_where diff --git a/ios/Video/Features/RCTPlayerOperations.swift b/ios/Video/Features/RCTPlayerOperations.swift index 14080f3574..be252847df 100644 --- a/ios/Video/Features/RCTPlayerOperations.swift +++ b/ios/Video/Features/RCTPlayerOperations.swift @@ -17,11 +17,9 @@ enum RCTPlayerOperations { // The first few tracks will be audio & video track var firstTextIndex = 0 - for i in 0 ..< trackCount { - if player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? false { - firstTextIndex = i - break - } + for i in 0 ..< trackCount where (player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible)) != nil { + firstTextIndex = i + break } var selectedTrackIndex: Int = RCTVideoUnset diff --git a/ios/Video/Features/RCTVideoDRM.swift b/ios/Video/Features/RCTVideoDRM.swift index 1373159b9d..fa99c7d5bf 100644 --- a/ios/Video/Features/RCTVideoDRM.swift +++ b/ios/Video/Features/RCTVideoDRM.swift @@ -1,10 +1,7 @@ import AVFoundation import Promises -struct RCTVideoDRM { - @available(*, unavailable) - private init() {} - +enum RCTVideoDRM { static func fetchLicense( licenseServer: String, spcData: Data?, diff --git a/ios/Video/Features/RCTVideoUtils.swift b/ios/Video/Features/RCTVideoUtils.swift index a42dadf950..ec63a14cda 100644 --- a/ios/Video/Features/RCTVideoUtils.swift +++ b/ios/Video/Features/RCTVideoUtils.swift @@ -302,7 +302,7 @@ enum RCTVideoUtils { } static func prepareAsset(source: VideoSource) -> (asset: AVURLAsset?, assetOptions: NSMutableDictionary?)? { - guard let sourceUri = source.uri, sourceUri.isEmpty else { return nil } + guard let sourceUri = source.uri, sourceUri != "" else { return nil } var asset: AVURLAsset! let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? "" let url = source.isNetwork || source.isAsset diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift index bb62ba40a8..d7f3c130f6 100644 --- a/ios/Video/RCTVideo.swift +++ b/ios/Video/RCTVideo.swift @@ -295,7 +295,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH func setSrc(_ source: NSDictionary!) 
{ let dispatchClosure = { self._source = VideoSource(source) - if self._source?.uri == nil || self._source?.uri.isEmpty { + if self._source?.uri == nil || self._source?.uri == "" { self._player?.replaceCurrentItem(with: nil) return } diff --git a/ios/Video/RCTVideoPlayerViewControllerDelegate.swift b/ios/Video/RCTVideoPlayerViewControllerDelegate.swift index d2bc0006a7..7d1bbc6c4f 100644 --- a/ios/Video/RCTVideoPlayerViewControllerDelegate.swift +++ b/ios/Video/RCTVideoPlayerViewControllerDelegate.swift @@ -1,7 +1,7 @@ import AVKit import Foundation -protocol RCTVideoPlayerViewControllerDelegate: NSObject { +protocol RCTVideoPlayerViewControllerDelegate: class { func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController) } From e98a2c00104fbef735f6ae1da33d4f383eb1bae0 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Sun, 3 Dec 2023 14:58:52 +0100 Subject: [PATCH 07/10] chore(ci/android): update ktlint version --- .github/workflows/check-android.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-android.yml b/.github/workflows/check-android.yml index b5e4e16568..1c434a17d5 100644 --- a/.github/workflows/check-android.yml +++ b/.github/workflows/check-android.yml @@ -18,7 +18,7 @@ jobs: steps: - uses: actions/checkout@v4 - run: | - curl -sSLO https://github.com/pinterest/ktlint/releases/download/1.0.0/ktlint && chmod a+x ktlint && sudo mv ktlint /usr/local/bin/ + curl -sSLO https://github.com/pinterest/ktlint/releases/download/1.0.1/ktlint && chmod a+x ktlint && sudo mv ktlint /usr/local/bin/ - name: run ktlint working-directory: ./android/ run: | From 12bba637e2257a43864097e63227f1c1ac096135 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Wed, 6 Dec 2023 20:11:37 +0100 Subject: [PATCH 08/10] chore: set indent to 4 --- android/.editorconfig | 2 +- .../com/brentvatne/common/API/ResizeMode.kt | 92 +- .../brentvatne/common/API/SubtitleStyle.kt | 52 +- .../brentvatne/common/API/TimedMetadata.kt | 4 +- .../java/com/brentvatne/common/API/Track.kt | 14 +- .../com/brentvatne/common/API/VideoTrack.kt | 14 +- .../com/brentvatne/common/toolbox/DebugLog.kt | 150 +- .../common/toolbox/ReactBridgeUtils.kt | 258 +- ios/.swiftformat | 2 +- ios/Video/DataStructures/Chapter.swift | 38 +- ios/Video/DataStructures/DRMParams.swift | 50 +- .../SelectedTrackCriteria.swift | 26 +- ios/Video/DataStructures/TextTrack.swift | 38 +- ios/Video/DataStructures/VideoSource.swift | 94 +- ios/Video/Features/RCTIMAAdsManager.swift | 366 +-- ios/Video/Features/RCTPictureInPicture.swift | 108 +- ios/Video/Features/RCTPlayerObserver.swift | 460 +-- ios/Video/Features/RCTPlayerOperations.swift | 420 +-- .../Features/RCTResourceLoaderDelegate.swift | 302 +- ios/Video/Features/RCTVideoDRM.swift | 319 ++- .../Features/RCTVideoErrorHandling.swift | 194 +- ios/Video/Features/RCTVideoSave.swift | 114 +- ios/Video/Features/RCTVideoTVUtils.swift | 74 +- ios/Video/Features/RCTVideoUtils.swift | 583 ++-- ios/Video/RCTVideo.swift | 2542 ++++++++--------- ios/Video/RCTVideoManager.swift | 140 +- ios/Video/RCTVideoPlayerViewController.swift | 66 +- ...RCTVideoPlayerViewControllerDelegate.swift | 4 +- .../RCTVideoSwiftLog/RCTVideoSwiftLog.swift | 16 +- ios/Video/UIView+FindUIViewController.swift | 24 +- ios/VideoCaching/RCTVideoCachingHandler.swift | 154 +- 31 files changed, 3362 insertions(+), 3358 deletions(-) diff --git a/android/.editorconfig b/android/.editorconfig index 
30514ea44c..3f7f912c7c 100644 --- a/android/.editorconfig +++ b/android/.editorconfig @@ -1,6 +1,6 @@ [*.{kt,kts}] indent_style=space -indent_size=2 +indent_size=4 continuation_indent_size=4 insert_final_newline=true max_line_length=160 diff --git a/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt b/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt index fd77ea7bcf..c873a04e0c 100644 --- a/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt +++ b/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt @@ -5,50 +5,50 @@ import java.lang.annotation.Retention import java.lang.annotation.RetentionPolicy internal object ResizeMode { - /** - * Either the width or height is decreased to obtain the desired aspect ratio. - */ - const val RESIZE_MODE_FIT = 0 - - /** - * The width is fixed and the height is increased or decreased to obtain the desired aspect ratio. - */ - const val RESIZE_MODE_FIXED_WIDTH = 1 - - /** - * The height is fixed and the width is increased or decreased to obtain the desired aspect ratio. - */ - const val RESIZE_MODE_FIXED_HEIGHT = 2 - - /** - * The height and the width is increased or decreased to fit the size of the view. - */ - const val RESIZE_MODE_FILL = 3 - - /** - * Keeps the aspect ratio but takes up the view's size. - */ - const val RESIZE_MODE_CENTER_CROP = 4 - - @JvmStatic - @Mode - fun toResizeMode(ordinal: Int): Int = - when (ordinal) { - RESIZE_MODE_FIXED_WIDTH -> RESIZE_MODE_FIXED_WIDTH - RESIZE_MODE_FIXED_HEIGHT -> RESIZE_MODE_FIXED_HEIGHT - RESIZE_MODE_FILL -> RESIZE_MODE_FILL - RESIZE_MODE_CENTER_CROP -> RESIZE_MODE_CENTER_CROP - RESIZE_MODE_FIT -> RESIZE_MODE_FIT - else -> RESIZE_MODE_FIT - } - - @Retention(RetentionPolicy.SOURCE) - @IntDef( - RESIZE_MODE_FIT, - RESIZE_MODE_FIXED_WIDTH, - RESIZE_MODE_FIXED_HEIGHT, - RESIZE_MODE_FILL, - RESIZE_MODE_CENTER_CROP - ) - annotation class Mode + /** + * Either the width or height is decreased to obtain the desired aspect ratio. + */ + const val RESIZE_MODE_FIT = 0 + + /** + * The width is fixed and the height is increased or decreased to obtain the desired aspect ratio. + */ + const val RESIZE_MODE_FIXED_WIDTH = 1 + + /** + * The height is fixed and the width is increased or decreased to obtain the desired aspect ratio. + */ + const val RESIZE_MODE_FIXED_HEIGHT = 2 + + /** + * The height and the width is increased or decreased to fit the size of the view. + */ + const val RESIZE_MODE_FILL = 3 + + /** + * Keeps the aspect ratio but takes up the view's size. 
+ */ + const val RESIZE_MODE_CENTER_CROP = 4 + + @JvmStatic + @Mode + fun toResizeMode(ordinal: Int): Int = + when (ordinal) { + RESIZE_MODE_FIXED_WIDTH -> RESIZE_MODE_FIXED_WIDTH + RESIZE_MODE_FIXED_HEIGHT -> RESIZE_MODE_FIXED_HEIGHT + RESIZE_MODE_FILL -> RESIZE_MODE_FILL + RESIZE_MODE_CENTER_CROP -> RESIZE_MODE_CENTER_CROP + RESIZE_MODE_FIT -> RESIZE_MODE_FIT + else -> RESIZE_MODE_FIT + } + + @Retention(RetentionPolicy.SOURCE) + @IntDef( + RESIZE_MODE_FIT, + RESIZE_MODE_FIXED_WIDTH, + RESIZE_MODE_FIXED_HEIGHT, + RESIZE_MODE_FILL, + RESIZE_MODE_CENTER_CROP + ) + annotation class Mode } diff --git a/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt b/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt index 28b971d7dc..1e32c77ada 100644 --- a/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt +++ b/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt @@ -7,33 +7,33 @@ import com.facebook.react.bridge.ReadableMap * Helper file to parse SubtitleStyle prop and build a dedicated class */ class SubtitleStyle private constructor() { - var fontSize = -1 - private set - var paddingLeft = 0 - private set - var paddingRight = 0 - private set - var paddingTop = 0 - private set - var paddingBottom = 0 - private set + var fontSize = -1 + private set + var paddingLeft = 0 + private set + var paddingRight = 0 + private set + var paddingTop = 0 + private set + var paddingBottom = 0 + private set - companion object { - private const val PROP_FONT_SIZE_TRACK = "fontSize" - private const val PROP_PADDING_BOTTOM = "paddingBottom" - private const val PROP_PADDING_TOP = "paddingTop" - private const val PROP_PADDING_LEFT = "paddingLeft" - private const val PROP_PADDING_RIGHT = "paddingRight" + companion object { + private const val PROP_FONT_SIZE_TRACK = "fontSize" + private const val PROP_PADDING_BOTTOM = "paddingBottom" + private const val PROP_PADDING_TOP = "paddingTop" + private const val PROP_PADDING_LEFT = "paddingLeft" + private const val PROP_PADDING_RIGHT = "paddingRight" - @JvmStatic - fun parse(src: ReadableMap?): SubtitleStyle { - val subtitleStyle = SubtitleStyle() - subtitleStyle.fontSize = ReactBridgeUtils.safeGetInt(src, PROP_FONT_SIZE_TRACK, -1) - subtitleStyle.paddingBottom = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_BOTTOM, 0) - subtitleStyle.paddingTop = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_TOP, 0) - subtitleStyle.paddingLeft = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_LEFT, 0) - subtitleStyle.paddingRight = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_RIGHT, 0) - return subtitleStyle + @JvmStatic + fun parse(src: ReadableMap?): SubtitleStyle { + val subtitleStyle = SubtitleStyle() + subtitleStyle.fontSize = ReactBridgeUtils.safeGetInt(src, PROP_FONT_SIZE_TRACK, -1) + subtitleStyle.paddingBottom = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_BOTTOM, 0) + subtitleStyle.paddingTop = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_TOP, 0) + subtitleStyle.paddingLeft = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_LEFT, 0) + subtitleStyle.paddingRight = ReactBridgeUtils.safeGetInt(src, PROP_PADDING_RIGHT, 0) + return subtitleStyle + } } - } } diff --git a/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt b/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt index 130eb21f16..affc255b3b 100644 --- a/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt +++ b/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt @@ -5,6 +5,6 @@ package com.brentvatne.common.api */ class 
TimedMetadata(_identifier: String? = null, _value: String? = null) { - var identifier: String? = _identifier - var value: String? = _value + var identifier: String? = _identifier + var value: String? = _value } diff --git a/android/src/main/java/com/brentvatne/common/API/Track.kt b/android/src/main/java/com/brentvatne/common/API/Track.kt index a5a5950ae2..bf5f302025 100644 --- a/android/src/main/java/com/brentvatne/common/API/Track.kt +++ b/android/src/main/java/com/brentvatne/common/API/Track.kt @@ -4,12 +4,12 @@ package com.brentvatne.common.api * internal representation of audio & text tracks */ class Track { - var title: String? = null - var mimeType: String? = null - var language: String? = null - var isSelected = false + var title: String? = null + var mimeType: String? = null + var language: String? = null + var isSelected = false - // in bps available only on audio tracks - var bitrate = 0 - var index = 0 + // in bps available only on audio tracks + var bitrate = 0 + var index = 0 } diff --git a/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt b/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt index 379a987ec0..60e5da2610 100644 --- a/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt +++ b/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt @@ -5,11 +5,11 @@ package com.brentvatne.common.api */ class VideoTrack { - var width = 0 - var height = 0 - var bitrate = 0 - var codecs = "" - var id = -1 - var trackId = "" - var isSelected = false + var width = 0 + var height = 0 + var bitrate = 0 + var codecs = "" + var id = -1 + var trackId = "" + var isSelected = false } diff --git a/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt b/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt index be66ede5a4..4b6f252d63 100644 --- a/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt +++ b/android/src/main/java/com/brentvatne/common/toolbox/DebugLog.kt @@ -10,87 +10,87 @@ import java.lang.Exception */ object DebugLog { - // log level to display - private var level = Log.WARN - - // enable thread display in logs - private var displayThread = true - - // add a common prefix for easy filtering - private const val TAG_PREFIX = "RNV" - - @JvmStatic - fun setConfig(_level: Int, _displayThread: Boolean) { - level = _level - displayThread = _displayThread - } - - @JvmStatic - private fun getTag(tag: String): String = TAG_PREFIX + tag - - @JvmStatic - private fun getMsg(msg: String): String = - if (displayThread) { - "[" + Thread.currentThread().name + "] " + msg - } else { - msg + // log level to display + private var level = Log.WARN + + // enable thread display in logs + private var displayThread = true + + // add a common prefix for easy filtering + private const val TAG_PREFIX = "RNV" + + @JvmStatic + fun setConfig(_level: Int, _displayThread: Boolean) { + level = _level + displayThread = _displayThread + } + + @JvmStatic + private fun getTag(tag: String): String = TAG_PREFIX + tag + + @JvmStatic + private fun getMsg(msg: String): String = + if (displayThread) { + "[" + Thread.currentThread().name + "] " + msg + } else { + msg + } + + @JvmStatic + fun v(tag: String, msg: String) { + if (level <= Log.VERBOSE) Log.v(getTag(tag), getMsg(msg)) + } + + @JvmStatic + fun d(tag: String, msg: String) { + if (level <= Log.DEBUG) Log.d(getTag(tag), getMsg(msg)) + } + + @JvmStatic + fun i(tag: String, msg: String) { + if (level <= Log.INFO) Log.i(getTag(tag), getMsg(msg)) + } + + @JvmStatic + fun w(tag: String, msg: String) { 
+ if (level <= Log.WARN) Log.w(getTag(tag), getMsg(msg)) + } + + @JvmStatic + fun e(tag: String, msg: String) { + if (level <= Log.ERROR) Log.e(getTag(tag), getMsg(msg)) } - @JvmStatic - fun v(tag: String, msg: String) { - if (level <= Log.VERBOSE) Log.v(getTag(tag), getMsg(msg)) - } - - @JvmStatic - fun d(tag: String, msg: String) { - if (level <= Log.DEBUG) Log.d(getTag(tag), getMsg(msg)) - } - - @JvmStatic - fun i(tag: String, msg: String) { - if (level <= Log.INFO) Log.i(getTag(tag), getMsg(msg)) - } - - @JvmStatic - fun w(tag: String, msg: String) { - if (level <= Log.WARN) Log.w(getTag(tag), getMsg(msg)) - } - - @JvmStatic - fun e(tag: String, msg: String) { - if (level <= Log.ERROR) Log.e(getTag(tag), getMsg(msg)) - } - - @JvmStatic - fun wtf(tag: String, msg: String) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) { - Log.wtf(getTag(tag), "--------------->" + getMsg(msg)) - } else { - Log.e(getTag(tag), "--------------->" + getMsg(msg)) + @JvmStatic + fun wtf(tag: String, msg: String) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) { + Log.wtf(getTag(tag), "--------------->" + getMsg(msg)) + } else { + Log.e(getTag(tag), "--------------->" + getMsg(msg)) + } + printCallStack() } - printCallStack() - } - - @JvmStatic - fun printCallStack() { - if (level <= Log.VERBOSE) { - val e = Exception() - e.printStackTrace() + + @JvmStatic + fun printCallStack() { + if (level <= Log.VERBOSE) { + val e = Exception() + e.printStackTrace() + } } - } - // Additionnal thread safety checkers - @JvmStatic - fun checkUIThread(tag: String, msg: String) { - if (Thread.currentThread().name != "main") { - wtf(tag, "------------------------>" + getMsg(msg)) + // Additionnal thread safety checkers + @JvmStatic + fun checkUIThread(tag: String, msg: String) { + if (Thread.currentThread().name != "main") { + wtf(tag, "------------------------>" + getMsg(msg)) + } } - } - @JvmStatic - fun checkNotUIThread(tag: String, msg: String) { - if (Thread.currentThread().name == "main") { - wtf(tag, "------------------------>" + getMsg(msg)) + @JvmStatic + fun checkNotUIThread(tag: String, msg: String) { + if (Thread.currentThread().name == "main") { + wtf(tag, "------------------------>" + getMsg(msg)) + } } - } } diff --git a/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt b/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt index 4df0835538..65dc57a5b4 100644 --- a/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt +++ b/android/src/main/java/com/brentvatne/common/toolbox/ReactBridgeUtils.kt @@ -11,133 +11,133 @@ import java.util.HashMap */ object ReactBridgeUtils { - @JvmStatic - fun safeGetString(map: ReadableMap?, key: String?, fallback: String?): String? { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getString(key) else fallback - } - - @JvmStatic - fun safeGetString(map: ReadableMap?, key: String?): String? { - return safeGetString(map, key, null) - } - - @JvmStatic - fun safeGetDynamic(map: ReadableMap?, key: String?, fallback: Dynamic?): Dynamic? { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDynamic(key) else fallback - } - - @JvmStatic - fun safeGetDynamic(map: ReadableMap?, key: String?): Dynamic? { - return safeGetDynamic(map, key, null) - } - - @JvmStatic - fun safeGetBool(map: ReadableMap?, key: String?, fallback: Boolean): Boolean { - return if (map != null && map.hasKey(key!!) 
&& !map.isNull(key)) map.getBoolean(key) else fallback - } - - @JvmStatic - fun safeGetMap(map: ReadableMap?, key: String?): ReadableMap? { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getMap(key) else null - } - - @JvmStatic - fun safeGetArray(map: ReadableMap?, key: String?): ReadableArray? { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getArray(key) else null - } - - @JvmStatic - fun safeGetInt(map: ReadableMap?, key: String?, fallback: Int): Int { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getInt(key) else fallback - } - - @JvmStatic - fun safeGetInt(map: ReadableMap?, key: String?): Int { - return safeGetInt(map, key, 0) - } - - @JvmStatic - fun safeGetDouble(map: ReadableMap?, key: String?, fallback: Double): Double { - return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDouble(key) else fallback - } - - @JvmStatic - fun safeGetDouble(map: ReadableMap?, key: String?): Double { - return safeGetDouble(map, key, 0.0) - } - - /** - * toStringMap converts a [ReadableMap] into a HashMap. - * - * @param readableMap The ReadableMap to be conveted. - * @return A HashMap containing the data that was in the ReadableMap. - * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' - */ - @JvmStatic - fun toStringMap(readableMap: ReadableMap?): Map? { - if (readableMap == null) return null - val iterator = readableMap.keySetIterator() - if (!iterator.hasNextKey()) return null - val result: MutableMap = HashMap() - while (iterator.hasNextKey()) { - val key = iterator.nextKey() - result[key] = readableMap.getString(key) - } - return result - } - - /** - * toIntMap converts a [ReadableMap] into a HashMap. - * - * @param readableMap The ReadableMap to be conveted. - * @return A HashMap containing the data that was in the ReadableMap. - * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' - */ - @JvmStatic - fun toIntMap(readableMap: ReadableMap?): Map? 
{ - if (readableMap == null) return null - val iterator = readableMap.keySetIterator() - if (!iterator.hasNextKey()) return null - val result: MutableMap = HashMap() - while (iterator.hasNextKey()) { - val key = iterator.nextKey() - result[key] = readableMap.getInt(key) - } - return result - } - - @JvmStatic - fun safeStringEquals(str1: String?, str2: String?): Boolean { - if (str1 == null && str2 == null) return true // both are null - return if (str1 == null || str2 == null) false else str1 == str2 // only 1 is null - } - - @JvmStatic - fun safeStringArrayEquals(str1: Array?, str2: Array?): Boolean { - if (str1 == null && str2 == null) return true // both are null - if (str1 == null || str2 == null) return false // only 1 is null - if (str1.size != str2.size) return false // only 1 is null - for (i in str1.indices) { - if (str1[i] == str2[i]) { - // standard check - return false - } - } - return true - } - - @JvmStatic - fun safeStringMapEquals(first: Map?, second: Map?): Boolean { - if (first == null && second == null) return true // both are null - if (first == null || second == null) return false // only 1 is null - if (first.size != second.size) { - return false - } - for (key in first.keys) { - if (!safeStringEquals(first[key], second[key])) { - return false - } - } - return true - } + @JvmStatic + fun safeGetString(map: ReadableMap?, key: String?, fallback: String?): String? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getString(key) else fallback + } + + @JvmStatic + fun safeGetString(map: ReadableMap?, key: String?): String? { + return safeGetString(map, key, null) + } + + @JvmStatic + fun safeGetDynamic(map: ReadableMap?, key: String?, fallback: Dynamic?): Dynamic? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDynamic(key) else fallback + } + + @JvmStatic + fun safeGetDynamic(map: ReadableMap?, key: String?): Dynamic? { + return safeGetDynamic(map, key, null) + } + + @JvmStatic + fun safeGetBool(map: ReadableMap?, key: String?, fallback: Boolean): Boolean { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getBoolean(key) else fallback + } + + @JvmStatic + fun safeGetMap(map: ReadableMap?, key: String?): ReadableMap? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getMap(key) else null + } + + @JvmStatic + fun safeGetArray(map: ReadableMap?, key: String?): ReadableArray? { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getArray(key) else null + } + + @JvmStatic + fun safeGetInt(map: ReadableMap?, key: String?, fallback: Int): Int { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getInt(key) else fallback + } + + @JvmStatic + fun safeGetInt(map: ReadableMap?, key: String?): Int { + return safeGetInt(map, key, 0) + } + + @JvmStatic + fun safeGetDouble(map: ReadableMap?, key: String?, fallback: Double): Double { + return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDouble(key) else fallback + } + + @JvmStatic + fun safeGetDouble(map: ReadableMap?, key: String?): Double { + return safeGetDouble(map, key, 0.0) + } + + /** + * toStringMap converts a [ReadableMap] into a HashMap. + * + * @param readableMap The ReadableMap to be conveted. + * @return A HashMap containing the data that was in the ReadableMap. 
+ * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' + */ + @JvmStatic + fun toStringMap(readableMap: ReadableMap?): Map? { + if (readableMap == null) return null + val iterator = readableMap.keySetIterator() + if (!iterator.hasNextKey()) return null + val result: MutableMap = HashMap() + while (iterator.hasNextKey()) { + val key = iterator.nextKey() + result[key] = readableMap.getString(key) + } + return result + } + + /** + * toIntMap converts a [ReadableMap] into a HashMap. + * + * @param readableMap The ReadableMap to be conveted. + * @return A HashMap containing the data that was in the ReadableMap. + * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java' + */ + @JvmStatic + fun toIntMap(readableMap: ReadableMap?): Map? { + if (readableMap == null) return null + val iterator = readableMap.keySetIterator() + if (!iterator.hasNextKey()) return null + val result: MutableMap = HashMap() + while (iterator.hasNextKey()) { + val key = iterator.nextKey() + result[key] = readableMap.getInt(key) + } + return result + } + + @JvmStatic + fun safeStringEquals(str1: String?, str2: String?): Boolean { + if (str1 == null && str2 == null) return true // both are null + return if (str1 == null || str2 == null) false else str1 == str2 // only 1 is null + } + + @JvmStatic + fun safeStringArrayEquals(str1: Array?, str2: Array?): Boolean { + if (str1 == null && str2 == null) return true // both are null + if (str1 == null || str2 == null) return false // only 1 is null + if (str1.size != str2.size) return false // only 1 is null + for (i in str1.indices) { + if (str1[i] == str2[i]) { + // standard check + return false + } + } + return true + } + + @JvmStatic + fun safeStringMapEquals(first: Map?, second: Map?): Boolean { + if (first == null && second == null) return true // both are null + if (first == null || second == null) return false // only 1 is null + if (first.size != second.size) { + return false + } + for (key in first.keys) { + if (!safeStringEquals(first[key], second[key])) { + return false + } + } + return true + } } diff --git a/ios/.swiftformat b/ios/.swiftformat index 6934ac985a..f18154f318 100644 --- a/ios/.swiftformat +++ b/ios/.swiftformat @@ -1,5 +1,5 @@ --allman false ---indent 2 +--indent 4 --exclude Pods,Generated --disable andOperator diff --git a/ios/Video/DataStructures/Chapter.swift b/ios/Video/DataStructures/Chapter.swift index 39ae0a056b..7fc4e54635 100644 --- a/ios/Video/DataStructures/Chapter.swift +++ b/ios/Video/DataStructures/Chapter.swift @@ -1,24 +1,24 @@ struct Chapter { - let title: String - let uri: String? - let startTime: Double - let endTime: Double + let title: String + let uri: String? + let startTime: Double + let endTime: Double - let json: NSDictionary? + let json: NSDictionary? - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.title = "" - self.uri = nil - self.startTime = 0 - self.endTime = 0 - return + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.title = "" + self.uri = nil + self.startTime = 0 + self.endTime = 0 + return + } + self.json = json + self.title = json["title"] as? String ?? "" + self.uri = json["uri"] as? String + self.startTime = json["startTime"] as? Double ?? 0 + self.endTime = json["endTime"] as? Double ?? 0 } - self.json = json - self.title = json["title"] as? String ?? 
"" - self.uri = json["uri"] as? String - self.startTime = json["startTime"] as? Double ?? 0 - self.endTime = json["endTime"] as? Double ?? 0 - } } diff --git a/ios/Video/DataStructures/DRMParams.swift b/ios/Video/DataStructures/DRMParams.swift index 4d676a0a92..b0baba49e5 100644 --- a/ios/Video/DataStructures/DRMParams.swift +++ b/ios/Video/DataStructures/DRMParams.swift @@ -1,30 +1,30 @@ struct DRMParams { - let type: String? - let licenseServer: String? - let headers: [String: Any]? - let contentId: String? - let certificateUrl: String? - let base64Certificate: Bool? + let type: String? + let licenseServer: String? + let headers: [String: Any]? + let contentId: String? + let certificateUrl: String? + let base64Certificate: Bool? - let json: NSDictionary? + let json: NSDictionary? - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = nil - self.licenseServer = nil - self.contentId = nil - self.certificateUrl = nil - self.base64Certificate = nil - self.headers = nil - return + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.type = nil + self.licenseServer = nil + self.contentId = nil + self.certificateUrl = nil + self.base64Certificate = nil + self.headers = nil + return + } + self.json = json + self.type = json["type"] as? String + self.licenseServer = json["licenseServer"] as? String + self.contentId = json["contentId"] as? String + self.certificateUrl = json["certificateUrl"] as? String + self.base64Certificate = json["base64Certificate"] as? Bool + self.headers = json["headers"] as? [String: Any] } - self.json = json - self.type = json["type"] as? String - self.licenseServer = json["licenseServer"] as? String - self.contentId = json["contentId"] as? String - self.certificateUrl = json["certificateUrl"] as? String - self.base64Certificate = json["base64Certificate"] as? Bool - self.headers = json["headers"] as? [String: Any] - } } diff --git a/ios/Video/DataStructures/SelectedTrackCriteria.swift b/ios/Video/DataStructures/SelectedTrackCriteria.swift index f118760f40..41d68affc4 100644 --- a/ios/Video/DataStructures/SelectedTrackCriteria.swift +++ b/ios/Video/DataStructures/SelectedTrackCriteria.swift @@ -1,18 +1,18 @@ struct SelectedTrackCriteria { - let type: String - let value: Any? + let type: String + let value: Any? - let json: NSDictionary? + let json: NSDictionary? - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = "" - self.value = nil - return + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.type = "" + self.value = nil + return + } + self.json = json + self.type = json["type"] as? String ?? "" + self.value = json["value"] } - self.json = json - self.type = json["type"] as? String ?? "" - self.value = json["value"] - } } diff --git a/ios/Video/DataStructures/TextTrack.swift b/ios/Video/DataStructures/TextTrack.swift index 4592ca3ad4..4c186b288c 100644 --- a/ios/Video/DataStructures/TextTrack.swift +++ b/ios/Video/DataStructures/TextTrack.swift @@ -1,24 +1,24 @@ struct TextTrack { - let type: String - let language: String - let title: String - let uri: String + let type: String + let language: String + let title: String + let uri: String - let json: NSDictionary? + let json: NSDictionary? - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = "" - self.language = "" - self.title = "" - self.uri = "" - return + init(_ json: NSDictionary!) 
{ + guard json != nil else { + self.json = nil + self.type = "" + self.language = "" + self.title = "" + self.uri = "" + return + } + self.json = json + self.type = json["type"] as? String ?? "" + self.language = json["language"] as? String ?? "" + self.title = json["title"] as? String ?? "" + self.uri = json["uri"] as? String ?? "" } - self.json = json - self.type = json["type"] as? String ?? "" - self.language = json["language"] as? String ?? "" - self.title = json["title"] as? String ?? "" - self.uri = json["uri"] as? String ?? "" - } } diff --git a/ios/Video/DataStructures/VideoSource.swift b/ios/Video/DataStructures/VideoSource.swift index 45d8d4a8d5..e9495a3325 100644 --- a/ios/Video/DataStructures/VideoSource.swift +++ b/ios/Video/DataStructures/VideoSource.swift @@ -1,52 +1,52 @@ struct VideoSource { - let type: String? - let uri: String? - let isNetwork: Bool - let isAsset: Bool - let shouldCache: Bool - let requestHeaders: [String: Any]? - let startPosition: Int64? - let cropStart: Int64? - let cropEnd: Int64? - // Custom Metadata - let title: String? - let subtitle: String? - let description: String? - let customImageUri: String? + let type: String? + let uri: String? + let isNetwork: Bool + let isAsset: Bool + let shouldCache: Bool + let requestHeaders: [String: Any]? + let startPosition: Int64? + let cropStart: Int64? + let cropEnd: Int64? + // Custom Metadata + let title: String? + let subtitle: String? + let description: String? + let customImageUri: String? - let json: NSDictionary? + let json: NSDictionary? - init(_ json: NSDictionary!) { - guard json != nil else { - self.json = nil - self.type = nil - self.uri = nil - self.isNetwork = false - self.isAsset = false - self.shouldCache = false - self.requestHeaders = nil - self.startPosition = nil - self.cropStart = nil - self.cropEnd = nil - self.title = nil - self.subtitle = nil - self.description = nil - self.customImageUri = nil - return + init(_ json: NSDictionary!) { + guard json != nil else { + self.json = nil + self.type = nil + self.uri = nil + self.isNetwork = false + self.isAsset = false + self.shouldCache = false + self.requestHeaders = nil + self.startPosition = nil + self.cropStart = nil + self.cropEnd = nil + self.title = nil + self.subtitle = nil + self.description = nil + self.customImageUri = nil + return + } + self.json = json + self.type = json["type"] as? String + self.uri = json["uri"] as? String + self.isNetwork = json["isNetwork"] as? Bool ?? false + self.isAsset = json["isAsset"] as? Bool ?? false + self.shouldCache = json["shouldCache"] as? Bool ?? false + self.requestHeaders = json["requestHeaders"] as? [String: Any] + self.startPosition = json["startPosition"] as? Int64 + self.cropStart = json["cropStart"] as? Int64 + self.cropEnd = json["cropEnd"] as? Int64 + self.title = json["title"] as? String + self.subtitle = json["subtitle"] as? String + self.description = json["description"] as? String + self.customImageUri = json["customImageUri"] as? String } - self.json = json - self.type = json["type"] as? String - self.uri = json["uri"] as? String - self.isNetwork = json["isNetwork"] as? Bool ?? false - self.isAsset = json["isAsset"] as? Bool ?? false - self.shouldCache = json["shouldCache"] as? Bool ?? false - self.requestHeaders = json["requestHeaders"] as? [String: Any] - self.startPosition = json["startPosition"] as? Int64 - self.cropStart = json["cropStart"] as? Int64 - self.cropEnd = json["cropEnd"] as? Int64 - self.title = json["title"] as? String - self.subtitle = json["subtitle"] as? 
String - self.description = json["description"] as? String - self.customImageUri = json["customImageUri"] as? String - } } diff --git a/ios/Video/Features/RCTIMAAdsManager.swift b/ios/Video/Features/RCTIMAAdsManager.swift index fc24447d94..4345b8d01c 100644 --- a/ios/Video/Features/RCTIMAAdsManager.swift +++ b/ios/Video/Features/RCTIMAAdsManager.swift @@ -1,209 +1,209 @@ #if USE_GOOGLE_IMA - import Foundation - import GoogleInteractiveMediaAds + import Foundation + import GoogleInteractiveMediaAds - class RCTIMAAdsManager: NSObject, IMAAdsLoaderDelegate, IMAAdsManagerDelegate, IMALinkOpenerDelegate { - private weak var _video: RCTVideo? - private var _pipEnabled: () -> Bool + class RCTIMAAdsManager: NSObject, IMAAdsLoaderDelegate, IMAAdsManagerDelegate, IMALinkOpenerDelegate { + private weak var _video: RCTVideo? + private var _pipEnabled: () -> Bool - /* Entry point for the SDK. Used to make ad requests. */ - private var adsLoader: IMAAdsLoader! - /* Main point of interaction with the SDK. Created by the SDK as the result of an ad request. */ - private var adsManager: IMAAdsManager! + /* Entry point for the SDK. Used to make ad requests. */ + private var adsLoader: IMAAdsLoader! + /* Main point of interaction with the SDK. Created by the SDK as the result of an ad request. */ + private var adsManager: IMAAdsManager! - init(video: RCTVideo!, pipEnabled: @escaping () -> Bool) { - _video = video - _pipEnabled = pipEnabled + init(video: RCTVideo!, pipEnabled: @escaping () -> Bool) { + _video = video + _pipEnabled = pipEnabled - super.init() - } + super.init() + } - func setUpAdsLoader() { - adsLoader = IMAAdsLoader(settings: nil) - adsLoader.delegate = self - } + func setUpAdsLoader() { + adsLoader = IMAAdsLoader(settings: nil) + adsLoader.delegate = self + } - func requestAds() { - guard let _video = _video else { return } - // Create ad display container for ad rendering. - let adDisplayContainer = IMAAdDisplayContainer(adContainer: _video, viewController: _video.reactViewController()) - - let adTagUrl = _video.getAdTagUrl() - let contentPlayhead = _video.getContentPlayhead() - - if adTagUrl != nil && contentPlayhead != nil { - // Create an ad request with our ad tag, display container, and optional user context. - let request = IMAAdsRequest( - adTagUrl: adTagUrl!, - adDisplayContainer: adDisplayContainer, - contentPlayhead: contentPlayhead, - userContext: nil - ) - - adsLoader.requestAds(with: request) - } - } + func requestAds() { + guard let _video = _video else { return } + // Create ad display container for ad rendering. + let adDisplayContainer = IMAAdDisplayContainer(adContainer: _video, viewController: _video.reactViewController()) + + let adTagUrl = _video.getAdTagUrl() + let contentPlayhead = _video.getContentPlayhead() + + if adTagUrl != nil && contentPlayhead != nil { + // Create an ad request with our ad tag, display container, and optional user context. + let request = IMAAdsRequest( + adTagUrl: adTagUrl!, + adDisplayContainer: adDisplayContainer, + contentPlayhead: contentPlayhead, + userContext: nil + ) + + adsLoader.requestAds(with: request) + } + } - // MARK: - Getters + // MARK: - Getters - func getAdsLoader() -> IMAAdsLoader? { - return adsLoader - } + func getAdsLoader() -> IMAAdsLoader? { + return adsLoader + } - func getAdsManager() -> IMAAdsManager? { - return adsManager - } + func getAdsManager() -> IMAAdsManager? 
{ + return adsManager + } - // MARK: - IMAAdsLoaderDelegate + // MARK: - IMAAdsLoaderDelegate - func adsLoader(_: IMAAdsLoader, adsLoadedWith adsLoadedData: IMAAdsLoadedData) { - guard let _video = _video else { return } - // Grab the instance of the IMAAdsManager and set yourself as the delegate. - adsManager = adsLoadedData.adsManager - adsManager?.delegate = self + func adsLoader(_: IMAAdsLoader, adsLoadedWith adsLoadedData: IMAAdsLoadedData) { + guard let _video = _video else { return } + // Grab the instance of the IMAAdsManager and set yourself as the delegate. + adsManager = adsLoadedData.adsManager + adsManager?.delegate = self - // Create ads rendering settings and tell the SDK to use the in-app browser. - let adsRenderingSettings = IMAAdsRenderingSettings() - adsRenderingSettings.linkOpenerDelegate = self - adsRenderingSettings.linkOpenerPresentingController = _video.reactViewController() + // Create ads rendering settings and tell the SDK to use the in-app browser. + let adsRenderingSettings = IMAAdsRenderingSettings() + adsRenderingSettings.linkOpenerDelegate = self + adsRenderingSettings.linkOpenerPresentingController = _video.reactViewController() - adsManager.initialize(with: adsRenderingSettings) - } + adsManager.initialize(with: adsRenderingSettings) + } - func adsLoader(_: IMAAdsLoader, failedWith adErrorData: IMAAdLoadingErrorData) { - if adErrorData.adError.message != nil { - print("Error loading ads: " + adErrorData.adError.message!) - } + func adsLoader(_: IMAAdsLoader, failedWith adErrorData: IMAAdLoadingErrorData) { + if adErrorData.adError.message != nil { + print("Error loading ads: " + adErrorData.adError.message!) + } - _video?.setPaused(false) - } - - // MARK: - IMAAdsManagerDelegate - - func adsManager(_ adsManager: IMAAdsManager, didReceive event: IMAAdEvent) { - guard let _video = _video else { return } - // Mute ad if the main player is muted - if _video.isMuted() { - adsManager.volume = 0 - } - // Play each ad once it has been loaded - if event.type == IMAAdEventType.LOADED { - if _pipEnabled() { - return + _video?.setPaused(false) } - adsManager.start() - } - - if _video.onReceiveAdEvent != nil { - let type = convertEventToString(event: event.type) - - if event.adData != nil { - _video.onReceiveAdEvent?([ - "event": type, - "data": event.adData ?? [String](), - "target": _video.reactTag!, - ]) - } else { - _video.onReceiveAdEvent?([ - "event": type, - "target": _video.reactTag!, - ]) + + // MARK: - IMAAdsManagerDelegate + + func adsManager(_ adsManager: IMAAdsManager, didReceive event: IMAAdEvent) { + guard let _video = _video else { return } + // Mute ad if the main player is muted + if _video.isMuted() { + adsManager.volume = 0 + } + // Play each ad once it has been loaded + if event.type == IMAAdEventType.LOADED { + if _pipEnabled() { + return + } + adsManager.start() + } + + if _video.onReceiveAdEvent != nil { + let type = convertEventToString(event: event.type) + + if event.adData != nil { + _video.onReceiveAdEvent?([ + "event": type, + "data": event.adData ?? [String](), + "target": _video.reactTag!, + ]) + } else { + _video.onReceiveAdEvent?([ + "event": type, + "target": _video.reactTag!, + ]) + } + } } - } - } - func adsManager(_: IMAAdsManager, didReceive error: IMAAdError) { - if error.message != nil { - print("AdsManager error: " + error.message!) - } - - guard let _video = _video else { return } - - if _video.onReceiveAdEvent != nil { - _video.onReceiveAdEvent?([ - "event": "ERROR", - "data": [ - "message": error.message ?? 
"", - "code": error.code, - "type": error.type, - ], - "target": _video.reactTag!, - ]) - } - - // Fall back to playing content - _video.setPaused(false) - } + func adsManager(_: IMAAdsManager, didReceive error: IMAAdError) { + if error.message != nil { + print("AdsManager error: " + error.message!) + } + + guard let _video = _video else { return } + + if _video.onReceiveAdEvent != nil { + _video.onReceiveAdEvent?([ + "event": "ERROR", + "data": [ + "message": error.message ?? "", + "code": error.code, + "type": error.type, + ], + "target": _video.reactTag!, + ]) + } + + // Fall back to playing content + _video.setPaused(false) + } - func adsManagerDidRequestContentPause(_: IMAAdsManager) { - // Pause the content for the SDK to play ads. - _video?.setPaused(true) - _video?.setAdPlaying(true) - } + func adsManagerDidRequestContentPause(_: IMAAdsManager) { + // Pause the content for the SDK to play ads. + _video?.setPaused(true) + _video?.setAdPlaying(true) + } - func adsManagerDidRequestContentResume(_: IMAAdsManager) { - // Resume the content since the SDK is done playing ads (at least for now). - _video?.setAdPlaying(false) - _video?.setPaused(false) - } + func adsManagerDidRequestContentResume(_: IMAAdsManager) { + // Resume the content since the SDK is done playing ads (at least for now). + _video?.setAdPlaying(false) + _video?.setPaused(false) + } - // MARK: - IMALinkOpenerDelegate + // MARK: - IMALinkOpenerDelegate - func linkOpenerDidClose(inAppLink _: NSObject) { - adsManager?.resume() - } + func linkOpenerDidClose(inAppLink _: NSObject) { + adsManager?.resume() + } - // MARK: - Helpers - - func convertEventToString(event: IMAAdEventType!) -> String { - var result = "UNKNOWN" - - switch event { - case .AD_BREAK_READY: - result = "AD_BREAK_READY" - case .AD_BREAK_ENDED: - result = "AD_BREAK_ENDED" - case .AD_BREAK_STARTED: - result = "AD_BREAK_STARTED" - case .AD_PERIOD_ENDED: - result = "AD_PERIOD_ENDED" - case .AD_PERIOD_STARTED: - result = "AD_PERIOD_STARTED" - case .ALL_ADS_COMPLETED: - result = "ALL_ADS_COMPLETED" - case .CLICKED: - result = "CLICK" - case .COMPLETE: - result = "COMPLETED" - case .CUEPOINTS_CHANGED: - result = "CUEPOINTS_CHANGED" - case .FIRST_QUARTILE: - result = "FIRST_QUARTILE" - case .LOADED: - result = "LOADED" - case .LOG: - result = "LOG" - case .MIDPOINT: - result = "MIDPOINT" - case .PAUSE: - result = "PAUSED" - case .RESUME: - result = "RESUMED" - case .SKIPPED: - result = "SKIPPED" - case .STARTED: - result = "STARTED" - case .STREAM_LOADED: - result = "STREAM_LOADED" - case .TAPPED: - result = "TAPPED" - case .THIRD_QUARTILE: - result = "THIRD_QUARTILE" - default: - result = "UNKNOWN" - } - - return result + // MARK: - Helpers + + func convertEventToString(event: IMAAdEventType!) 
-> String { + var result = "UNKNOWN" + + switch event { + case .AD_BREAK_READY: + result = "AD_BREAK_READY" + case .AD_BREAK_ENDED: + result = "AD_BREAK_ENDED" + case .AD_BREAK_STARTED: + result = "AD_BREAK_STARTED" + case .AD_PERIOD_ENDED: + result = "AD_PERIOD_ENDED" + case .AD_PERIOD_STARTED: + result = "AD_PERIOD_STARTED" + case .ALL_ADS_COMPLETED: + result = "ALL_ADS_COMPLETED" + case .CLICKED: + result = "CLICK" + case .COMPLETE: + result = "COMPLETED" + case .CUEPOINTS_CHANGED: + result = "CUEPOINTS_CHANGED" + case .FIRST_QUARTILE: + result = "FIRST_QUARTILE" + case .LOADED: + result = "LOADED" + case .LOG: + result = "LOG" + case .MIDPOINT: + result = "MIDPOINT" + case .PAUSE: + result = "PAUSED" + case .RESUME: + result = "RESUMED" + case .SKIPPED: + result = "SKIPPED" + case .STARTED: + result = "STARTED" + case .STREAM_LOADED: + result = "STREAM_LOADED" + case .TAPPED: + result = "TAPPED" + case .THIRD_QUARTILE: + result = "THIRD_QUARTILE" + default: + result = "UNKNOWN" + } + + return result + } } - } #endif diff --git a/ios/Video/Features/RCTPictureInPicture.swift b/ios/Video/Features/RCTPictureInPicture.swift index 1cf2661ff1..23dae3083d 100644 --- a/ios/Video/Features/RCTPictureInPicture.swift +++ b/ios/Video/Features/RCTPictureInPicture.swift @@ -5,73 +5,73 @@ import MediaAccessibility import React #if os(iOS) - class RCTPictureInPicture: NSObject, AVPictureInPictureControllerDelegate { - private var _onPictureInPictureStatusChanged: (() -> Void)? - private var _onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? - private var _restoreUserInterfaceForPIPStopCompletionHandler: ((Bool) -> Void)? - private var _pipController: AVPictureInPictureController? - private var _isActive = false + class RCTPictureInPicture: NSObject, AVPictureInPictureControllerDelegate { + private var _onPictureInPictureStatusChanged: (() -> Void)? + private var _onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? + private var _restoreUserInterfaceForPIPStopCompletionHandler: ((Bool) -> Void)? + private var _pipController: AVPictureInPictureController? + private var _isActive = false - init(_ onPictureInPictureStatusChanged: (() -> Void)? = nil, _ onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? = nil) { - _onPictureInPictureStatusChanged = onPictureInPictureStatusChanged - _onRestoreUserInterfaceForPictureInPictureStop = onRestoreUserInterfaceForPictureInPictureStop - } + init(_ onPictureInPictureStatusChanged: (() -> Void)? = nil, _ onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? 
= nil) { + _onPictureInPictureStatusChanged = onPictureInPictureStatusChanged + _onRestoreUserInterfaceForPictureInPictureStop = onRestoreUserInterfaceForPictureInPictureStop + } - func pictureInPictureControllerDidStartPictureInPicture(_: AVPictureInPictureController) { - guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } + func pictureInPictureControllerDidStartPictureInPicture(_: AVPictureInPictureController) { + guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } - _onPictureInPictureStatusChanged() - } + _onPictureInPictureStatusChanged() + } - func pictureInPictureControllerDidStopPictureInPicture(_: AVPictureInPictureController) { - guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } + func pictureInPictureControllerDidStopPictureInPicture(_: AVPictureInPictureController) { + guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return } - _onPictureInPictureStatusChanged() - } + _onPictureInPictureStatusChanged() + } - func pictureInPictureController( - _: AVPictureInPictureController, - restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void - ) { - guard let _onRestoreUserInterfaceForPictureInPictureStop = _onRestoreUserInterfaceForPictureInPictureStop else { return } + func pictureInPictureController( + _: AVPictureInPictureController, + restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void + ) { + guard let _onRestoreUserInterfaceForPictureInPictureStop = _onRestoreUserInterfaceForPictureInPictureStop else { return } - _onRestoreUserInterfaceForPictureInPictureStop() + _onRestoreUserInterfaceForPictureInPictureStop() - _restoreUserInterfaceForPIPStopCompletionHandler = completionHandler - } + _restoreUserInterfaceForPIPStopCompletionHandler = completionHandler + } - func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore: Bool) { - guard let _restoreUserInterfaceForPIPStopCompletionHandler = _restoreUserInterfaceForPIPStopCompletionHandler else { return } - _restoreUserInterfaceForPIPStopCompletionHandler(restore) - self._restoreUserInterfaceForPIPStopCompletionHandler = nil - } + func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore: Bool) { + guard let _restoreUserInterfaceForPIPStopCompletionHandler = _restoreUserInterfaceForPIPStopCompletionHandler else { return } + _restoreUserInterfaceForPIPStopCompletionHandler(restore) + self._restoreUserInterfaceForPIPStopCompletionHandler = nil + } - func setupPipController(_ playerLayer: AVPlayerLayer?) { - // Create new controller passing reference to the AVPlayerLayer - _pipController = AVPictureInPictureController(playerLayer: playerLayer!) - if #available(iOS 14.2, *) { - _pipController?.canStartPictureInPictureAutomaticallyFromInline = true - } - _pipController?.delegate = self - } + func setupPipController(_ playerLayer: AVPlayerLayer?) { + // Create new controller passing reference to the AVPlayerLayer + _pipController = AVPictureInPictureController(playerLayer: playerLayer!) 
+ if #available(iOS 14.2, *) { + _pipController?.canStartPictureInPictureAutomaticallyFromInline = true + } + _pipController?.delegate = self + } - func setPictureInPicture(_ isActive: Bool) { - if _isActive == isActive { - return - } - _isActive = isActive + func setPictureInPicture(_ isActive: Bool) { + if _isActive == isActive { + return + } + _isActive = isActive - guard let _pipController = _pipController else { return } + guard let _pipController = _pipController else { return } - if _isActive && !_pipController.isPictureInPictureActive { - DispatchQueue.main.async { - _pipController.startPictureInPicture() - } - } else if !_isActive && _pipController.isPictureInPictureActive { - DispatchQueue.main.async { - _pipController.stopPictureInPicture() + if _isActive && !_pipController.isPictureInPictureActive { + DispatchQueue.main.async { + _pipController.startPictureInPicture() + } + } else if !_isActive && _pipController.isPictureInPictureActive { + DispatchQueue.main.async { + _pipController.stopPictureInPicture() + } + } } - } } - } #endif diff --git a/ios/Video/Features/RCTPlayerObserver.swift b/ios/Video/Features/RCTPlayerObserver.swift index 3edad3bc67..658440bfdf 100644 --- a/ios/Video/Features/RCTPlayerObserver.swift +++ b/ios/Video/Features/RCTPlayerObserver.swift @@ -6,244 +6,244 @@ import Foundation @objc protocol RCTPlayerObserverHandlerObjc { - func handleDidFailToFinishPlaying(notification: NSNotification!) - func handlePlaybackStalled(notification: NSNotification!) - func handlePlayerItemDidReachEnd(notification: NSNotification!) - func handleAVPlayerAccess(notification: NSNotification!) + func handleDidFailToFinishPlaying(notification: NSNotification!) + func handlePlaybackStalled(notification: NSNotification!) + func handlePlayerItemDidReachEnd(notification: NSNotification!) + func handleAVPlayerAccess(notification: NSNotification!) 
} // MARK: - RCTPlayerObserverHandler protocol RCTPlayerObserverHandler: RCTPlayerObserverHandlerObjc { - func handleTimeUpdate(time: CMTime) - func handleReadyForDisplay(changeObject: Any, change: NSKeyValueObservedChange) - func handleTimeMetadataChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<[AVMetadataItem]?>) - func handlePlayerItemStatusChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) - func handlePlaybackBufferKeyEmpty(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) - func handlePlaybackLikelyToKeepUp(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) - func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) - func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) - func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange) - func handleViewControllerOverlayViewFrameChange(overlayView: UIView, change: NSKeyValueObservedChange) + func handleTimeUpdate(time: CMTime) + func handleReadyForDisplay(changeObject: Any, change: NSKeyValueObservedChange) + func handleTimeMetadataChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<[AVMetadataItem]?>) + func handlePlayerItemStatusChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) + func handlePlaybackBufferKeyEmpty(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) + func handlePlaybackLikelyToKeepUp(playerItem: AVPlayerItem, change: NSKeyValueObservedChange) + func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) + func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) + func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange) + func handleViewControllerOverlayViewFrameChange(overlayView: UIView, change: NSKeyValueObservedChange) } // MARK: - RCTPlayerObserver class RCTPlayerObserver: NSObject { - weak var _handlers: RCTPlayerObserverHandler? - - var player: AVPlayer? { - willSet { - removePlayerObservers() - removePlayerTimeObserver() - } - didSet { - if player != nil { - addPlayerObservers() - addPlayerTimeObserver() - } - } - } - - var playerItem: AVPlayerItem? { - willSet { - removePlayerItemObservers() - } - didSet { - if playerItem != nil { - addPlayerItemObservers() - } - } - } - - var playerViewController: AVPlayerViewController? { - willSet { - removePlayerViewControllerObservers() - } - didSet { - if playerViewController != nil { - addPlayerViewControllerObservers() - } - } - } - - var playerLayer: AVPlayerLayer? { - willSet { - removePlayerLayerObserver() - } - didSet { - if playerLayer != nil { - addPlayerLayerObserver() - } - } - } - - private var _progressUpdateInterval: TimeInterval = 250 - private var _timeObserver: Any? - - private var _playerRateChangeObserver: NSKeyValueObservation? - private var _playerVolumeChangeObserver: NSKeyValueObservation? - private var _playerExternalPlaybackActiveObserver: NSKeyValueObservation? - private var _playerItemStatusObserver: NSKeyValueObservation? - private var _playerPlaybackBufferEmptyObserver: NSKeyValueObservation? - private var _playerPlaybackLikelyToKeepUpObserver: NSKeyValueObservation? - private var _playerTimedMetadataObserver: NSKeyValueObservation? - private var _playerViewControllerReadyForDisplayObserver: NSKeyValueObservation? - private var _playerLayerReadyForDisplayObserver: NSKeyValueObservation? - private var _playerViewControllerOverlayFrameObserver: NSKeyValueObservation? 
- - deinit { - if let _handlers = _handlers { - NotificationCenter.default.removeObserver(_handlers) - } - } - - func addPlayerObservers() { - guard let player = player, let _handlers = _handlers else { - return - } - - _playerRateChangeObserver = player.observe(\.rate, options: [.old], changeHandler: _handlers.handlePlaybackRateChange) - _playerVolumeChangeObserver = player.observe(\.volume, options: [.old], changeHandler: _handlers.handleVolumeChange) - _playerExternalPlaybackActiveObserver = player.observe(\.isExternalPlaybackActive, changeHandler: _handlers.handleExternalPlaybackActiveChange) - } - - func removePlayerObservers() { - _playerRateChangeObserver?.invalidate() - _playerExternalPlaybackActiveObserver?.invalidate() - } - - func addPlayerItemObservers() { - guard let playerItem = playerItem, let _handlers = _handlers else { return } - _playerItemStatusObserver = playerItem.observe(\.status, options: [.new, .old], changeHandler: _handlers.handlePlayerItemStatusChange) - _playerPlaybackBufferEmptyObserver = playerItem.observe( - \.isPlaybackBufferEmpty, - options: [.new, .old], - changeHandler: _handlers.handlePlaybackBufferKeyEmpty - ) - _playerPlaybackLikelyToKeepUpObserver = playerItem.observe( - \.isPlaybackLikelyToKeepUp, - options: [.new, .old], - changeHandler: _handlers.handlePlaybackLikelyToKeepUp - ) - _playerTimedMetadataObserver = playerItem.observe(\.timedMetadata, options: [.new], changeHandler: _handlers.handleTimeMetadataChange) - } - - func removePlayerItemObservers() { - _playerItemStatusObserver?.invalidate() - _playerPlaybackBufferEmptyObserver?.invalidate() - _playerPlaybackLikelyToKeepUpObserver?.invalidate() - _playerTimedMetadataObserver?.invalidate() - } - - func addPlayerViewControllerObservers() { - guard let playerViewController = playerViewController, let _handlers = _handlers else { return } - - _playerViewControllerReadyForDisplayObserver = playerViewController.observe( - \.isReadyForDisplay, - options: [.new], - changeHandler: _handlers.handleReadyForDisplay - ) - - _playerViewControllerOverlayFrameObserver = playerViewController.contentOverlayView?.observe( - \.frame, - options: [.new, .old], - changeHandler: _handlers.handleViewControllerOverlayViewFrameChange - ) - } - - func removePlayerViewControllerObservers() { - _playerViewControllerReadyForDisplayObserver?.invalidate() - _playerViewControllerOverlayFrameObserver?.invalidate() - } - - func addPlayerLayerObserver() { - guard let _handlers = _handlers else { return } - _playerLayerReadyForDisplayObserver = playerLayer?.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay) - } - - func removePlayerLayerObserver() { - _playerLayerReadyForDisplayObserver?.invalidate() - } - - func addPlayerTimeObserver() { - guard let _handlers = _handlers else { return } - removePlayerTimeObserver() - let progressUpdateIntervalMS: Float64 = _progressUpdateInterval / 1000 - // @see endScrubbing in AVPlayerDemoPlaybackViewController.m - // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html - _timeObserver = player?.addPeriodicTimeObserver( - forInterval: CMTimeMakeWithSeconds(progressUpdateIntervalMS, preferredTimescale: Int32(NSEC_PER_SEC)), - queue: nil, - using: _handlers.handleTimeUpdate - ) - } - - /* Cancels the previously registered time observer. 
*/ - func removePlayerTimeObserver() { - if _timeObserver != nil { - player?.removeTimeObserver(_timeObserver) - _timeObserver = nil - } - } - - func addTimeObserverIfNotSet() { - if _timeObserver == nil { - addPlayerTimeObserver() - } - } - - func replaceTimeObserverIfSet(_ newUpdateInterval: Float64? = nil) { - if let newUpdateInterval = newUpdateInterval { - _progressUpdateInterval = newUpdateInterval - } - if _timeObserver != nil { - addPlayerTimeObserver() - } - } - - func attachPlayerEventListeners() { - guard let _handlers = _handlers else { return } - NotificationCenter.default.removeObserver(_handlers, - name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, - object: player?.currentItem) - - NotificationCenter.default.addObserver(_handlers, - selector: #selector(RCTPlayerObserverHandler.handlePlayerItemDidReachEnd(notification:)), - name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, - object: player?.currentItem) - - NotificationCenter.default.removeObserver(_handlers, - name: NSNotification.Name.AVPlayerItemPlaybackStalled, - object: nil) - - NotificationCenter.default.addObserver(_handlers, - selector: #selector(RCTPlayerObserverHandler.handlePlaybackStalled(notification:)), - name: NSNotification.Name.AVPlayerItemPlaybackStalled, - object: nil) - - NotificationCenter.default.removeObserver(_handlers, - name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, - object: nil) - - NotificationCenter.default.addObserver(_handlers, - selector: #selector(RCTPlayerObserverHandler.handleDidFailToFinishPlaying(notification:)), - name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, - object: nil) - - NotificationCenter.default.removeObserver(_handlers, name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, object: player?.currentItem) - - NotificationCenter.default.addObserver(_handlers, - selector: #selector(RCTPlayerObserverHandlerObjc.handleAVPlayerAccess(notification:)), - name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, - object: player?.currentItem) - } - - func clearPlayer() { - player = nil - playerItem = nil - if let _handlers = _handlers { - NotificationCenter.default.removeObserver(_handlers) - } - } + weak var _handlers: RCTPlayerObserverHandler? + + var player: AVPlayer? { + willSet { + removePlayerObservers() + removePlayerTimeObserver() + } + didSet { + if player != nil { + addPlayerObservers() + addPlayerTimeObserver() + } + } + } + + var playerItem: AVPlayerItem? { + willSet { + removePlayerItemObservers() + } + didSet { + if playerItem != nil { + addPlayerItemObservers() + } + } + } + + var playerViewController: AVPlayerViewController? { + willSet { + removePlayerViewControllerObservers() + } + didSet { + if playerViewController != nil { + addPlayerViewControllerObservers() + } + } + } + + var playerLayer: AVPlayerLayer? { + willSet { + removePlayerLayerObserver() + } + didSet { + if playerLayer != nil { + addPlayerLayerObserver() + } + } + } + + private var _progressUpdateInterval: TimeInterval = 250 + private var _timeObserver: Any? + + private var _playerRateChangeObserver: NSKeyValueObservation? + private var _playerVolumeChangeObserver: NSKeyValueObservation? + private var _playerExternalPlaybackActiveObserver: NSKeyValueObservation? + private var _playerItemStatusObserver: NSKeyValueObservation? + private var _playerPlaybackBufferEmptyObserver: NSKeyValueObservation? + private var _playerPlaybackLikelyToKeepUpObserver: NSKeyValueObservation? + private var _playerTimedMetadataObserver: NSKeyValueObservation? 
+ private var _playerViewControllerReadyForDisplayObserver: NSKeyValueObservation?
+ private var _playerLayerReadyForDisplayObserver: NSKeyValueObservation?
+ private var _playerViewControllerOverlayFrameObserver: NSKeyValueObservation?
+
+ deinit {
+ if let _handlers = _handlers {
+ NotificationCenter.default.removeObserver(_handlers)
+ }
+ }
+
+ func addPlayerObservers() {
+ guard let player = player, let _handlers = _handlers else {
+ return
+ }
+
+ _playerRateChangeObserver = player.observe(\.rate, options: [.old], changeHandler: _handlers.handlePlaybackRateChange)
+ _playerVolumeChangeObserver = player.observe(\.volume, options: [.old], changeHandler: _handlers.handleVolumeChange)
+ _playerExternalPlaybackActiveObserver = player.observe(\.isExternalPlaybackActive, changeHandler: _handlers.handleExternalPlaybackActiveChange)
+ }
+
+ func removePlayerObservers() {
+ _playerRateChangeObserver?.invalidate()
+ _playerExternalPlaybackActiveObserver?.invalidate()
+ }
+
+ func addPlayerItemObservers() {
+ guard let playerItem = playerItem, let _handlers = _handlers else { return }
+ _playerItemStatusObserver = playerItem.observe(\.status, options: [.new, .old], changeHandler: _handlers.handlePlayerItemStatusChange)
+ _playerPlaybackBufferEmptyObserver = playerItem.observe(
+ \.isPlaybackBufferEmpty,
+ options: [.new, .old],
+ changeHandler: _handlers.handlePlaybackBufferKeyEmpty
+ )
+ _playerPlaybackLikelyToKeepUpObserver = playerItem.observe(
+ \.isPlaybackLikelyToKeepUp,
+ options: [.new, .old],
+ changeHandler: _handlers.handlePlaybackLikelyToKeepUp
+ )
+ _playerTimedMetadataObserver = playerItem.observe(\.timedMetadata, options: [.new], changeHandler: _handlers.handleTimeMetadataChange)
+ }
+
+ func removePlayerItemObservers() {
+ _playerItemStatusObserver?.invalidate()
+ _playerPlaybackBufferEmptyObserver?.invalidate()
+ _playerPlaybackLikelyToKeepUpObserver?.invalidate()
+ _playerTimedMetadataObserver?.invalidate()
+ }
+
+ func addPlayerViewControllerObservers() {
+ guard let playerViewController = playerViewController, let _handlers = _handlers else { return }
+
+ _playerViewControllerReadyForDisplayObserver = playerViewController.observe(
+ \.isReadyForDisplay,
+ options: [.new],
+ changeHandler: _handlers.handleReadyForDisplay
+ )
+
+ _playerViewControllerOverlayFrameObserver = playerViewController.contentOverlayView?.observe(
+ \.frame,
+ options: [.new, .old],
+ changeHandler: _handlers.handleViewControllerOverlayViewFrameChange
+ )
+ }
+
+ func removePlayerViewControllerObservers() {
+ _playerViewControllerReadyForDisplayObserver?.invalidate()
+ _playerViewControllerOverlayFrameObserver?.invalidate()
+ }
+
+ func addPlayerLayerObserver() {
+ guard let _handlers = _handlers else { return }
+ _playerLayerReadyForDisplayObserver = playerLayer?.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay)
+ }
+
+ func removePlayerLayerObserver() {
+ _playerLayerReadyForDisplayObserver?.invalidate()
+ }
+
+ func addPlayerTimeObserver() {
+ guard let _handlers = _handlers else { return }
+ removePlayerTimeObserver()
+ let progressUpdateIntervalMS: Float64 = _progressUpdateInterval / 1000
+ // @see endScrubbing in AVPlayerDemoPlaybackViewController.m
+ // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html
+ _timeObserver = player?.addPeriodicTimeObserver(
+ forInterval: CMTimeMakeWithSeconds(progressUpdateIntervalMS, preferredTimescale: Int32(NSEC_PER_SEC)),
+ queue: nil,
+ using: _handlers.handleTimeUpdate
+ )
+ }
+
+ /* Cancels the previously registered time observer. */ + func removePlayerTimeObserver() { + if _timeObserver != nil { + player?.removeTimeObserver(_timeObserver) + _timeObserver = nil + } + } + + func addTimeObserverIfNotSet() { + if _timeObserver == nil { + addPlayerTimeObserver() + } + } + + func replaceTimeObserverIfSet(_ newUpdateInterval: Float64? = nil) { + if let newUpdateInterval = newUpdateInterval { + _progressUpdateInterval = newUpdateInterval + } + if _timeObserver != nil { + addPlayerTimeObserver() + } + } + + func attachPlayerEventListeners() { + guard let _handlers = _handlers else { return } + NotificationCenter.default.removeObserver(_handlers, + name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, + object: player?.currentItem) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandler.handlePlayerItemDidReachEnd(notification:)), + name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, + object: player?.currentItem) + + NotificationCenter.default.removeObserver(_handlers, + name: NSNotification.Name.AVPlayerItemPlaybackStalled, + object: nil) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandler.handlePlaybackStalled(notification:)), + name: NSNotification.Name.AVPlayerItemPlaybackStalled, + object: nil) + + NotificationCenter.default.removeObserver(_handlers, + name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, + object: nil) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandler.handleDidFailToFinishPlaying(notification:)), + name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, + object: nil) + + NotificationCenter.default.removeObserver(_handlers, name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, object: player?.currentItem) + + NotificationCenter.default.addObserver(_handlers, + selector: #selector(RCTPlayerObserverHandlerObjc.handleAVPlayerAccess(notification:)), + name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, + object: player?.currentItem) + } + + func clearPlayer() { + player = nil + playerItem = nil + if let _handlers = _handlers { + NotificationCenter.default.removeObserver(_handlers) + } + } } diff --git a/ios/Video/Features/RCTPlayerOperations.swift b/ios/Video/Features/RCTPlayerOperations.swift index be252847df..885a621f3d 100644 --- a/ios/Video/Features/RCTPlayerOperations.swift +++ b/ios/Video/Features/RCTPlayerOperations.swift @@ -10,237 +10,237 @@ let RCTVideoUnset = -1 * Collection of mutating functions */ enum RCTPlayerOperations { - static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack]?, criteria: SelectedTrackCriteria?) { - let type = criteria?.type - let textTracks: [TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player) - let trackCount: Int! = player?.currentItem?.tracks.count ?? 0 - - // The first few tracks will be audio & video track - var firstTextIndex = 0 - for i in 0 ..< trackCount where (player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible)) != nil { - firstTextIndex = i - break - } - - var selectedTrackIndex: Int = RCTVideoUnset - - if type == "disabled" { - // Select the last text index which is the disabled text track - selectedTrackIndex = trackCount - firstTextIndex - } else if type == "language" { - let selectedValue = criteria?.value as? 
String - for i in 0 ..< textTracks.count { - let currentTextTrack = textTracks[i] - if selectedValue == currentTextTrack.language { - selectedTrackIndex = i - break + static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack]?, criteria: SelectedTrackCriteria?) { + let type = criteria?.type + let textTracks: [TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player) + let trackCount: Int! = player?.currentItem?.tracks.count ?? 0 + + // The first few tracks will be audio & video track + var firstTextIndex = 0 + for i in 0 ..< trackCount where (player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible)) != nil { + firstTextIndex = i + break } - } - } else if type == "title" { - let selectedValue = criteria?.value as? String - for i in 0 ..< textTracks.count { - let currentTextTrack = textTracks[i] - if selectedValue == currentTextTrack.title { - selectedTrackIndex = i - break + + var selectedTrackIndex: Int = RCTVideoUnset + + if type == "disabled" { + // Select the last text index which is the disabled text track + selectedTrackIndex = trackCount - firstTextIndex + } else if type == "language" { + let selectedValue = criteria?.value as? String + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if selectedValue == currentTextTrack.language { + selectedTrackIndex = i + break + } + } + } else if type == "title" { + let selectedValue = criteria?.value as? String + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if selectedValue == currentTextTrack.title { + selectedTrackIndex = i + break + } + } + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if textTracks.count > index { + selectedTrackIndex = index + } + } } - } - } else if type == "index" { - if let value = criteria?.value, let index = value as? Int { - if textTracks.count > index { - selectedTrackIndex = index + + // in the situation that a selected text track is not available (eg. specifies a textTrack not available) + if (type != "disabled") && selectedTrackIndex == RCTVideoUnset { + let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user) + let captionSettings = captioningMediaCharacteristics as? [AnyHashable] + if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil { + selectedTrackIndex = 0 // If we can't find a match, use the first available track + let systemLanguage = NSLocale.preferredLanguages.first + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if systemLanguage == currentTextTrack.language { + selectedTrackIndex = i + break + } + } + } } - } - } - // in the situation that a selected text track is not available (eg. specifies a textTrack not available) - if (type != "disabled") && selectedTrackIndex == RCTVideoUnset { - let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user) - let captionSettings = captioningMediaCharacteristics as? 
[AnyHashable] - if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil { - selectedTrackIndex = 0 // If we can't find a match, use the first available track - let systemLanguage = NSLocale.preferredLanguages.first - for i in 0 ..< textTracks.count { - let currentTextTrack = textTracks[i] - if systemLanguage == currentTextTrack.language { - selectedTrackIndex = i - break - } + for i in firstTextIndex ..< trackCount { + var isEnabled = false + if selectedTrackIndex != RCTVideoUnset { + isEnabled = i == selectedTrackIndex + firstTextIndex + } + player?.currentItem?.tracks[i].isEnabled = isEnabled } - } } - for i in firstTextIndex ..< trackCount { - var isEnabled = false - if selectedTrackIndex != RCTVideoUnset { - isEnabled = i == selectedTrackIndex + firstTextIndex - } - player?.currentItem?.tracks[i].isEnabled = isEnabled - } - } - - // UNUSED - static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) { - let type = criteria?.type - let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible) - var mediaOption: AVMediaSelectionOption! - - if type == "disabled" { - // Do nothing. We want to ensure option is nil - } else if (type == "language") || (type == "title") { - let value = criteria?.value as? String - for i in 0 ..< group.options.count { - let currentOption: AVMediaSelectionOption! = group.options[i] - var optionValue: String! - if type == "language" { - optionValue = currentOption.extendedLanguageTag - } else { - optionValue = currentOption.commonMetadata.map(\.value)[0] as! String - } - if value == optionValue { - mediaOption = currentOption - break - } - } - // } else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ - } else if type == "index" { - if let value = criteria?.value, let index = value as? Int { - if group.options.count > index { - mediaOption = group.options[index] + // UNUSED + static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) { + let type = criteria?.type + let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible) + var mediaOption: AVMediaSelectionOption! + + if type == "disabled" { + // Do nothing. We want to ensure option is nil + } else if (type == "language") || (type == "title") { + let value = criteria?.value as? String + for i in 0 ..< group.options.count { + let currentOption: AVMediaSelectionOption! = group.options[i] + var optionValue: String! + if type == "language" { + optionValue = currentOption.extendedLanguageTag + } else { + optionValue = currentOption.commonMetadata.map(\.value)[0] as! String + } + if value == optionValue { + mediaOption = currentOption + break + } + } + // } else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if group.options.count > index { + mediaOption = group.options[index] + } + } + } else { // default. invalid type or "system" + #if os(tvOS) + // Do noting. Fix for tvOS native audio menu language selector + #else + player?.currentItem?.selectMediaOptionAutomatically(in: group) + return + #endif } - } - } else { // default. invalid type or "system" - #if os(tvOS) - // Do noting. 
Fix for tvOS native audio menu language selector - #else - player?.currentItem?.selectMediaOptionAutomatically(in: group) - return - #endif + + #if os(tvOS) + // Do noting. Fix for tvOS native audio menu language selector + #else + // If a match isn't found, option will be nil and text tracks will be disabled + player?.currentItem?.select(mediaOption, in: group) + #endif } - #if os(tvOS) - // Do noting. Fix for tvOS native audio menu language selector - #else - // If a match isn't found, option will be nil and text tracks will be disabled - player?.currentItem?.select(mediaOption, in: group) - #endif - } - - static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) { - let type = criteria?.type - let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic) - var mediaOption: AVMediaSelectionOption! - - guard group != nil else { return } - - if type == "disabled" { - // Do nothing. We want to ensure option is nil - } else if (type == "language") || (type == "title") { - let value = criteria?.value as? String - for i in 0 ..< group.options.count { - let currentOption: AVMediaSelectionOption! = group.options[i] - var optionValue: String! - if type == "language" { - optionValue = currentOption.extendedLanguageTag - } else { - optionValue = currentOption.commonMetadata.map(\.value)[0] as? String - } - if value == optionValue { - mediaOption = currentOption - break + static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) { + let type = criteria?.type + let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic) + var mediaOption: AVMediaSelectionOption! + + guard group != nil else { return } + + if type == "disabled" { + // Do nothing. We want to ensure option is nil + } else if (type == "language") || (type == "title") { + let value = criteria?.value as? String + for i in 0 ..< group.options.count { + let currentOption: AVMediaSelectionOption! = group.options[i] + var optionValue: String! + if type == "language" { + optionValue = currentOption.extendedLanguageTag + } else { + optionValue = currentOption.commonMetadata.map(\.value)[0] as? String + } + if value == optionValue { + mediaOption = currentOption + break + } + } + // } else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if group.options.count > index { + mediaOption = group.options[index] + } + } + } else if let group = group { // default. invalid type or "system" + player?.currentItem?.selectMediaOptionAutomatically(in: group) + return } - } - // } else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ - } else if type == "index" { - if let value = criteria?.value, let index = value as? Int { - if group.options.count > index { - mediaOption = group.options[index] + + if let group = group { + // If a match isn't found, option will be nil and text tracks will be disabled + player?.currentItem?.select(mediaOption, in: group) } - } - } else if let group = group { // default. 
invalid type or "system" - player?.currentItem?.selectMediaOptionAutomatically(in: group) - return } - if let group = group { - // If a match isn't found, option will be nil and text tracks will be disabled - player?.currentItem?.select(mediaOption, in: group) - } - } - - static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float) -> Promise { - let timeScale = 1000 - let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale)) - let current: CMTime = playerItem.currentTime() - let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale)) - - return Promise(on: .global()) { fulfill, reject in - guard CMTimeCompare(current, cmSeekTime) != 0 else { - reject(NSError(domain: "", code: 0, userInfo: nil)) - return - } - if !paused { player.pause() } - - player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in - fulfill(finished) - }) - } - } + static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float) -> Promise { + let timeScale = 1000 + let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale)) + let current: CMTime = playerItem.currentTime() + let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale)) - static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) { - let audioSession: AVAudioSession! = AVAudioSession.sharedInstance() - var category: AVAudioSession.Category? - var options: AVAudioSession.CategoryOptions? + return Promise(on: .global()) { fulfill, reject in + guard CMTimeCompare(current, cmSeekTime) != 0 else { + reject(NSError(domain: "", code: 0, userInfo: nil)) + return + } + if !paused { player.pause() } - if ignoreSilentSwitch == "ignore" { - category = audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback - } else if ignoreSilentSwitch == "obey" { - category = AVAudioSession.Category.ambient + player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in + fulfill(finished) + }) + } } - if mixWithOthers == "mix" { - options = .mixWithOthers - } else if mixWithOthers == "duck" { - options = .duckOthers - } + static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) { + let audioSession: AVAudioSession! = AVAudioSession.sharedInstance() + var category: AVAudioSession.Category? + var options: AVAudioSession.CategoryOptions? + + if ignoreSilentSwitch == "ignore" { + category = audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback + } else if ignoreSilentSwitch == "obey" { + category = AVAudioSession.Category.ambient + } - if let category = category, let options = options { - do { - try audioSession.setCategory(category, options: options) - } catch { - debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category and options. 
Error: \(error).") - #if !os(tvOS) - // Handle specific set category and option combination error - // setCategory:AVAudioSessionCategoryPlayback withOptions:mixWithOthers || duckOthers - // Failed to set category, error: 'what' Error Domain=NSOSStatusErrorDomain - // https://developer.apple.com/forums/thread/714598 - if #available(iOS 16.0, *) { + if mixWithOthers == "mix" { + options = .mixWithOthers + } else if mixWithOthers == "duck" { + options = .duckOthers + } + + if let category = category, let options = options { do { - debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category to playAndRecord with defaultToSpeaker options.") - try audioSession.setCategory( - audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback, - options: AVAudioSession.CategoryOptions.defaultToSpeaker - ) + try audioSession.setCategory(category, options: options) } catch { - debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category and options problem. Error: \(error).") + debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category and options. Error: \(error).") + #if !os(tvOS) + // Handle specific set category and option combination error + // setCategory:AVAudioSessionCategoryPlayback withOptions:mixWithOthers || duckOthers + // Failed to set category, error: 'what' Error Domain=NSOSStatusErrorDomain + // https://developer.apple.com/forums/thread/714598 + if #available(iOS 16.0, *) { + do { + debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category to playAndRecord with defaultToSpeaker options.") + try audioSession.setCategory( + audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback, + options: AVAudioSession.CategoryOptions.defaultToSpeaker + ) + } catch { + debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category and options problem. Error: \(error).") + } + } + #endif } - } - #endif - } - } else if let category = category, options == nil { - do { - try audioSession.setCategory(category) - } catch { - debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category. Error: \(error).") - } - } else if category == nil, let options = options { - do { - try audioSession.setCategory(audioSession.category, options: options) - } catch { - debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession options. Error: \(error).") - } + } else if let category = category, options == nil { + do { + try audioSession.setCategory(category) + } catch { + debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category. Error: \(error).") + } + } else if category == nil, let options = options { + do { + try audioSession.setCategory(audioSession.category, options: options) + } catch { + debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession options. Error: \(error).") + } + } } - } } diff --git a/ios/Video/Features/RCTResourceLoaderDelegate.swift b/ios/Video/Features/RCTResourceLoaderDelegate.swift index 24b6fcb8c6..e0bad41bc5 100644 --- a/ios/Video/Features/RCTResourceLoaderDelegate.swift +++ b/ios/Video/Features/RCTResourceLoaderDelegate.swift @@ -2,184 +2,184 @@ import AVFoundation import Promises class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate { - private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:] - private var _requestingCertificate = false - private var _requestingCertificateErrored = false - private var _drm: DRMParams? 
- private var _localSourceEncryptionKeyScheme: String? - private var _reactTag: NSNumber? - private var _onVideoError: RCTDirectEventBlock? - private var _onGetLicense: RCTDirectEventBlock? - - init( - asset: AVURLAsset, - drm: DRMParams?, - localSourceEncryptionKeyScheme: String?, - onVideoError: RCTDirectEventBlock?, - onGetLicense: RCTDirectEventBlock?, - reactTag: NSNumber - ) { - super.init() - let queue = DispatchQueue(label: "assetQueue") - asset.resourceLoader.setDelegate(self, queue: queue) - _reactTag = reactTag - _onVideoError = onVideoError - _onGetLicense = onGetLicense - _drm = drm - _localSourceEncryptionKeyScheme = localSourceEncryptionKeyScheme - } - - deinit { - for request in _loadingRequests.values { - request?.finishLoading() + private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:] + private var _requestingCertificate = false + private var _requestingCertificateErrored = false + private var _drm: DRMParams? + private var _localSourceEncryptionKeyScheme: String? + private var _reactTag: NSNumber? + private var _onVideoError: RCTDirectEventBlock? + private var _onGetLicense: RCTDirectEventBlock? + + init( + asset: AVURLAsset, + drm: DRMParams?, + localSourceEncryptionKeyScheme: String?, + onVideoError: RCTDirectEventBlock?, + onGetLicense: RCTDirectEventBlock?, + reactTag: NSNumber + ) { + super.init() + let queue = DispatchQueue(label: "assetQueue") + asset.resourceLoader.setDelegate(self, queue: queue) + _reactTag = reactTag + _onVideoError = onVideoError + _onGetLicense = onGetLicense + _drm = drm + _localSourceEncryptionKeyScheme = localSourceEncryptionKeyScheme } - } - func resourceLoader(_: AVAssetResourceLoader, shouldWaitForRenewalOfRequestedResource renewalRequest: AVAssetResourceRenewalRequest) -> Bool { - return loadingRequestHandling(renewalRequest) - } - - func resourceLoader(_: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool { - return loadingRequestHandling(loadingRequest) - } - - func resourceLoader(_: AVAssetResourceLoader, didCancel _: AVAssetResourceLoadingRequest) { - RCTLog("didCancelLoadingRequest") - } + deinit { + for request in _loadingRequests.values { + request?.finishLoading() + } + } - func setLicenseResult(_ license: String!, _ licenseUrl: String!) { - // Check if the loading request exists in _loadingRequests based on licenseUrl - guard let loadingRequest = _loadingRequests[licenseUrl] else { - setLicenseResultError("Loading request for licenseUrl \(licenseUrl) not found", licenseUrl) - return + func resourceLoader(_: AVAssetResourceLoader, shouldWaitForRenewalOfRequestedResource renewalRequest: AVAssetResourceRenewalRequest) -> Bool { + return loadingRequestHandling(renewalRequest) } - // Check if the license data is valid - guard let respondData = RCTVideoUtils.base64DataFromBase64String(base64String: license) else { - setLicenseResultError("No data from JS license response", licenseUrl) - return + func resourceLoader(_: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool { + return loadingRequestHandling(loadingRequest) } - let dataRequest: AVAssetResourceLoadingDataRequest! = loadingRequest?.dataRequest - dataRequest.respond(with: respondData) - loadingRequest!.finishLoading() - _loadingRequests.removeValue(forKey: licenseUrl) - } - - func setLicenseResultError(_ error: String!, _ licenseUrl: String!) 
{ - // Check if the loading request exists in _loadingRequests based on licenseUrl - guard let loadingRequest = _loadingRequests[licenseUrl] else { - print("Loading request for licenseUrl \(licenseUrl) not found. Error: \(error)") - return + func resourceLoader(_: AVAssetResourceLoader, didCancel _: AVAssetResourceLoadingRequest) { + RCTLog("didCancelLoadingRequest") } - self.finishLoadingWithError(error: RCTVideoErrorHandler.fromJSPart(error), licenseUrl: licenseUrl) - } + func setLicenseResult(_ license: String!, _ licenseUrl: String!) { + // Check if the loading request exists in _loadingRequests based on licenseUrl + guard let loadingRequest = _loadingRequests[licenseUrl] else { + setLicenseResultError("Loading request for licenseUrl \(licenseUrl) not found", licenseUrl) + return + } - func finishLoadingWithError(error: Error!, licenseUrl: String!) -> Bool { - // Check if the loading request exists in _loadingRequests based on licenseUrl - guard let loadingRequest = _loadingRequests[licenseUrl], let error = error as NSError? else { - // Handle the case where the loading request is not found or error is nil - return false - } + // Check if the license data is valid + guard let respondData = RCTVideoUtils.base64DataFromBase64String(base64String: license) else { + setLicenseResultError("No data from JS license response", licenseUrl) + return + } - loadingRequest!.finishLoading(with: error) - _loadingRequests.removeValue(forKey: licenseUrl) - _onVideoError?([ - "error": [ - "code": NSNumber(value: error.code), - "localizedDescription": error.localizedDescription ?? "", - "localizedFailureReason": error.localizedFailureReason ?? "", - "localizedRecoverySuggestion": error.localizedRecoverySuggestion ?? "", - "domain": error.domain, - ], - "target": _reactTag, - ]) - - return false - } - - func loadingRequestHandling(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { - if handleEmbeddedKey(loadingRequest) { - return true + let dataRequest: AVAssetResourceLoadingDataRequest! = loadingRequest?.dataRequest + dataRequest.respond(with: respondData) + loadingRequest!.finishLoading() + _loadingRequests.removeValue(forKey: licenseUrl) } - if _drm != nil { - return handleDrm(loadingRequest) + func setLicenseResultError(_ error: String!, _ licenseUrl: String!) { + // Check if the loading request exists in _loadingRequests based on licenseUrl + guard let loadingRequest = _loadingRequests[licenseUrl] else { + print("Loading request for licenseUrl \(licenseUrl) not found. Error: \(error)") + return + } + + self.finishLoadingWithError(error: RCTVideoErrorHandler.fromJSPart(error), licenseUrl: licenseUrl) } - return false - } + func finishLoadingWithError(error: Error!, licenseUrl: String!) -> Bool { + // Check if the loading request exists in _loadingRequests based on licenseUrl + guard let loadingRequest = _loadingRequests[licenseUrl], let error = error as NSError? else { + // Handle the case where the loading request is not found or error is nil + return false + } - func handleEmbeddedKey(_ loadingRequest: AVAssetResourceLoadingRequest!) 
-> Bool { - guard let url = loadingRequest.request.url, - let _localSourceEncryptionKeyScheme = _localSourceEncryptionKeyScheme, - let persistentKeyData = RCTVideoUtils.extractDataFromCustomSchemeUrl(from: url, scheme: _localSourceEncryptionKeyScheme) - else { - return false + loadingRequest!.finishLoading(with: error) + _loadingRequests.removeValue(forKey: licenseUrl) + _onVideoError?([ + "error": [ + "code": NSNumber(value: error.code), + "localizedDescription": error.localizedDescription ?? "", + "localizedFailureReason": error.localizedFailureReason ?? "", + "localizedRecoverySuggestion": error.localizedRecoverySuggestion ?? "", + "domain": error.domain, + ], + "target": _reactTag, + ]) + + return false } - loadingRequest.contentInformationRequest?.contentType = AVStreamingKeyDeliveryPersistentContentKeyType - loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true - loadingRequest.contentInformationRequest?.contentLength = Int64(persistentKeyData.count) - loadingRequest.dataRequest?.respond(with: persistentKeyData) - loadingRequest.finishLoading() + func loadingRequestHandling(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { + if handleEmbeddedKey(loadingRequest) { + return true + } - return true - } + if _drm != nil { + return handleDrm(loadingRequest) + } - func handleDrm(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { - if _requestingCertificate { - return true - } else if _requestingCertificateErrored { - return false + return false } - var requestKey: String = loadingRequest.request.url?.absoluteString ?? "" + func handleEmbeddedKey(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { + guard let url = loadingRequest.request.url, + let _localSourceEncryptionKeyScheme = _localSourceEncryptionKeyScheme, + let persistentKeyData = RCTVideoUtils.extractDataFromCustomSchemeUrl(from: url, scheme: _localSourceEncryptionKeyScheme) + else { + return false + } - _loadingRequests[requestKey] = loadingRequest + loadingRequest.contentInformationRequest?.contentType = AVStreamingKeyDeliveryPersistentContentKeyType + loadingRequest.contentInformationRequest?.isByteRangeAccessSupported = true + loadingRequest.contentInformationRequest?.contentLength = Int64(persistentKeyData.count) + loadingRequest.dataRequest?.respond(with: persistentKeyData) + loadingRequest.finishLoading() - guard let _drm = _drm, let drmType = _drm.type, drmType == "fairplay" else { - return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey) + return true } - var promise: Promise - if _onGetLicense != nil { - let contentId = _drm.contentId ?? loadingRequest.request.url?.host - promise = RCTVideoDRM.handleWithOnGetLicense( - loadingRequest: loadingRequest, - contentId: contentId, - certificateUrl: _drm.certificateUrl, - base64Certificate: _drm.base64Certificate - ).then { spcData in - self._requestingCertificate = true - self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? loadingRequest.request.url?.absoluteString ?? "", - "contentId": contentId ?? 
"", - "spcBase64": spcData.base64EncodedString(options: []), - "target": self._reactTag]) - } - } else { - promise = RCTVideoDRM.handleInternalGetLicense( - loadingRequest: loadingRequest, - contentId: _drm.contentId, - licenseServer: _drm.licenseServer, - certificateUrl: _drm.certificateUrl, - base64Certificate: _drm.base64Certificate, - headers: _drm.headers - ).then { data in - guard let dataRequest = loadingRequest.dataRequest else { - throw RCTVideoErrorHandler.noCertificateData + func handleDrm(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool { + if _requestingCertificate { + return true + } else if _requestingCertificateErrored { + return false } - dataRequest.respond(with: data) - loadingRequest.finishLoading() - } - } - promise.catch { error in - self.finishLoadingWithError(error: error, licenseUrl: requestKey) - self._requestingCertificateErrored = true - } + var requestKey: String = loadingRequest.request.url?.absoluteString ?? "" + + _loadingRequests[requestKey] = loadingRequest + + guard let _drm = _drm, let drmType = _drm.type, drmType == "fairplay" else { + return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey) + } + + var promise: Promise + if _onGetLicense != nil { + let contentId = _drm.contentId ?? loadingRequest.request.url?.host + promise = RCTVideoDRM.handleWithOnGetLicense( + loadingRequest: loadingRequest, + contentId: contentId, + certificateUrl: _drm.certificateUrl, + base64Certificate: _drm.base64Certificate + ).then { spcData in + self._requestingCertificate = true + self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? loadingRequest.request.url?.absoluteString ?? "", + "contentId": contentId ?? "", + "spcBase64": spcData.base64EncodedString(options: []), + "target": self._reactTag]) + } + } else { + promise = RCTVideoDRM.handleInternalGetLicense( + loadingRequest: loadingRequest, + contentId: _drm.contentId, + licenseServer: _drm.licenseServer, + certificateUrl: _drm.certificateUrl, + base64Certificate: _drm.base64Certificate, + headers: _drm.headers + ).then { data in + guard let dataRequest = loadingRequest.dataRequest else { + throw RCTVideoErrorHandler.noCertificateData + } + dataRequest.respond(with: data) + loadingRequest.finishLoading() + } + } - return true - } + promise.catch { error in + self.finishLoadingWithError(error: error, licenseUrl: requestKey) + self._requestingCertificateErrored = true + } + + return true + } } diff --git a/ios/Video/Features/RCTVideoDRM.swift b/ios/Video/Features/RCTVideoDRM.swift index fa99c7d5bf..c47530e5d9 100644 --- a/ios/Video/Features/RCTVideoDRM.swift +++ b/ios/Video/Features/RCTVideoDRM.swift @@ -2,177 +2,180 @@ import AVFoundation import Promises enum RCTVideoDRM { - static func fetchLicense( - licenseServer: String, - spcData: Data?, - contentId: String, - headers: [String: Any]? - ) -> Promise { - let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers) - - return Promise(on: .global()) { fulfill, reject in - let postDataTask = URLSession.shared.dataTask(with: request as URLRequest, completionHandler: { (data: Data!, response: URLResponse!, error: Error!) in - let httpResponse: HTTPURLResponse! = (response as! 
HTTPURLResponse) - - guard error == nil else { - print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") - reject(error) - return - } - guard httpResponse.statusCode == 200 else { - print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") - reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode)) - return + static func fetchLicense( + licenseServer: String, + spcData: Data?, + contentId: String, + headers: [String: Any]? + ) -> Promise { + let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers) + + return Promise(on: .global()) { fulfill, reject in + let postDataTask = URLSession.shared.dataTask( + with: request as URLRequest, + completionHandler: { (data: Data!, response: URLResponse!, error: Error!) in + let httpResponse: HTTPURLResponse! = (response as! HTTPURLResponse) + + guard error == nil else { + print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") + reject(error) + return + } + guard httpResponse.statusCode == 200 else { + print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") + reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode)) + return + } + + guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else { + reject(RCTVideoErrorHandler.noDataFromLicenseRequest) + return + } + + fulfill(decodedData) + } + ) + postDataTask.resume() } + } - guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else { - reject(RCTVideoErrorHandler.noDataFromLicenseRequest) - return + static func createLicenseRequest( + licenseServer: String, + spcData: Data?, + contentId: String, + headers: [String: Any]? + ) -> URLRequest { + var request = URLRequest(url: URL(string: licenseServer)!) + request.httpMethod = "POST" + + if let headers = headers { + for item in headers { + guard let key = item.key as? String, let value = item.value as? String else { + continue + } + request.setValue(value, forHTTPHeaderField: key) + } } - fulfill(decodedData) - }) - postDataTask.resume() + let spcEncoded = spcData?.base64EncodedString(options: []) + let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes( + kCFAllocatorDefault, + spcEncoded as? CFString? as! CFString, + nil, + "?=&+" as CFString, + CFStringBuiltInEncodings.UTF8.rawValue + ) as? String + let post = String(format: "spc=%@&%@", spcUrlEncoded as! CVarArg, contentId) + let postData = post.data(using: String.Encoding.utf8, allowLossyConversion: true) + request.httpBody = postData + + return request } - } - - static func createLicenseRequest( - licenseServer: String, - spcData: Data?, - contentId: String, - headers: [String: Any]? - ) -> URLRequest { - var request = URLRequest(url: URL(string: licenseServer)!) - request.httpMethod = "POST" - - if let headers = headers { - for item in headers { - guard let key = item.key as? String, let value = item.value as? String else { - continue + + static func fetchSpcData( + loadingRequest: AVAssetResourceLoadingRequest, + certificateData: Data, + contentIdData: Data + ) -> Promise { + return Promise(on: .global()) { fulfill, reject in + var spcError: NSError! + var spcData: Data? 
+ do { + spcData = try loadingRequest.streamingContentKeyRequestData(forApp: certificateData, contentIdentifier: contentIdData as Data, options: nil) + } catch _ { + print("SPC error") + } + + if spcError != nil { + reject(spcError) + } + + guard let spcData = spcData else { + reject(RCTVideoErrorHandler.noSPC) + return + } + + fulfill(spcData) } - request.setValue(value, forHTTPHeaderField: key) - } } - let spcEncoded = spcData?.base64EncodedString(options: []) - let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes( - kCFAllocatorDefault, - spcEncoded as? CFString? as! CFString, - nil, - "?=&+" as CFString, - CFStringBuiltInEncodings.UTF8.rawValue - ) as? String - let post = String(format: "spc=%@&%@", spcUrlEncoded as! CVarArg, contentId) - let postData = post.data(using: String.Encoding.utf8, allowLossyConversion: true) - request.httpBody = postData - - return request - } - - static func fetchSpcData( - loadingRequest: AVAssetResourceLoadingRequest, - certificateData: Data, - contentIdData: Data - ) -> Promise { - return Promise(on: .global()) { fulfill, reject in - var spcError: NSError! - var spcData: Data? - do { - spcData = try loadingRequest.streamingContentKeyRequestData(forApp: certificateData, contentIdentifier: contentIdData as Data, options: nil) - } catch _ { - print("SPC error") - } - - if spcError != nil { - reject(spcError) - } - - guard let spcData = spcData else { - reject(RCTVideoErrorHandler.noSPC) - return - } - - fulfill(spcData) - } - } - - static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) -> Promise { - return Promise(on: .global()) { fulfill, reject in - guard let certificateStringUrl = certificateStringUrl, - let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else { - reject(RCTVideoErrorHandler.noCertificateURL) - return - } - - var certificateData: Data? - do { - certificateData = try Data(contentsOf: certificateURL) - if base64Certificate != nil { - certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters) + static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) -> Promise { + return Promise(on: .global()) { fulfill, reject in + guard let certificateStringUrl = certificateStringUrl, + let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else { + reject(RCTVideoErrorHandler.noCertificateURL) + return + } + + var certificateData: Data? + do { + certificateData = try Data(contentsOf: certificateURL) + if base64Certificate != nil { + certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters) + } + } catch {} + + guard let certificateData = certificateData else { + reject(RCTVideoErrorHandler.noCertificateData) + return + } + + fulfill(certificateData) } - } catch {} - - guard let certificateData = certificateData else { - reject(RCTVideoErrorHandler.noCertificateData) - return - } - - fulfill(certificateData) } - } - static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?, - base64Certificate: Bool?) -> Promise { - let contentIdData = contentId?.data(using: .utf8) + static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?, + base64Certificate: Bool?) 
-> Promise { + let contentIdData = contentId?.data(using: .utf8) + + return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) + .then { certificateData -> Promise in + guard let contentIdData = contentIdData else { + throw RCTVideoError.invalidContentId as! Error + } + + return RCTVideoDRM.fetchSpcData( + loadingRequest: loadingRequest, + certificateData: certificateData, + contentIdData: contentIdData + ) + } + } - return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) - .then { certificateData -> Promise in - guard let contentIdData = contentIdData else { - throw RCTVideoError.invalidContentId as! Error + static func handleInternalGetLicense( + loadingRequest: AVAssetResourceLoadingRequest, + contentId: String?, + licenseServer: String?, + certificateUrl: String?, + base64Certificate: Bool?, + headers: [String: Any]? + ) -> Promise { + let url = loadingRequest.request.url + + guard let contentId = contentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else { + return Promise(RCTVideoError.invalidContentId as! Error) } - return RCTVideoDRM.fetchSpcData( - loadingRequest: loadingRequest, - certificateData: certificateData, - contentIdData: contentIdData - ) - } - } - - static func handleInternalGetLicense( - loadingRequest: AVAssetResourceLoadingRequest, - contentId: String?, - licenseServer: String?, - certificateUrl: String?, - base64Certificate: Bool?, - headers: [String: Any]? - ) -> Promise { - let url = loadingRequest.request.url - - guard let contentId = contentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else { - return Promise(RCTVideoError.invalidContentId as! Error) + let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data + + return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) + .then { certificateData in + return RCTVideoDRM.fetchSpcData( + loadingRequest: loadingRequest, + certificateData: certificateData, + contentIdData: contentIdData + ) + } + .then { spcData -> Promise in + guard let licenseServer = licenseServer else { + throw RCTVideoError.noLicenseServerURL as! Error + } + return RCTVideoDRM.fetchLicense( + licenseServer: licenseServer, + spcData: spcData, + contentId: contentId, + headers: headers + ) + } } - - let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data - - return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) - .then { certificateData in - return RCTVideoDRM.fetchSpcData( - loadingRequest: loadingRequest, - certificateData: certificateData, - contentIdData: contentIdData - ) - } - .then { spcData -> Promise in - guard let licenseServer = licenseServer else { - throw RCTVideoError.noLicenseServerURL as! 
Error - } - return RCTVideoDRM.fetchLicense( - licenseServer: licenseServer, - spcData: spcData, - contentId: contentId, - headers: headers - ) - } - } } diff --git a/ios/Video/Features/RCTVideoErrorHandling.swift b/ios/Video/Features/RCTVideoErrorHandling.swift index caee88bb10..7dc687839e 100644 --- a/ios/Video/Features/RCTVideoErrorHandling.swift +++ b/ios/Video/Features/RCTVideoErrorHandling.swift @@ -1,114 +1,114 @@ // MARK: - RCTVideoError enum RCTVideoError: Int { - case fromJSPart - case noLicenseServerURL - case licenseRequestNotOk - case noDataFromLicenseRequest - case noSPC - case noDataRequest - case noCertificateData - case noCertificateURL - case noFairplayDRM - case noDRMData - case invalidContentId + case fromJSPart + case noLicenseServerURL + case licenseRequestNotOk + case noDataFromLicenseRequest + case noSPC + case noDataRequest + case noCertificateData + case noCertificateURL + case noFairplayDRM + case noDRMData + case invalidContentId } // MARK: - RCTVideoErrorHandler enum RCTVideoErrorHandler { - static let noDRMData = NSError( - domain: "RCTVideo", - code: RCTVideoError.noDRMData.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No drm object found.", - NSLocalizedRecoverySuggestionErrorKey: "Have you specified the 'drm' prop?", - ] - ) - - static let noCertificateURL = NSError( - domain: "RCTVideo", - code: RCTVideoError.noCertificateURL.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM License.", - NSLocalizedFailureReasonErrorKey: "No certificate URL has been found.", - NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop certificateUrl?", - ] - ) + static let noDRMData = NSError( + domain: "RCTVideo", + code: RCTVideoError.noDRMData.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No drm object found.", + NSLocalizedRecoverySuggestionErrorKey: "Have you specified the 'drm' prop?", + ] + ) - static let noCertificateData = NSError( - domain: "RCTVideo", - code: RCTVideoError.noCertificateData.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No certificate data obtained from the specificied url.", - NSLocalizedRecoverySuggestionErrorKey: "Have you specified a valid 'certificateUrl'?", - ] - ) + static let noCertificateURL = NSError( + domain: "RCTVideo", + code: RCTVideoError.noCertificateURL.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM License.", + NSLocalizedFailureReasonErrorKey: "No certificate URL has been found.", + NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop certificateUrl?", + ] + ) - static let noSPC = NSError( - domain: "RCTVideo", - code: RCTVideoError.noSPC.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining license.", - NSLocalizedFailureReasonErrorKey: "No spc received.", - NSLocalizedRecoverySuggestionErrorKey: "Check your DRM config.", - ] - ) + static let noCertificateData = NSError( + domain: "RCTVideo", + code: RCTVideoError.noCertificateData.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No certificate data obtained from the specificied url.", + NSLocalizedRecoverySuggestionErrorKey: "Have you specified a valid 'certificateUrl'?", + ] + ) - static let noLicenseServerURL = NSError( - domain: "RCTVideo", - code: 
RCTVideoError.noLicenseServerURL.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM License.", - NSLocalizedFailureReasonErrorKey: "No license server URL has been found.", - NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop licenseServer?", - ] - ) + static let noSPC = NSError( + domain: "RCTVideo", + code: RCTVideoError.noSPC.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining license.", + NSLocalizedFailureReasonErrorKey: "No spc received.", + NSLocalizedRecoverySuggestionErrorKey: "Check your DRM config.", + ] + ) - static let noDataFromLicenseRequest = NSError( - domain: "RCTVideo", - code: RCTVideoError.noDataFromLicenseRequest.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No data received from the license server.", - NSLocalizedRecoverySuggestionErrorKey: "Is the licenseServer ok?", - ] - ) + static let noLicenseServerURL = NSError( + domain: "RCTVideo", + code: RCTVideoError.noLicenseServerURL.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM License.", + NSLocalizedFailureReasonErrorKey: "No license server URL has been found.", + NSLocalizedRecoverySuggestionErrorKey: "Did you specified the prop licenseServer?", + ] + ) - static func licenseRequestNotOk(_ statusCode: Int) -> NSError { - return NSError( - domain: "RCTVideo", - code: RCTVideoError.licenseRequestNotOk.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining license.", - NSLocalizedFailureReasonErrorKey: String( - format: "License server responded with status code %li", - statusCode - ), - NSLocalizedRecoverySuggestionErrorKey: "Did you send the correct data to the license Server? Is the server ok?", - ] + static let noDataFromLicenseRequest = NSError( + domain: "RCTVideo", + code: RCTVideoError.noDataFromLicenseRequest.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No data received from the license server.", + NSLocalizedRecoverySuggestionErrorKey: "Is the licenseServer ok?", + ] ) - } - static func fromJSPart(_ error: String) -> NSError { - return NSError(domain: "RCTVideo", - code: RCTVideoError.fromJSPart.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: error, - NSLocalizedFailureReasonErrorKey: error, - NSLocalizedRecoverySuggestionErrorKey: error, - ]) - } + static func licenseRequestNotOk(_ statusCode: Int) -> NSError { + return NSError( + domain: "RCTVideo", + code: RCTVideoError.licenseRequestNotOk.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining license.", + NSLocalizedFailureReasonErrorKey: String( + format: "License server responded with status code %li", + statusCode + ), + NSLocalizedRecoverySuggestionErrorKey: "Did you send the correct data to the license Server? 
Is the server ok?", + ] + ) + } - static let invalidContentId = NSError( - domain: "RCTVideo", - code: RCTVideoError.invalidContentId.rawValue, - userInfo: [ - NSLocalizedDescriptionKey: "Error obtaining DRM license.", - NSLocalizedFailureReasonErrorKey: "No valide content Id received", - NSLocalizedRecoverySuggestionErrorKey: "Is the contentId and url ok?", - ] - ) + static func fromJSPart(_ error: String) -> NSError { + return NSError(domain: "RCTVideo", + code: RCTVideoError.fromJSPart.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: error, + NSLocalizedFailureReasonErrorKey: error, + NSLocalizedRecoverySuggestionErrorKey: error, + ]) + } + + static let invalidContentId = NSError( + domain: "RCTVideo", + code: RCTVideoError.invalidContentId.rawValue, + userInfo: [ + NSLocalizedDescriptionKey: "Error obtaining DRM license.", + NSLocalizedFailureReasonErrorKey: "No valide content Id received", + NSLocalizedRecoverySuggestionErrorKey: "Is the contentId and url ok?", + ] + ) } diff --git a/ios/Video/Features/RCTVideoSave.swift b/ios/Video/Features/RCTVideoSave.swift index 66b9d0a3d7..76fc2901fa 100644 --- a/ios/Video/Features/RCTVideoSave.swift +++ b/ios/Video/Features/RCTVideoSave.swift @@ -1,69 +1,69 @@ import AVFoundation enum RCTVideoSave { - static func save( - options _: NSDictionary!, - resolve: @escaping RCTPromiseResolveBlock, - reject: @escaping RCTPromiseRejectBlock, + static func save( + options _: NSDictionary!, + resolve: @escaping RCTPromiseResolveBlock, + reject: @escaping RCTPromiseRejectBlock, - playerItem: AVPlayerItem? - ) { - let asset: AVAsset! = playerItem?.asset + playerItem: AVPlayerItem? + ) { + let asset: AVAsset! = playerItem?.asset - guard asset != nil else { - reject("ERROR_ASSET_NIL", "Asset is nil", nil) - return - } + guard asset != nil else { + reject("ERROR_ASSET_NIL", "Asset is nil", nil) + return + } - guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else { - reject("ERROR_COULD_NOT_CREATE_EXPORT_SESSION", "Could not create export session", nil) - return + guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else { + reject("ERROR_COULD_NOT_CREATE_EXPORT_SESSION", "Could not create export session", nil) + return + } + var path: String! + path = RCTVideoSave.generatePathInDirectory( + directory: URL(fileURLWithPath: RCTVideoSave.cacheDirectoryPath() ?? "").appendingPathComponent("Videos").path, + withExtension: ".mp4" + ) + let url: NSURL! = NSURL.fileURL(withPath: path) as NSURL + exportSession.outputFileType = AVFileType.mp4 + exportSession.outputURL = url as URL? + exportSession.videoComposition = playerItem?.videoComposition + exportSession.shouldOptimizeForNetworkUse = true + exportSession.exportAsynchronously(completionHandler: { + switch exportSession.status { + case .failed: + reject("ERROR_COULD_NOT_EXPORT_VIDEO", "Could not export video", exportSession.error) + case .cancelled: + reject("ERROR_EXPORT_SESSION_CANCELLED", "Export session was cancelled", exportSession.error) + default: + resolve(["uri": url.absoluteString]) + } + }) } - var path: String! - path = RCTVideoSave.generatePathInDirectory( - directory: URL(fileURLWithPath: RCTVideoSave.cacheDirectoryPath() ?? "").appendingPathComponent("Videos").path, - withExtension: ".mp4" - ) - let url: NSURL! = NSURL.fileURL(withPath: path) as NSURL - exportSession.outputFileType = AVFileType.mp4 - exportSession.outputURL = url as URL? 
- exportSession.videoComposition = playerItem?.videoComposition - exportSession.shouldOptimizeForNetworkUse = true - exportSession.exportAsynchronously(completionHandler: { - switch exportSession.status { - case .failed: - reject("ERROR_COULD_NOT_EXPORT_VIDEO", "Could not export video", exportSession.error) - case .cancelled: - reject("ERROR_EXPORT_SESSION_CANCELLED", "Export session was cancelled", exportSession.error) - default: - resolve(["uri": url.absoluteString]) - } - }) - } - static func generatePathInDirectory(directory: String?, withExtension extension: String?) -> String? { - let fileName = UUID().uuidString + (`extension` ?? "") - RCTVideoSave.ensureDirExists(withPath: directory) - return URL(fileURLWithPath: directory ?? "").appendingPathComponent(fileName).path - } + static func generatePathInDirectory(directory: String?, withExtension extension: String?) -> String? { + let fileName = UUID().uuidString + (`extension` ?? "") + RCTVideoSave.ensureDirExists(withPath: directory) + return URL(fileURLWithPath: directory ?? "").appendingPathComponent(fileName).path + } - static func cacheDirectoryPath() -> String? { - let array = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).map(\.path) - return array[0] - } + static func cacheDirectoryPath() -> String? { + let array = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).map(\.path) + return array[0] + } - static func ensureDirExists(withPath path: String?) -> Bool { - var isDir: ObjCBool = false - var error: Error? - let exists = FileManager.default.fileExists(atPath: path ?? "", isDirectory: &isDir) - if !(exists && isDir.boolValue) { - do { - try FileManager.default.createDirectory(atPath: path ?? "", withIntermediateDirectories: true, attributes: nil) - } catch {} - if error != nil { - return false - } + static func ensureDirExists(withPath path: String?) -> Bool { + var isDir: ObjCBool = false + var error: Error? + let exists = FileManager.default.fileExists(atPath: path ?? "", isDirectory: &isDir) + if !(exists && isDir.boolValue) { + do { + try FileManager.default.createDirectory(atPath: path ?? "", withIntermediateDirectories: true, attributes: nil) + } catch {} + if error != nil { + return false + } + } + return true } - return true - } } diff --git a/ios/Video/Features/RCTVideoTVUtils.swift b/ios/Video/Features/RCTVideoTVUtils.swift index 84d8f1e03e..2edcf9c76f 100644 --- a/ios/Video/Features/RCTVideoTVUtils.swift +++ b/ios/Video/Features/RCTVideoTVUtils.swift @@ -7,42 +7,42 @@ import Foundation */ #if os(tvOS) - enum RCTVideoTVUtils { - static func makeNavigationMarkerGroups(_ chapters: [Chapter]) -> [AVNavigationMarkersGroup] { - var metadataGroups = [AVTimedMetadataGroup]() - - // Iterate over the defined chapters and build a timed metadata group object for each. - chapters.forEach { chapter in - metadataGroups.append(makeTimedMetadataGroup(for: chapter)) - } - - return [AVNavigationMarkersGroup(title: nil, timedNavigationMarkers: metadataGroups)] - } - - static func makeTimedMetadataGroup(for chapter: Chapter) -> AVTimedMetadataGroup { - var metadata = [AVMetadataItem]() - - // Create a metadata item that contains the chapter title. - let titleItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierTitle, value: chapter.title) - metadata.append(titleItem) - - // Create a time range for the metadata group. 
- let timescale: Int32 = 600 - let startTime = CMTime(seconds: chapter.startTime, preferredTimescale: timescale) - let endTime = CMTime(seconds: chapter.endTime, preferredTimescale: timescale) - let timeRange = CMTimeRangeFromTimeToTime(start: startTime, end: endTime) - - // Image - if let imgUri = chapter.uri, - let uri = URL(string: imgUri), - let imgData = try? Data(contentsOf: uri), - let image = UIImage(data: imgData), - let pngData = image.pngData() { - let imageItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierArtwork, value: pngData) - metadata.append(imageItem) - } - - return AVTimedMetadataGroup(items: metadata, timeRange: timeRange) + enum RCTVideoTVUtils { + static func makeNavigationMarkerGroups(_ chapters: [Chapter]) -> [AVNavigationMarkersGroup] { + var metadataGroups = [AVTimedMetadataGroup]() + + // Iterate over the defined chapters and build a timed metadata group object for each. + chapters.forEach { chapter in + metadataGroups.append(makeTimedMetadataGroup(for: chapter)) + } + + return [AVNavigationMarkersGroup(title: nil, timedNavigationMarkers: metadataGroups)] + } + + static func makeTimedMetadataGroup(for chapter: Chapter) -> AVTimedMetadataGroup { + var metadata = [AVMetadataItem]() + + // Create a metadata item that contains the chapter title. + let titleItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierTitle, value: chapter.title) + metadata.append(titleItem) + + // Create a time range for the metadata group. + let timescale: Int32 = 600 + let startTime = CMTime(seconds: chapter.startTime, preferredTimescale: timescale) + let endTime = CMTime(seconds: chapter.endTime, preferredTimescale: timescale) + let timeRange = CMTimeRangeFromTimeToTime(start: startTime, end: endTime) + + // Image + if let imgUri = chapter.uri, + let uri = URL(string: imgUri), + let imgData = try? Data(contentsOf: uri), + let image = UIImage(data: imgData), + let pngData = image.pngData() { + let imageItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierArtwork, value: pngData) + metadata.append(imageItem) + } + + return AVTimedMetadataGroup(items: metadata, timeRange: timeRange) + } } - } #endif diff --git a/ios/Video/Features/RCTVideoUtils.swift b/ios/Video/Features/RCTVideoUtils.swift index ec63a14cda..d812c52def 100644 --- a/ios/Video/Features/RCTVideoUtils.swift +++ b/ios/Video/Features/RCTVideoUtils.swift @@ -6,345 +6,346 @@ import Promises * Collection of pure functions */ enum RCTVideoUtils { - /*! - * Calculates and returns the playable duration of the current player item using its loaded time ranges. - * - * \returns The playable duration of the current player item in seconds. - */ - static func calculatePlayableDuration(_ player: AVPlayer?, withSource source: VideoSource?) -> NSNumber { - guard let player = player, - let video: AVPlayerItem = player.currentItem, - video.status == AVPlayerItem.Status.readyToPlay else { - return 0 - } + /*! + * Calculates and returns the playable duration of the current player item using its loaded time ranges. + * + * \returns The playable duration of the current player item in seconds. + */ + static func calculatePlayableDuration(_ player: AVPlayer?, withSource source: VideoSource?) -> NSNumber { + guard let player = player, + let video: AVPlayerItem = player.currentItem, + video.status == AVPlayerItem.Status.readyToPlay else { + return 0 + } - if source?.cropStart != nil && source?.cropEnd != nil { - return NSNumber(value: (Float64(source?.cropEnd ?? 0) - Float64(source?.cropStart ?? 
0)) / 1000) - } + if source?.cropStart != nil && source?.cropEnd != nil { + return NSNumber(value: (Float64(source?.cropEnd ?? 0) - Float64(source?.cropStart ?? 0)) / 1000) + } - var effectiveTimeRange: CMTimeRange? - for value in video.loadedTimeRanges { - let timeRange: CMTimeRange = value.timeRangeValue - if CMTimeRangeContainsTime(timeRange, time: video.currentTime()) { - effectiveTimeRange = timeRange - break - } - } + var effectiveTimeRange: CMTimeRange? + for value in video.loadedTimeRanges { + let timeRange: CMTimeRange = value.timeRangeValue + if CMTimeRangeContainsTime(timeRange, time: video.currentTime()) { + effectiveTimeRange = timeRange + break + } + } - if let effectiveTimeRange = effectiveTimeRange { - let playableDuration: Float64 = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)) - if playableDuration > 0 { - if source?.cropStart != nil { - return NSNumber(value: playableDuration - Float64(source?.cropStart ?? 0) / 1000) + if let effectiveTimeRange = effectiveTimeRange { + let playableDuration: Float64 = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)) + if playableDuration > 0 { + if source?.cropStart != nil { + return NSNumber(value: playableDuration - Float64(source?.cropStart ?? 0) / 1000) + } + + return playableDuration as NSNumber + } } - return playableDuration as NSNumber - } + return 0 } - return 0 - } + static func urlFilePath(filepath: NSString!, searchPath: FileManager.SearchPathDirectory) -> NSURL! { + if filepath.contains("file://") { + return NSURL(string: filepath as String) + } - static func urlFilePath(filepath: NSString!, searchPath: FileManager.SearchPathDirectory) -> NSURL! { - if filepath.contains("file://") { - return NSURL(string: filepath as String) - } + // if no file found, check if the file exists in the Document directory + let paths: [String]! = NSSearchPathForDirectoriesInDomains(searchPath, .userDomainMask, true) + var relativeFilePath: String! = filepath.lastPathComponent + // the file may be multiple levels below the documents directory + let directoryString: String! = searchPath == .cachesDirectory ? "Library/Caches/" : "Documents" + let fileComponents: [String]! = filepath.components(separatedBy: directoryString) + if fileComponents.count > 1 { + relativeFilePath = fileComponents[1] + } - // if no file found, check if the file exists in the Document directory - let paths: [String]! = NSSearchPathForDirectoriesInDomains(searchPath, .userDomainMask, true) - var relativeFilePath: String! = filepath.lastPathComponent - // the file may be multiple levels below the documents directory - let directoryString: String! = searchPath == .cachesDirectory ? "Library/Caches/" : "Documents" - let fileComponents: [String]! = filepath.components(separatedBy: directoryString) - if fileComponents.count > 1 { - relativeFilePath = fileComponents[1] + let path: String! = (paths.first! as NSString).appendingPathComponent(relativeFilePath) + if FileManager.default.fileExists(atPath: path) { + return NSURL.fileURL(withPath: path) as NSURL + } + return nil } - let path: String! = (paths.first! as NSString).appendingPathComponent(relativeFilePath) - if FileManager.default.fileExists(atPath: path) { - return NSURL.fileURL(withPath: path) as NSURL - } - return nil - } - - static func playerItemSeekableTimeRange(_ player: AVPlayer?) 
-> CMTimeRange { - if let playerItem = player?.currentItem, - playerItem.status == .readyToPlay, - let firstItem = playerItem.seekableTimeRanges.first { - return firstItem.timeRangeValue + static func playerItemSeekableTimeRange(_ player: AVPlayer?) -> CMTimeRange { + if let playerItem = player?.currentItem, + playerItem.status == .readyToPlay, + let firstItem = playerItem.seekableTimeRanges.first { + return firstItem.timeRangeValue + } + + return CMTimeRange.zero } - return CMTimeRange.zero - } + static func playerItemDuration(_ player: AVPlayer?) -> CMTime { + if let playerItem = player?.currentItem, + playerItem.status == .readyToPlay { + return playerItem.duration + } - static func playerItemDuration(_ player: AVPlayer?) -> CMTime { - if let playerItem = player?.currentItem, - playerItem.status == .readyToPlay { - return playerItem.duration + return CMTime.invalid } - return CMTime.invalid - } - - static func calculateSeekableDuration(_ player: AVPlayer?) -> NSNumber { - let timeRange: CMTimeRange = RCTVideoUtils.playerItemSeekableTimeRange(player) - if CMTIME_IS_NUMERIC(timeRange.duration) { - return NSNumber(value: CMTimeGetSeconds(timeRange.duration)) + static func calculateSeekableDuration(_ player: AVPlayer?) -> NSNumber { + let timeRange: CMTimeRange = RCTVideoUtils.playerItemSeekableTimeRange(player) + if CMTIME_IS_NUMERIC(timeRange.duration) { + return NSNumber(value: CMTimeGetSeconds(timeRange.duration)) + } + return 0 } - return 0 - } - static func getAudioTrackInfo(_ player: AVPlayer?) -> [AnyObject]! { - guard let player = player else { - return [] + static func getAudioTrackInfo(_ player: AVPlayer?) -> [AnyObject]! { + guard let player = player else { + return [] + } + + let audioTracks: NSMutableArray! = NSMutableArray() + let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .audible) + for i in 0 ..< (group?.options.count ?? 0) { + let currentOption = group?.options[i] + var title = "" + let values = currentOption?.commonMetadata.map(\.value) + if (values?.count ?? 0) > 0, let value = values?[0] { + title = value as! String + } + let language: String! = currentOption?.extendedLanguageTag ?? "" + + let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) + + let audioTrack = [ + "index": NSNumber(value: i), + "title": title, + "language": language ?? "", + "selected": currentOption?.displayName == selectedOption?.displayName, + ] as [String: Any] + audioTracks.add(audioTrack) + } + return audioTracks as [AnyObject]? } - let audioTracks: NSMutableArray! = NSMutableArray() - let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .audible) - for i in 0 ..< (group?.options.count ?? 0) { - let currentOption = group?.options[i] - var title = "" - let values = currentOption?.commonMetadata.map(\.value) - if (values?.count ?? 0) > 0, let value = values?[0] { - title = value as! String - } - let language: String! = currentOption?.extendedLanguageTag ?? "" - - let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) - - let audioTrack = [ - "index": NSNumber(value: i), - "title": title, - "language": language ?? "", - "selected": currentOption?.displayName == selectedOption?.displayName, - ] as [String: Any] - audioTracks.add(audioTrack) + static func getTextTrackInfo(_ player: AVPlayer?) -> [TextTrack]! 
{ + guard let player = player else { + return [] + } + + // if streaming video, we extract the text tracks + var textTracks: [TextTrack] = [] + let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .legible) + for i in 0 ..< (group?.options.count ?? 0) { + let currentOption = group?.options[i] + var title = "" + let values = currentOption?.commonMetadata.map(\.value) + if (values?.count ?? 0) > 0, let value = values?[0] { + title = value as! String + } + let language: String! = currentOption?.extendedLanguageTag ?? "" + let selectedOpt = player.currentItem?.currentMediaSelection + let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) + let textTrack = TextTrack([ + "index": NSNumber(value: i), + "title": title, + "language": language, + "selected": currentOption?.displayName == selectedOption?.displayName, + ]) + textTracks.append(textTrack) + } + return textTracks } - return audioTracks as [AnyObject]? - } - static func getTextTrackInfo(_ player: AVPlayer?) -> [TextTrack]! { - guard let player = player else { - return [] + // UNUSED + static func getCurrentTime(playerItem: AVPlayerItem?) -> Float { + return Float(CMTimeGetSeconds(playerItem?.currentTime() ?? .zero)) } - // if streaming video, we extract the text tracks - var textTracks: [TextTrack] = [] - let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .legible) - for i in 0 ..< (group?.options.count ?? 0) { - let currentOption = group?.options[i] - var title = "" - let values = currentOption?.commonMetadata.map(\.value) - if (values?.count ?? 0) > 0, let value = values?[0] { - title = value as! String - } - let language: String! = currentOption?.extendedLanguageTag ?? "" - let selectedOpt = player.currentItem?.currentMediaSelection - let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) - let textTrack = TextTrack([ - "index": NSNumber(value: i), - "title": title, - "language": language, - "selected": currentOption?.displayName == selectedOption?.displayName, - ]) - textTracks.append(textTrack) + static func base64DataFromBase64String(base64String: String?) -> Data? { + if let base64String = base64String { + return Data(base64Encoded: base64String) + } + return nil } - return textTracks - } - // UNUSED - static func getCurrentTime(playerItem: AVPlayerItem?) -> Float { - return Float(CMTimeGetSeconds(playerItem?.currentTime() ?? .zero)) - } + static func replaceURLScheme(url: URL, scheme: String?) -> URL? { + var urlComponents = URLComponents(url: url, resolvingAgainstBaseURL: false) + urlComponents?.scheme = scheme - static func base64DataFromBase64String(base64String: String?) -> Data? { - if let base64String = base64String { - return Data(base64Encoded: base64String) + return urlComponents?.url } - return nil - } - static func replaceURLScheme(url: URL, scheme: String?) -> URL? { - var urlComponents = URLComponents(url: url, resolvingAgainstBaseURL: false) - urlComponents?.scheme = scheme + static func extractDataFromCustomSchemeUrl(from url: URL, scheme: String) -> Data? { + guard url.scheme == scheme, + let adoptURL = RCTVideoUtils.replaceURLScheme(url: url, scheme: nil) else { return nil } - return urlComponents?.url - } + return Data(base64Encoded: adoptURL.absoluteString) + } - static func extractDataFromCustomSchemeUrl(from url: URL, scheme: String) -> Data? 
{ - guard url.scheme == scheme, - let adoptURL = RCTVideoUtils.replaceURLScheme(url: url, scheme: nil) else { return nil } + static func generateMixComposition(_ asset: AVAsset) -> AVMutableComposition { + let mixComposition = AVMutableComposition() - return Data(base64Encoded: adoptURL.absoluteString) - } + let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first - static func generateMixComposition(_ asset: AVAsset) -> AVMutableComposition { - let mixComposition = AVMutableComposition() + // we need videoAsset asset to be not null to get durration later + if videoAsset == nil { + return mixComposition + } - let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first + let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.video, + preferredTrackID: kCMPersistentTrackID_Invalid + ) + try? videoCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), + of: videoAsset, + at: .zero + ) + + let audioAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first + let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.audio, + preferredTrackID: kCMPersistentTrackID_Invalid + ) + try? audioCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), + of: audioAsset, + at: .zero + ) - // we need videoAsset asset to be not null to get durration later - if videoAsset == nil { - return mixComposition + return mixComposition } - let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( - withMediaType: AVMediaType.video, - preferredTrackID: kCMPersistentTrackID_Invalid - ) - try? videoCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), - of: videoAsset, - at: .zero - ) - - let audioAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first - let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( - withMediaType: AVMediaType.audio, - preferredTrackID: kCMPersistentTrackID_Invalid - ) - try? audioCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), - of: audioAsset, - at: .zero - ) - - return mixComposition - } - - static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, textTracks: [TextTrack]?) -> [TextTrack] { - let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first - var validTextTracks: [TextTrack] = [] - - if let textTracks = textTracks, !textTracks.isEmpty { - for i in 0 ..< textTracks.count { - var textURLAsset: AVURLAsset! - let textUri: String = textTracks[i].uri - if textUri.lowercased().hasPrefix("http") { - textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any])) - } else { - let isDisabledTrack: Bool! = textTracks[i].type == "disabled" - let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory - textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, options: nil) + static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, textTracks: [TextTrack]?) -> [TextTrack] { + let videoAsset: AVAssetTrack! 
= asset.tracks(withMediaType: AVMediaType.video).first + var validTextTracks: [TextTrack] = [] + + if let textTracks = textTracks, !textTracks.isEmpty { + for i in 0 ..< textTracks.count { + var textURLAsset: AVURLAsset! + let textUri: String = textTracks[i].uri + if textUri.lowercased().hasPrefix("http") { + textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any])) + } else { + let isDisabledTrack: Bool! = textTracks[i].type == "disabled" + let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory + textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, options: nil) + } + let textTrackAsset: AVAssetTrack! = textURLAsset.tracks(withMediaType: AVMediaType.text).first + if textTrackAsset == nil { continue } // fix when there's no textTrackAsset + validTextTracks.append(textTracks[i]) + let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, + preferredTrackID: kCMPersistentTrackID_Invalid) + if videoAsset != nil { + try? textCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration), + of: textTrackAsset, + at: .zero + ) + } + } } - let textTrackAsset: AVAssetTrack! = textURLAsset.tracks(withMediaType: AVMediaType.text).first - if textTrackAsset == nil { continue } // fix when there's no textTrackAsset - validTextTracks.append(textTracks[i]) - let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, - preferredTrackID: kCMPersistentTrackID_Invalid) - if videoAsset != nil { - try? textCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration), - of: textTrackAsset, - at: .zero - ) + + let emptyVttFile: TextTrack? = self.createEmptyVttFile() + if emptyVttFile != nil { + validTextTracks.append(emptyVttFile!) } - } + + return validTextTracks } - let emptyVttFile: TextTrack? = self.createEmptyVttFile() - if emptyVttFile != nil { - validTextTracks.append(emptyVttFile!) + /* + * Create an useless/almost empty VTT file in the list with available tracks. + * This track gets selected when you give type: "disabled" as the selectedTextTrack + * This is needed because there is a bug where sideloaded texttracks cannot be disabled in the AVPlayer. Loading this VTT file instead solves that problem. + * For more info see: https://github.com/react-native-community/react-native-video/issues/1144 + */ + static func createEmptyVttFile() -> TextTrack? { + let fileManager = FileManager.default + let cachesDirectoryUrl = fileManager.urls(for: .cachesDirectory, in: .userDomainMask)[0] + let filePath = cachesDirectoryUrl.appendingPathComponent("empty.vtt").path + + if !fileManager.fileExists(atPath: filePath) { + let stringToWrite = "WEBVTT\n\n1\n99:59:59.000 --> 99:59:59.001\n." + + do { + try stringToWrite.write(to: URL(fileURLWithPath: filePath), atomically: true, encoding: String.Encoding.utf8) + } catch { + return nil + } + } + + return TextTrack([ + "language": "disabled", + "title": "EmptyVttFile", + "type": "text/vtt", + "uri": filePath, + ]) } - return validTextTracks - } - - /* - * Create an useless/almost empty VTT file in the list with available tracks. This track gets selected when you give type: "disabled" as the selectedTextTrack - * This is needed because there is a bug where sideloaded texttracks cannot be disabled in the AVPlayer. 
Loading this VTT file instead solves that problem. - * For more info see: https://github.com/react-native-community/react-native-video/issues/1144 - */ - static func createEmptyVttFile() -> TextTrack? { - let fileManager = FileManager.default - let cachesDirectoryUrl = fileManager.urls(for: .cachesDirectory, in: .userDomainMask)[0] - let filePath = cachesDirectoryUrl.appendingPathComponent("empty.vtt").path - - if !fileManager.fileExists(atPath: filePath) { - let stringToWrite = "WEBVTT\n\n1\n99:59:59.000 --> 99:59:59.001\n." - - do { - try stringToWrite.write(to: URL(fileURLWithPath: filePath), atomically: true, encoding: String.Encoding.utf8) - } catch { - return nil - } + static func delay(seconds: Int = 0) -> Promise { + return Promise(on: .global()) { fulfill, _ in + DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(seconds)) / Double(NSEC_PER_SEC)) { + fulfill(()) + } + } } - return TextTrack([ - "language": "disabled", - "title": "EmptyVttFile", - "type": "text/vtt", - "uri": filePath, - ]) - } - - static func delay(seconds: Int = 0) -> Promise { - return Promise(on: .global()) { fulfill, _ in - DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(seconds)) / Double(NSEC_PER_SEC)) { - fulfill(()) - } + static func preparePHAsset(uri: String) -> Promise { + return Promise(on: .global()) { fulfill, reject in + let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...]) + guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else { + reject(NSError(domain: "", code: 0, userInfo: nil)) + return + } + let options = PHVideoRequestOptions() + options.isNetworkAccessAllowed = true + PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in + fulfill(data) + } + } } - } - - static func preparePHAsset(uri: String) -> Promise { - return Promise(on: .global()) { fulfill, reject in - let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...]) - guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else { - reject(NSError(domain: "", code: 0, userInfo: nil)) - return - } - let options = PHVideoRequestOptions() - options.isNetworkAccessAllowed = true - PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in - fulfill(data) - } + + static func prepareAsset(source: VideoSource) -> (asset: AVURLAsset?, assetOptions: NSMutableDictionary?)? { + guard let sourceUri = source.uri, sourceUri != "" else { return nil } + var asset: AVURLAsset! + let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? "" + let url = source.isNetwork || source.isAsset + ? URL(string: source.uri?.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? "") + : URL(fileURLWithPath: bundlePath) + let assetOptions: NSMutableDictionary! = NSMutableDictionary() + + if source.isNetwork { + if let headers = source.requestHeaders, !headers.isEmpty { + assetOptions.setObject(headers, forKey: "AVURLAssetHTTPHeaderFieldsKey" as NSCopying) + } + let cookies: [AnyObject]! = HTTPCookieStorage.shared.cookies + assetOptions.setObject(cookies, forKey: AVURLAssetHTTPCookiesKey as NSCopying) + asset = AVURLAsset(url: url!, options: assetOptions as! [String: Any]) + } else { + asset = AVURLAsset(url: url!) + } + return (asset, assetOptions) } - } - - static func prepareAsset(source: VideoSource) -> (asset: AVURLAsset?, assetOptions: NSMutableDictionary?)? 
{ - guard let sourceUri = source.uri, sourceUri != "" else { return nil } - var asset: AVURLAsset! - let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? "" - let url = source.isNetwork || source.isAsset - ? URL(string: source.uri?.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? "") - : URL(fileURLWithPath: bundlePath) - let assetOptions: NSMutableDictionary! = NSMutableDictionary() - - if source.isNetwork { - if let headers = source.requestHeaders, !headers.isEmpty { - assetOptions.setObject(headers, forKey: "AVURLAssetHTTPHeaderFieldsKey" as NSCopying) - } - let cookies: [AnyObject]! = HTTPCookieStorage.shared.cookies - assetOptions.setObject(cookies, forKey: AVURLAssetHTTPCookiesKey as NSCopying) - asset = AVURLAsset(url: url!, options: assetOptions as! [String: Any]) - } else { - asset = AVURLAsset(url: url!) + + static func createMetadataItems(for mapping: [AVMetadataIdentifier: Any]) -> [AVMetadataItem] { + return mapping.compactMap { createMetadataItem(for: $0, value: $1) } } - return (asset, assetOptions) - } - - static func createMetadataItems(for mapping: [AVMetadataIdentifier: Any]) -> [AVMetadataItem] { - return mapping.compactMap { createMetadataItem(for: $0, value: $1) } - } - - static func createMetadataItem(for identifier: AVMetadataIdentifier, - value: Any) -> AVMetadataItem { - let item = AVMutableMetadataItem() - item.identifier = identifier - item.value = value as? NSCopying & NSObjectProtocol - // Specify "und" to indicate an undefined language. - item.extendedLanguageTag = "und" - return item.copy() as! AVMetadataItem - } - - static func createImageMetadataItem(imageUri: String) -> Data? { - if let uri = URL(string: imageUri), - let imgData = try? Data(contentsOf: uri), - let image = UIImage(data: imgData), - let pngData = image.pngData() { - return pngData + + static func createMetadataItem(for identifier: AVMetadataIdentifier, + value: Any) -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.identifier = identifier + item.value = value as? NSCopying & NSObjectProtocol + // Specify "und" to indicate an undefined language. + item.extendedLanguageTag = "und" + return item.copy() as! AVMetadataItem } - return nil - } + static func createImageMetadataItem(imageUri: String) -> Data? { + if let uri = URL(string: imageUri), + let imgData = try? Data(contentsOf: uri), + let image = UIImage(data: imgData), + let pngData = image.pngData() { + return pngData + } + + return nil + } } diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift index d7f3c130f6..9ebb9b610b 100644 --- a/ios/Video/RCTVideo.swift +++ b/ios/Video/RCTVideo.swift @@ -2,7 +2,7 @@ import AVFoundation import AVKit import Foundation #if USE_GOOGLE_IMA - import GoogleInteractiveMediaAds + import GoogleInteractiveMediaAds #endif import Promises import React @@ -10,1358 +10,1358 @@ import React // MARK: - RCTVideo class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverHandler { - private var _player: AVPlayer? - private var _playerItem: AVPlayerItem? - private var _source: VideoSource? - private var _playerBufferEmpty = true - private var _playerLayer: AVPlayerLayer? - private var _chapters: [Chapter]? - - private var _playerViewController: RCTVideoPlayerViewController? - private var _videoURL: NSURL? - - /* DRM */ - private var _drm: DRMParams? - - private var _localSourceEncryptionKeyScheme: String? - - /* Required to publish events */ - private var _eventDispatcher: RCTEventDispatcher? 
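The RCTVideoUtils metadata helpers in the hunk above (createMetadataItems / createMetadataItem) produce AVMetadataItem values that playerItemPropegateMetadata later attaches to the player item. A minimal sketch of that path, assuming iOS 12.2+ and sample title/description values; the function name is made up for illustration.

import AVFoundation

// Converts an identifier-to-value mapping into metadata items and attaches them to the player item.
@available(iOS 12.2, *)
func applyExternalMetadata(to playerItem: AVPlayerItem, title: String, description: String) {
    let mapping: [AVMetadataIdentifier: Any] = [
        .commonIdentifierTitle: title,
        .commonIdentifierDescription: description,
    ]
    playerItem.externalMetadata = mapping.compactMap { entry -> AVMetadataItem? in
        let item = AVMutableMetadataItem()
        item.identifier = entry.key
        item.value = entry.value as? NSCopying & NSObjectProtocol
        item.extendedLanguageTag = "und" // undefined language, matching the helper above
        return item.copy() as? AVMetadataItem
    }
}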
- private var _videoLoadStarted = false - - private var _pendingSeek = false - private var _pendingSeekTime: Float = 0.0 - private var _lastSeekTime: Float = 0.0 - - /* For sending videoProgress events */ - private var _controls = false - - /* Keep track of any modifiers, need to be applied after each play */ - private var _audioOutput: String = "speaker" - private var _volume: Float = 1.0 - private var _rate: Float = 1.0 - private var _maxBitRate: Float? - - private var _automaticallyWaitsToMinimizeStalling = true - private var _muted = false - private var _paused = false - private var _repeat = false - private var _allowsExternalPlayback = true - private var _textTracks: [TextTrack]? - private var _selectedTextTrackCriteria: SelectedTrackCriteria? - private var _selectedAudioTrackCriteria: SelectedTrackCriteria? - private var _playbackStalled = false - private var _playInBackground = false - private var _preventsDisplaySleepDuringVideoPlayback = true - private var _preferredForwardBufferDuration: Float = 0.0 - private var _playWhenInactive = false - private var _ignoreSilentSwitch: String! = "inherit" // inherit, ignore, obey - private var _mixWithOthers: String! = "inherit" // inherit, mix, duck - private var _resizeMode: String! = "cover" - private var _fullscreen = false - private var _fullscreenAutorotate = true - private var _fullscreenOrientation: String! = "all" - private var _fullscreenPlayerPresented = false - private var _fullscreenUncontrolPlayerPresented = false // to call events switching full screen mode from player controls - private var _filterName: String! - private var _filterEnabled = false - private var _presentingViewController: UIViewController? - private var _pictureInPictureEnabled = false - private var _startPosition: Float64 = -1 - - /* IMA Ads */ - private var _adTagUrl: String? - #if USE_GOOGLE_IMA - private var _imaAdsManager: RCTIMAAdsManager! - /* Playhead used by the SDK to track content video progress and insert mid-rolls. */ - private var _contentPlayhead: IMAAVPlayerContentPlayhead? - #endif - private var _didRequestAds = false - private var _adPlaying = false - - private var _resouceLoaderDelegate: RCTResourceLoaderDelegate? - private var _playerObserver: RCTPlayerObserver = .init() - - #if USE_VIDEO_CACHING - private let _videoCache: RCTVideoCachingHandler = .init() - #endif - - #if os(iOS) - private var _pip: RCTPictureInPicture? - #endif - - // Events - @objc var onVideoLoadStart: RCTDirectEventBlock? - @objc var onVideoLoad: RCTDirectEventBlock? - @objc var onVideoBuffer: RCTDirectEventBlock? - @objc var onVideoError: RCTDirectEventBlock? - @objc var onVideoProgress: RCTDirectEventBlock? - @objc var onVideoBandwidthUpdate: RCTDirectEventBlock? - @objc var onVideoSeek: RCTDirectEventBlock? - @objc var onVideoEnd: RCTDirectEventBlock? - @objc var onTimedMetadata: RCTDirectEventBlock? - @objc var onVideoAudioBecomingNoisy: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerWillPresent: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerDidPresent: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerWillDismiss: RCTDirectEventBlock? - @objc var onVideoFullscreenPlayerDidDismiss: RCTDirectEventBlock? - @objc var onReadyForDisplay: RCTDirectEventBlock? - @objc var onPlaybackStalled: RCTDirectEventBlock? - @objc var onPlaybackResume: RCTDirectEventBlock? - @objc var onPlaybackRateChange: RCTDirectEventBlock? - @objc var onVolumeChange: RCTDirectEventBlock? - @objc var onVideoPlaybackStateChanged: RCTDirectEventBlock? 
- @objc var onVideoExternalPlaybackChange: RCTDirectEventBlock? - @objc var onPictureInPictureStatusChanged: RCTDirectEventBlock? - @objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock? - @objc var onGetLicense: RCTDirectEventBlock? - @objc var onReceiveAdEvent: RCTDirectEventBlock? - - @objc - func _onPictureInPictureStatusChanged() { - onPictureInPictureStatusChanged?(["isActive": NSNumber(value: true)]) - } - - @objc - func _onRestoreUserInterfaceForPictureInPictureStop() { - onPictureInPictureStatusChanged?(["isActive": NSNumber(value: false)]) - } - - func isPipEnabled() -> Bool { - return _pictureInPictureEnabled - } - - init(eventDispatcher: RCTEventDispatcher!) { - super.init(frame: CGRect(x: 0, y: 0, width: 100, height: 100)) + private var _player: AVPlayer? + private var _playerItem: AVPlayerItem? + private var _source: VideoSource? + private var _playerBufferEmpty = true + private var _playerLayer: AVPlayerLayer? + private var _chapters: [Chapter]? + + private var _playerViewController: RCTVideoPlayerViewController? + private var _videoURL: NSURL? + + /* DRM */ + private var _drm: DRMParams? + + private var _localSourceEncryptionKeyScheme: String? + + /* Required to publish events */ + private var _eventDispatcher: RCTEventDispatcher? + private var _videoLoadStarted = false + + private var _pendingSeek = false + private var _pendingSeekTime: Float = 0.0 + private var _lastSeekTime: Float = 0.0 + + /* For sending videoProgress events */ + private var _controls = false + + /* Keep track of any modifiers, need to be applied after each play */ + private var _audioOutput: String = "speaker" + private var _volume: Float = 1.0 + private var _rate: Float = 1.0 + private var _maxBitRate: Float? + + private var _automaticallyWaitsToMinimizeStalling = true + private var _muted = false + private var _paused = false + private var _repeat = false + private var _allowsExternalPlayback = true + private var _textTracks: [TextTrack]? + private var _selectedTextTrackCriteria: SelectedTrackCriteria? + private var _selectedAudioTrackCriteria: SelectedTrackCriteria? + private var _playbackStalled = false + private var _playInBackground = false + private var _preventsDisplaySleepDuringVideoPlayback = true + private var _preferredForwardBufferDuration: Float = 0.0 + private var _playWhenInactive = false + private var _ignoreSilentSwitch: String! = "inherit" // inherit, ignore, obey + private var _mixWithOthers: String! = "inherit" // inherit, mix, duck + private var _resizeMode: String! = "cover" + private var _fullscreen = false + private var _fullscreenAutorotate = true + private var _fullscreenOrientation: String! = "all" + private var _fullscreenPlayerPresented = false + private var _fullscreenUncontrolPlayerPresented = false // to call events switching full screen mode from player controls + private var _filterName: String! + private var _filterEnabled = false + private var _presentingViewController: UIViewController? + private var _pictureInPictureEnabled = false + private var _startPosition: Float64 = -1 + + /* IMA Ads */ + private var _adTagUrl: String? #if USE_GOOGLE_IMA - _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) + private var _imaAdsManager: RCTIMAAdsManager! + /* Playhead used by the SDK to track content video progress and insert mid-rolls. */ + private var _contentPlayhead: IMAAVPlayerContentPlayhead? 
#endif + private var _didRequestAds = false + private var _adPlaying = false - _eventDispatcher = eventDispatcher + private var _resouceLoaderDelegate: RCTResourceLoaderDelegate? + private var _playerObserver: RCTPlayerObserver = .init() - #if os(iOS) - _pip = RCTPictureInPicture(self._onPictureInPictureStatusChanged, self._onRestoreUserInterfaceForPictureInPictureStop) - #endif - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationWillResignActive(notification:)), - name: UIApplication.willResignActiveNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationDidBecomeActive(notification:)), - name: UIApplication.didBecomeActiveNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationDidEnterBackground(notification:)), - name: UIApplication.didEnterBackgroundNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(applicationWillEnterForeground(notification:)), - name: UIApplication.willEnterForegroundNotification, - object: nil - ) - - NotificationCenter.default.addObserver( - self, - selector: #selector(audioRouteChanged(notification:)), - name: AVAudioSession.routeChangeNotification, - object: nil - ) - _playerObserver._handlers = self #if USE_VIDEO_CACHING - _videoCache.playerItemPrepareText = playerItemPrepareText + private let _videoCache: RCTVideoCachingHandler = .init() #endif - } - required init?(coder aDecoder: NSCoder) { - super.init(coder: aDecoder) - #if USE_GOOGLE_IMA - _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) + #if os(iOS) + private var _pip: RCTPictureInPicture? #endif - } - deinit { - NotificationCenter.default.removeObserver(self) - self.removePlayerLayer() - _playerObserver.clearPlayer() - } + // Events + @objc var onVideoLoadStart: RCTDirectEventBlock? + @objc var onVideoLoad: RCTDirectEventBlock? + @objc var onVideoBuffer: RCTDirectEventBlock? + @objc var onVideoError: RCTDirectEventBlock? + @objc var onVideoProgress: RCTDirectEventBlock? + @objc var onVideoBandwidthUpdate: RCTDirectEventBlock? + @objc var onVideoSeek: RCTDirectEventBlock? + @objc var onVideoEnd: RCTDirectEventBlock? + @objc var onTimedMetadata: RCTDirectEventBlock? + @objc var onVideoAudioBecomingNoisy: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerWillPresent: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerDidPresent: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerWillDismiss: RCTDirectEventBlock? + @objc var onVideoFullscreenPlayerDidDismiss: RCTDirectEventBlock? + @objc var onReadyForDisplay: RCTDirectEventBlock? + @objc var onPlaybackStalled: RCTDirectEventBlock? + @objc var onPlaybackResume: RCTDirectEventBlock? + @objc var onPlaybackRateChange: RCTDirectEventBlock? + @objc var onVolumeChange: RCTDirectEventBlock? + @objc var onVideoPlaybackStateChanged: RCTDirectEventBlock? + @objc var onVideoExternalPlaybackChange: RCTDirectEventBlock? + @objc var onPictureInPictureStatusChanged: RCTDirectEventBlock? + @objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock? + @objc var onGetLicense: RCTDirectEventBlock? + @objc var onReceiveAdEvent: RCTDirectEventBlock? 
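Each event prop declared above is an RCTDirectEventBlock, i.e. an optional block that takes a dictionary payload; firing one is a plain optional call with "target" carrying the view's reactTag, as in the handlers later in this file. A minimal sketch, assuming the React module is importable the way it is in RCTVideo.swift; the free function is purely illustrative.

import Foundation
import React

// Emits the audio-becoming-noisy event with the standard "target" payload key.
func notifyAudioBecomingNoisy(_ onVideoAudioBecomingNoisy: RCTDirectEventBlock?, reactTag: NSNumber?) {
    onVideoAudioBecomingNoisy?(["target": reactTag as Any])
}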
+ + @objc + func _onPictureInPictureStatusChanged() { + onPictureInPictureStatusChanged?(["isActive": NSNumber(value: true)]) + } - // MARK: - App lifecycle handlers + @objc + func _onRestoreUserInterfaceForPictureInPictureStop() { + onPictureInPictureStatusChanged?(["isActive": NSNumber(value: false)]) + } - @objc - func applicationWillResignActive(notification _: NSNotification!) { - if _playInBackground || _playWhenInactive || _paused { return } + func isPipEnabled() -> Bool { + return _pictureInPictureEnabled + } + + init(eventDispatcher: RCTEventDispatcher!) { + super.init(frame: CGRect(x: 0, y: 0, width: 100, height: 100)) + #if USE_GOOGLE_IMA + _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) + #endif - _player?.pause() - _player?.rate = 0.0 - } + _eventDispatcher = eventDispatcher - @objc - func applicationDidBecomeActive(notification _: NSNotification!) { - if _playInBackground || _playWhenInactive || _paused { return } + #if os(iOS) + _pip = RCTPictureInPicture(self._onPictureInPictureStatusChanged, self._onRestoreUserInterfaceForPictureInPictureStop) + #endif - // Resume the player or any other tasks that should continue when the app becomes active. - _player?.play() - _player?.rate = _rate - } + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationWillResignActive(notification:)), + name: UIApplication.willResignActiveNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationDidBecomeActive(notification:)), + name: UIApplication.didBecomeActiveNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationDidEnterBackground(notification:)), + name: UIApplication.didEnterBackgroundNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(applicationWillEnterForeground(notification:)), + name: UIApplication.willEnterForegroundNotification, + object: nil + ) + + NotificationCenter.default.addObserver( + self, + selector: #selector(audioRouteChanged(notification:)), + name: AVAudioSession.routeChangeNotification, + object: nil + ) + _playerObserver._handlers = self + #if USE_VIDEO_CACHING + _videoCache.playerItemPrepareText = playerItemPrepareText + #endif + } - @objc - func applicationDidEnterBackground(notification _: NSNotification!) { - if !_playInBackground { - // Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html - _playerLayer?.player = nil - _playerViewController?.player = nil + required init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + #if USE_GOOGLE_IMA + _imaAdsManager = RCTIMAAdsManager(video: self, pipEnabled: isPipEnabled) + #endif } - } - @objc - func applicationWillEnterForeground(notification _: NSNotification!) { - self.applyModifiers() - if !_playInBackground { - _playerLayer?.player = _player - _playerViewController?.player = _player + deinit { + NotificationCenter.default.removeObserver(self) + self.removePlayerLayer() + _playerObserver.clearPlayer() } - } - // MARK: - Audio events + // MARK: - App lifecycle handlers - @objc - func audioRouteChanged(notification: NSNotification!) { - if let userInfo = notification.userInfo { - let reason: AVAudioSession.RouteChangeReason! = userInfo[AVAudioSessionRouteChangeReasonKey] as? AVAudioSession.RouteChangeReason - // let previousRoute:NSNumber! = userInfo[AVAudioSessionRouteChangePreviousRouteKey] as? 
NSNumber - if reason == .oldDeviceUnavailable, let onVideoAudioBecomingNoisy = onVideoAudioBecomingNoisy { - onVideoAudioBecomingNoisy(["target": reactTag as Any]) - } + @objc + func applicationWillResignActive(notification _: NSNotification!) { + if _playInBackground || _playWhenInactive || _paused { return } + + _player?.pause() + _player?.rate = 0.0 } - } - // MARK: - Progress + @objc + func applicationDidBecomeActive(notification _: NSNotification!) { + if _playInBackground || _playWhenInactive || _paused { return } - func sendProgressUpdate() { - if let video = _player?.currentItem, - video == nil || video.status != AVPlayerItem.Status.readyToPlay { - return + // Resume the player or any other tasks that should continue when the app becomes active. + _player?.play() + _player?.rate = _rate + } + + @objc + func applicationDidEnterBackground(notification _: NSNotification!) { + if !_playInBackground { + // Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html + _playerLayer?.player = nil + _playerViewController?.player = nil + } } - let playerDuration: CMTime = RCTVideoUtils.playerItemDuration(_player) - if CMTIME_IS_INVALID(playerDuration) { - return + @objc + func applicationWillEnterForeground(notification _: NSNotification!) { + self.applyModifiers() + if !_playInBackground { + _playerLayer?.player = _player + _playerViewController?.player = _player + } } - var currentTime = _player?.currentTime() - if currentTime != nil && _source?.cropStart != nil { - currentTime = CMTimeSubtract(currentTime!, CMTimeMake(value: _source?.cropStart ?? 0, timescale: 1000)) + // MARK: - Audio events + + @objc + func audioRouteChanged(notification: NSNotification!) { + if let userInfo = notification.userInfo { + let reason: AVAudioSession.RouteChangeReason! = userInfo[AVAudioSessionRouteChangeReasonKey] as? AVAudioSession.RouteChangeReason + // let previousRoute:NSNumber! = userInfo[AVAudioSessionRouteChangePreviousRouteKey] as? NSNumber + if reason == .oldDeviceUnavailable, let onVideoAudioBecomingNoisy = onVideoAudioBecomingNoisy { + onVideoAudioBecomingNoisy(["target": reactTag as Any]) + } + } } - let currentPlaybackTime = _player?.currentItem?.currentDate() - let duration = CMTimeGetSeconds(playerDuration) - let currentTimeSecs = CMTimeGetSeconds(currentTime ?? .zero) - NotificationCenter.default.post(name: NSNotification.Name("RCTVideo_progress"), object: nil, userInfo: [ - "progress": NSNumber(value: currentTimeSecs / duration), - ]) + // MARK: - Progress - if currentTimeSecs >= 0 { - #if USE_GOOGLE_IMA - if !_didRequestAds && currentTimeSecs >= 0.0001 && _adTagUrl != nil { - _imaAdsManager.requestAds() - _didRequestAds = true + func sendProgressUpdate() { + if let video = _player?.currentItem, + video == nil || video.status != AVPlayerItem.Status.readyToPlay { + return } - #endif - onVideoProgress?([ - "currentTime": NSNumber(value: Float(currentTimeSecs)), - "playableDuration": RCTVideoUtils.calculatePlayableDuration(_player, withSource: _source), - "atValue": NSNumber(value: currentTime?.value ?? .zero), - "currentPlaybackTime": NSNumber(value: NSNumber(value: floor(currentPlaybackTime?.timeIntervalSince1970 ?? 0 * 1000)).int64Value), - "target": reactTag, - "seekableDuration": RCTVideoUtils.calculateSeekableDuration(_player), - ]) - } - } - - // MARK: - Player and source - - @objc - func setSrc(_ source: NSDictionary!) 
{ - let dispatchClosure = { - self._source = VideoSource(source) - if self._source?.uri == nil || self._source?.uri == "" { - self._player?.replaceCurrentItem(with: nil) - return - } - self.removePlayerLayer() - self._playerObserver.player = nil - self._resouceLoaderDelegate = nil - self._playerObserver.playerItem = nil - - // perform on next run loop, otherwise other passed react-props may not be set - RCTVideoUtils.delay() - .then { [weak self] in - guard let self = self else { throw NSError(domain: "", code: 0, userInfo: nil) } - guard let source = self._source else { - DebugLog("The source not exist") - throw NSError(domain: "", code: 0, userInfo: nil) - } - if let uri = source.uri, uri.starts(with: "ph://") { - return Promise { - RCTVideoUtils.preparePHAsset(uri: uri).then { asset in - return self.playerItemPrepareText(asset: asset, assetOptions: nil, uri: source.uri ?? "") - } + + let playerDuration: CMTime = RCTVideoUtils.playerItemDuration(_player) + if CMTIME_IS_INVALID(playerDuration) { + return + } + + var currentTime = _player?.currentTime() + if currentTime != nil && _source?.cropStart != nil { + currentTime = CMTimeSubtract(currentTime!, CMTimeMake(value: _source?.cropStart ?? 0, timescale: 1000)) + } + let currentPlaybackTime = _player?.currentItem?.currentDate() + let duration = CMTimeGetSeconds(playerDuration) + let currentTimeSecs = CMTimeGetSeconds(currentTime ?? .zero) + + NotificationCenter.default.post(name: NSNotification.Name("RCTVideo_progress"), object: nil, userInfo: [ + "progress": NSNumber(value: currentTimeSecs / duration), + ]) + + if currentTimeSecs >= 0 { + #if USE_GOOGLE_IMA + if !_didRequestAds && currentTimeSecs >= 0.0001 && _adTagUrl != nil { + _imaAdsManager.requestAds() + _didRequestAds = true + } + #endif + onVideoProgress?([ + "currentTime": NSNumber(value: Float(currentTimeSecs)), + "playableDuration": RCTVideoUtils.calculatePlayableDuration(_player, withSource: _source), + "atValue": NSNumber(value: currentTime?.value ?? .zero), + "currentPlaybackTime": NSNumber(value: NSNumber(value: floor(currentPlaybackTime?.timeIntervalSince1970 ?? 0 * 1000)).int64Value), + "target": reactTag, + "seekableDuration": RCTVideoUtils.calculateSeekableDuration(_player), + ]) + } + } + + // MARK: - Player and source + + @objc + func setSrc(_ source: NSDictionary!) 
{ + let dispatchClosure = { + self._source = VideoSource(source) + if self._source?.uri == nil || self._source?.uri == "" { + self._player?.replaceCurrentItem(with: nil) + return } - } - guard let assetResult = RCTVideoUtils.prepareAsset(source: source), - let asset = assetResult.asset, - let assetOptions = assetResult.assetOptions else { - DebugLog("Could not find video URL in source '\(String(describing: self._source))'") - throw NSError(domain: "", code: 0, userInfo: nil) - } - - if let startPosition = self._source?.startPosition { - self._startPosition = Float64(startPosition) / 1000 - } - - #if USE_VIDEO_CACHING - if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) { - return self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions) + self.removePlayerLayer() + self._playerObserver.player = nil + self._resouceLoaderDelegate = nil + self._playerObserver.playerItem = nil + + // perform on next run loop, otherwise other passed react-props may not be set + RCTVideoUtils.delay() + .then { [weak self] in + guard let self = self else { throw NSError(domain: "", code: 0, userInfo: nil) } + guard let source = self._source else { + DebugLog("The source not exist") + throw NSError(domain: "", code: 0, userInfo: nil) + } + if let uri = source.uri, uri.starts(with: "ph://") { + return Promise { + RCTVideoUtils.preparePHAsset(uri: uri).then { asset in + return self.playerItemPrepareText(asset: asset, assetOptions: nil, uri: source.uri ?? "") + } + } + } + guard let assetResult = RCTVideoUtils.prepareAsset(source: source), + let asset = assetResult.asset, + let assetOptions = assetResult.assetOptions else { + DebugLog("Could not find video URL in source '\(String(describing: self._source))'") + throw NSError(domain: "", code: 0, userInfo: nil) + } + + if let startPosition = self._source?.startPosition { + self._startPosition = Float64(startPosition) / 1000 + } + + #if USE_VIDEO_CACHING + if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) { + return self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions) + } + #endif + + if self._drm != nil || self._localSourceEncryptionKeyScheme != nil { + self._resouceLoaderDelegate = RCTResourceLoaderDelegate( + asset: asset, + drm: self._drm, + localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme, + onVideoError: self.onVideoError, + onGetLicense: self.onGetLicense, + reactTag: self.reactTag + ) + } + + return Promise { self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") } + }.then { [weak self] (playerItem: AVPlayerItem!) in + guard let self = self else { throw NSError(domain: "", code: 0, userInfo: nil) } + + self._player?.pause() + self._playerItem = playerItem + self._playerObserver.playerItem = self._playerItem + self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration) + self.setPlaybackRange(playerItem, withVideoStart: self._source?.cropStart, withVideoEnd: self._source?.cropEnd) + self.setFilter(self._filterName) + if let maxBitRate = self._maxBitRate { + self._playerItem?.preferredPeakBitRate = Double(maxBitRate) + } + + self._player = self._player ?? 
AVPlayer() + self._player?.replaceCurrentItem(with: playerItem) + self._playerObserver.player = self._player + self.applyModifiers() + self._player?.actionAtItemEnd = .none + + if #available(iOS 10.0, *) { + self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling) + } + + #if USE_GOOGLE_IMA + if self._adTagUrl != nil { + // Set up your content playhead and contentComplete callback. + self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!) + + self._imaAdsManager.setUpAdsLoader() + } + #endif + // Perform on next run loop, otherwise onVideoLoadStart is nil + self.onVideoLoadStart?([ + "src": [ + "uri": self._source?.uri ?? NSNull(), + "type": self._source?.type ?? NSNull(), + "isNetwork": NSNumber(value: self._source?.isNetwork ?? false), + ], + "drm": self._drm?.json ?? NSNull(), + "target": self.reactTag, + ]) + }.catch { _ in } + self._videoLoadStarted = true + } + DispatchQueue.global(qos: .default).async(execute: dispatchClosure) + } + + @objc + func setDrm(_ drm: NSDictionary) { + _drm = DRMParams(drm) + } + + @objc + func setLocalSourceEncryptionKeyScheme(_ keyScheme: String) { + _localSourceEncryptionKeyScheme = keyScheme + } + + func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> AVPlayerItem { + if (_textTracks == nil) || _textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) { + return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)) + } + + // AVPlayer can't airplay AVMutableCompositions + _allowsExternalPlayback = false + let mixComposition = RCTVideoUtils.generateMixComposition(asset) + let validTextTracks = RCTVideoUtils.getValidTextTracks( + asset: asset, + assetOptions: assetOptions, + mixComposition: mixComposition, + textTracks: _textTracks + ) + if validTextTracks.count != _textTracks?.count { + setTextTracks(validTextTracks) + } + + return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)) + } + + func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem { + var mapping: [AVMetadataIdentifier: Any] = [:] + + if let title = _source?.title { + mapping[.commonIdentifierTitle] = title + } + + if let subtitle = _source?.subtitle { + mapping[.iTunesMetadataTrackSubTitle] = subtitle + } + + if let description = _source?.description { + mapping[.commonIdentifierDescription] = description + } + + if let customImageUri = _source?.customImageUri, + let imageData = RCTVideoUtils.createImageMetadataItem(imageUri: customImageUri) { + mapping[.commonIdentifierArtwork] = imageData + } + + if #available(iOS 12.2, *), !mapping.isEmpty { + playerItem.externalMetadata = RCTVideoUtils.createMetadataItems(for: mapping) + } + + #if os(tvOS) + if let chapters = _chapters { + playerItem.navigationMarkerGroups = RCTVideoTVUtils.makeNavigationMarkerGroups(chapters) } - #endif - - if self._drm != nil || self._localSourceEncryptionKeyScheme != nil { - self._resouceLoaderDelegate = RCTResourceLoaderDelegate( - asset: asset, - drm: self._drm, - localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme, - onVideoError: self.onVideoError, - onGetLicense: self.onGetLicense, - reactTag: self.reactTag - ) - } - - return Promise { self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") } - }.then { [weak self] (playerItem: AVPlayerItem!) 
in - guard let self = self else { throw NSError(domain: "", code: 0, userInfo: nil) } - - self._player?.pause() - self._playerItem = playerItem - self._playerObserver.playerItem = self._playerItem - self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration) - self.setPlaybackRange(playerItem, withVideoStart: self._source?.cropStart, withVideoEnd: self._source?.cropEnd) - self.setFilter(self._filterName) - if let maxBitRate = self._maxBitRate { - self._playerItem?.preferredPeakBitRate = Double(maxBitRate) - } - - self._player = self._player ?? AVPlayer() - self._player?.replaceCurrentItem(with: playerItem) - self._playerObserver.player = self._player - self.applyModifiers() - self._player?.actionAtItemEnd = .none - - if #available(iOS 10.0, *) { - self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling) - } - - #if USE_GOOGLE_IMA - if self._adTagUrl != nil { - // Set up your content playhead and contentComplete callback. - self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!) - - self._imaAdsManager.setUpAdsLoader() + #endif + + return playerItem + } + + // MARK: - Prop setters + + @objc + func setResizeMode(_ mode: String) { + var resizeMode: AVLayerVideoGravity = .resizeAspect + + switch mode { + case "contain": + resizeMode = .resizeAspect + case "none": + resizeMode = .resizeAspect + case "cover": + resizeMode = .resizeAspectFill + case "stretch": + resizeMode = .resize + default: + resizeMode = .resizeAspect + } + + if _controls { + _playerViewController?.videoGravity = resizeMode + } else { + _playerLayer?.videoGravity = resizeMode + } + + _resizeMode = mode + } + + @objc + func setPlayInBackground(_ playInBackground: Bool) { + _playInBackground = playInBackground + } + + @objc + func setPreventsDisplaySleepDuringVideoPlayback(_ preventsDisplaySleepDuringVideoPlayback: Bool) { + _preventsDisplaySleepDuringVideoPlayback = preventsDisplaySleepDuringVideoPlayback + self.applyModifiers() + } + + @objc + func setAllowsExternalPlayback(_ allowsExternalPlayback: Bool) { + _allowsExternalPlayback = allowsExternalPlayback + _player?.allowsExternalPlayback = _allowsExternalPlayback + } + + @objc + func setPlayWhenInactive(_ playWhenInactive: Bool) { + _playWhenInactive = playWhenInactive + } + + @objc + func setPictureInPicture(_ pictureInPicture: Bool) { + #if os(iOS) + let audioSession = AVAudioSession.sharedInstance() + do { + try audioSession.setCategory(.playback) + try audioSession.setActive(true, options: []) + } catch {} + if pictureInPicture { + _pictureInPictureEnabled = true + } else { + _pictureInPictureEnabled = false } - #endif - // Perform on next run loop, otherwise onVideoLoadStart is nil - self.onVideoLoadStart?([ - "src": [ - "uri": self._source?.uri ?? NSNull(), - "type": self._source?.type ?? NSNull(), - "isNetwork": NSNumber(value: self._source?.isNetwork ?? false), - ], - "drm": self._drm?.json ?? 
NSNull(), - "target": self.reactTag, - ]) - }.catch { _ in } - self._videoLoadStarted = true + _pip?.setPictureInPicture(pictureInPicture) + #endif } - DispatchQueue.global(qos: .default).async(execute: dispatchClosure) - } - @objc - func setDrm(_ drm: NSDictionary) { - _drm = DRMParams(drm) - } + @objc + func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore: Bool) { + #if os(iOS) + _pip?.setRestoreUserInterfaceForPIPStopCompletionHandler(restore) + #endif + } - @objc - func setLocalSourceEncryptionKeyScheme(_ keyScheme: String) { - _localSourceEncryptionKeyScheme = keyScheme - } + @objc + func setIgnoreSilentSwitch(_ ignoreSilentSwitch: String?) { + _ignoreSilentSwitch = ignoreSilentSwitch + RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) + applyModifiers() + } - func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> AVPlayerItem { - if (_textTracks == nil) || _textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) { - return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)) + @objc + func setMixWithOthers(_ mixWithOthers: String?) { + _mixWithOthers = mixWithOthers + applyModifiers() } - // AVPlayer can't airplay AVMutableCompositions - _allowsExternalPlayback = false - let mixComposition = RCTVideoUtils.generateMixComposition(asset) - let validTextTracks = RCTVideoUtils.getValidTextTracks( - asset: asset, - assetOptions: assetOptions, - mixComposition: mixComposition, - textTracks: _textTracks - ) - if validTextTracks.count != _textTracks?.count { - setTextTracks(validTextTracks) + @objc + func setPaused(_ paused: Bool) { + if paused { + if _adPlaying { + #if USE_GOOGLE_IMA + _imaAdsManager.getAdsManager()?.pause() + #endif + } else { + _player?.pause() + _player?.rate = 0.0 + } + } else { + RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) + + if _adPlaying { + #if USE_GOOGLE_IMA + _imaAdsManager.getAdsManager()?.resume() + #endif + } else { + if #available(iOS 10.0, *), !_automaticallyWaitsToMinimizeStalling { + _player?.playImmediately(atRate: _rate) + } else { + _player?.play() + _player?.rate = _rate + } + _player?.rate = _rate + } + } + + _paused = paused } - return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)) - } + @objc + func setSeek(_ info: NSDictionary!) { + let seekTime: NSNumber! = info["time"] as! NSNumber + let seekTolerance: NSNumber! = info["tolerance"] as! NSNumber + let item: AVPlayerItem? = _player?.currentItem + guard item != nil, let player = _player, let item = item, item.status == AVPlayerItem.Status.readyToPlay else { + _pendingSeek = true + _pendingSeekTime = seekTime.floatValue + return + } + let wasPaused = _paused + + RCTPlayerOperations.seek( + player: player, + playerItem: item, + paused: wasPaused, + seekTime: seekTime.floatValue, + seekTolerance: seekTolerance.floatValue + ) + .then { [weak self] (_: Bool) in + guard let self = self else { return } - func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) 
-> AVPlayerItem { - var mapping: [AVMetadataIdentifier: Any] = [:] + self._playerObserver.addTimeObserverIfNotSet() + if !wasPaused { + self.setPaused(false) + } + self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))), + "seekTime": seekTime, + "target": self.reactTag]) + }.catch { _ in } - if let title = _source?.title { - mapping[.commonIdentifierTitle] = title + _pendingSeek = false } - if let subtitle = _source?.subtitle { - mapping[.iTunesMetadataTrackSubTitle] = subtitle + @objc + func setRate(_ rate: Float) { + _rate = rate + applyModifiers() } - if let description = _source?.description { - mapping[.commonIdentifierDescription] = description + @objc + func isMuted() -> Bool { + return _muted } - if let customImageUri = _source?.customImageUri, - let imageData = RCTVideoUtils.createImageMetadataItem(imageUri: customImageUri) { - mapping[.commonIdentifierArtwork] = imageData + @objc + func setMuted(_ muted: Bool) { + _muted = muted + applyModifiers() } - if #available(iOS 12.2, *), !mapping.isEmpty { - playerItem.externalMetadata = RCTVideoUtils.createMetadataItems(for: mapping) + @objc + func setAudioOutput(_ audioOutput: String) { + _audioOutput = audioOutput + RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) + do { + if audioOutput == "speaker" { + #if os(iOS) + try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker) + #endif + } else if audioOutput == "earpiece" { + try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none) + } + } catch { + print("Error occurred: \(error.localizedDescription)") + } } - #if os(tvOS) - if let chapters = _chapters { - playerItem.navigationMarkerGroups = RCTVideoTVUtils.makeNavigationMarkerGroups(chapters) - } - #endif + @objc + func setVolume(_ volume: Float) { + _volume = volume + applyModifiers() + } - return playerItem - } - - // MARK: - Prop setters - - @objc - func setResizeMode(_ mode: String) { - var resizeMode: AVLayerVideoGravity = .resizeAspect - - switch mode { - case "contain": - resizeMode = .resizeAspect - case "none": - resizeMode = .resizeAspect - case "cover": - resizeMode = .resizeAspectFill - case "stretch": - resizeMode = .resize - default: - resizeMode = .resizeAspect - } - - if _controls { - _playerViewController?.videoGravity = resizeMode - } else { - _playerLayer?.videoGravity = resizeMode - } - - _resizeMode = mode - } - - @objc - func setPlayInBackground(_ playInBackground: Bool) { - _playInBackground = playInBackground - } - - @objc - func setPreventsDisplaySleepDuringVideoPlayback(_ preventsDisplaySleepDuringVideoPlayback: Bool) { - _preventsDisplaySleepDuringVideoPlayback = preventsDisplaySleepDuringVideoPlayback - self.applyModifiers() - } - - @objc - func setAllowsExternalPlayback(_ allowsExternalPlayback: Bool) { - _allowsExternalPlayback = allowsExternalPlayback - _player?.allowsExternalPlayback = _allowsExternalPlayback - } - - @objc - func setPlayWhenInactive(_ playWhenInactive: Bool) { - _playWhenInactive = playWhenInactive - } - - @objc - func setPictureInPicture(_ pictureInPicture: Bool) { - #if os(iOS) - let audioSession = AVAudioSession.sharedInstance() - do { - try audioSession.setCategory(.playback) - try audioSession.setActive(true, options: []) - } catch {} - if pictureInPicture { - _pictureInPictureEnabled = true - } else { - _pictureInPictureEnabled = false - } - 
_pip?.setPictureInPicture(pictureInPicture) - #endif - } + @objc + func setMaxBitRate(_ maxBitRate: Float) { + _maxBitRate = maxBitRate + _playerItem?.preferredPeakBitRate = Double(maxBitRate) + } - @objc - func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore: Bool) { - #if os(iOS) - _pip?.setRestoreUserInterfaceForPIPStopCompletionHandler(restore) - #endif - } - - @objc - func setIgnoreSilentSwitch(_ ignoreSilentSwitch: String?) { - _ignoreSilentSwitch = ignoreSilentSwitch - RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) - applyModifiers() - } - - @objc - func setMixWithOthers(_ mixWithOthers: String?) { - _mixWithOthers = mixWithOthers - applyModifiers() - } - - @objc - func setPaused(_ paused: Bool) { - if paused { - if _adPlaying { - #if USE_GOOGLE_IMA - _imaAdsManager.getAdsManager()?.pause() - #endif - } else { - _player?.pause() - _player?.rate = 0.0 - } - } else { - RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) + @objc + func setPreferredForwardBufferDuration(_ preferredForwardBufferDuration: Float) { + _preferredForwardBufferDuration = preferredForwardBufferDuration + if #available(iOS 10.0, *) { + _playerItem?.preferredForwardBufferDuration = TimeInterval(preferredForwardBufferDuration) + } else { + // Fallback on earlier versions + } + } - if _adPlaying { - #if USE_GOOGLE_IMA - _imaAdsManager.getAdsManager()?.resume() - #endif - } else { - if #available(iOS 10.0, *), !_automaticallyWaitsToMinimizeStalling { - _player?.playImmediately(atRate: _rate) + @objc + func setAutomaticallyWaitsToMinimizeStalling(_ waits: Bool) { + _automaticallyWaitsToMinimizeStalling = waits + if #available(iOS 10.0, *) { + _player?.automaticallyWaitsToMinimizeStalling = waits } else { - _player?.play() - _player?.rate = _rate + // Fallback on earlier versions } - _player?.rate = _rate - } - } - - _paused = paused - } - - @objc - func setSeek(_ info: NSDictionary!) { - let seekTime: NSNumber! = info["time"] as! NSNumber - let seekTolerance: NSNumber! = info["tolerance"] as! NSNumber - let item: AVPlayerItem? 
= _player?.currentItem - guard item != nil, let player = _player, let item = item, item.status == AVPlayerItem.Status.readyToPlay else { - _pendingSeek = true - _pendingSeekTime = seekTime.floatValue - return - } - let wasPaused = _paused - - RCTPlayerOperations.seek( - player: player, - playerItem: item, - paused: wasPaused, - seekTime: seekTime.floatValue, - seekTolerance: seekTolerance.floatValue - ) - .then { [weak self] (_: Bool) in - guard let self = self else { return } - - self._playerObserver.addTimeObserverIfNotSet() - if !wasPaused { - self.setPaused(false) - } - self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))), - "seekTime": seekTime, - "target": self.reactTag]) - }.catch { _ in } - - _pendingSeek = false - } - - @objc - func setRate(_ rate: Float) { - _rate = rate - applyModifiers() - } - - @objc - func isMuted() -> Bool { - return _muted - } - - @objc - func setMuted(_ muted: Bool) { - _muted = muted - applyModifiers() - } - - @objc - func setAudioOutput(_ audioOutput: String) { - _audioOutput = audioOutput - RCTPlayerOperations.configureAudio(ignoreSilentSwitch: _ignoreSilentSwitch, mixWithOthers: _mixWithOthers, audioOutput: _audioOutput) - do { - if audioOutput == "speaker" { - #if os(iOS) - try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker) - #endif - } else if audioOutput == "earpiece" { - try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none) - } - } catch { - print("Error occurred: \(error.localizedDescription)") - } - } - - @objc - func setVolume(_ volume: Float) { - _volume = volume - applyModifiers() - } - - @objc - func setMaxBitRate(_ maxBitRate: Float) { - _maxBitRate = maxBitRate - _playerItem?.preferredPeakBitRate = Double(maxBitRate) - } - - @objc - func setPreferredForwardBufferDuration(_ preferredForwardBufferDuration: Float) { - _preferredForwardBufferDuration = preferredForwardBufferDuration - if #available(iOS 10.0, *) { - _playerItem?.preferredForwardBufferDuration = TimeInterval(preferredForwardBufferDuration) - } else { - // Fallback on earlier versions - } - } - - @objc - func setAutomaticallyWaitsToMinimizeStalling(_ waits: Bool) { - _automaticallyWaitsToMinimizeStalling = waits - if #available(iOS 10.0, *) { - _player?.automaticallyWaitsToMinimizeStalling = waits - } else { - // Fallback on earlier versions - } - } - - func setPlaybackRange(_ item: AVPlayerItem!, withVideoStart videoStart: Int64?, withVideoEnd videoEnd: Int64?) 
{ - if videoStart != nil { - let start = CMTimeMake(value: videoStart!, timescale: 1000) - item.reversePlaybackEndTime = start - _pendingSeekTime = Float(CMTimeGetSeconds(start)) - _pendingSeek = true - } - if videoEnd != nil { - item.forwardPlaybackEndTime = CMTimeMake(value: videoEnd!, timescale: 1000) - } - } - - func applyModifiers() { - if let video = _player?.currentItem, - video == nil || video.status != AVPlayerItem.Status.readyToPlay { - return - } - if _muted { - if !_controls { - _player?.volume = 0 - } - _player?.isMuted = true - } else { - _player?.volume = _volume - _player?.isMuted = false - } - - if #available(iOS 12.0, tvOS 12.0, *) { - _player?.preventsDisplaySleepDuringVideoPlayback = _preventsDisplaySleepDuringVideoPlayback - } else { - // Fallback on earlier versions - } - - if let _maxBitRate = _maxBitRate { - setMaxBitRate(_maxBitRate) - } - - setAudioOutput(_audioOutput) - setSelectedAudioTrack(_selectedAudioTrackCriteria) - setSelectedTextTrack(_selectedTextTrackCriteria) - setResizeMode(_resizeMode) - setRepeat(_repeat) - setControls(_controls) - setPaused(_paused) - setAllowsExternalPlayback(_allowsExternalPlayback) - } - - @objc - func setRepeat(_ repeat: Bool) { - _repeat = `repeat` - } - - @objc - func setSelectedAudioTrack(_ selectedAudioTrack: NSDictionary?) { - setSelectedAudioTrack(SelectedTrackCriteria(selectedAudioTrack)) - } - - func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) { - _selectedAudioTrackCriteria = selectedAudioTrack - RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible, - criteria: _selectedAudioTrackCriteria) - } - - @objc - func setSelectedTextTrack(_ selectedTextTrack: NSDictionary?) { - setSelectedTextTrack(SelectedTrackCriteria(selectedTextTrack)) - } - - func setSelectedTextTrack(_ selectedTextTrack: SelectedTrackCriteria?) { - _selectedTextTrackCriteria = selectedTextTrack - if _textTracks != nil { // sideloaded text tracks - RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks, criteria: _selectedTextTrackCriteria) - } else { // text tracks included in the HLS playlist - RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible, - criteria: _selectedTextTrackCriteria) - } - } - - @objc - func setTextTracks(_ textTracks: [NSDictionary]?) { - setTextTracks(textTracks?.map { TextTrack($0) }) - } - - func setTextTracks(_ textTracks: [TextTrack]?) { - _textTracks = textTracks - - // in case textTracks was set after selectedTextTrack - if _selectedTextTrackCriteria != nil { setSelectedTextTrack(_selectedTextTrackCriteria) } - } - - @objc - func setChapters(_ chapters: [NSDictionary]?) { - setChapters(chapters?.map { Chapter($0) }) - } - - func setChapters(_ chapters: [Chapter]?) { - _chapters = chapters - } - - @objc - func setFullscreen(_ fullscreen: Bool) { - if fullscreen && !_fullscreenPlayerPresented && _player != nil { - // Ensure player view controller is not null - // Controls will be displayed even if it is disabled in configuration - if _playerViewController == nil { - self.usePlayerViewController() - } - - // Set presentation style to fullscreen - _playerViewController?.modalPresentationStyle = .fullScreen - - // Find the nearest view controller - var viewController: UIViewController! = self.firstAvailableUIViewController() - if viewController == nil { - let keyWindow: UIWindow! 
= UIApplication.shared.keyWindow - viewController = keyWindow.rootViewController - if !viewController.children.isEmpty { - viewController = viewController.children.last + } + + func setPlaybackRange(_ item: AVPlayerItem!, withVideoStart videoStart: Int64?, withVideoEnd videoEnd: Int64?) { + if videoStart != nil { + let start = CMTimeMake(value: videoStart!, timescale: 1000) + item.reversePlaybackEndTime = start + _pendingSeekTime = Float(CMTimeGetSeconds(start)) + _pendingSeek = true + } + if videoEnd != nil { + item.forwardPlaybackEndTime = CMTimeMake(value: videoEnd!, timescale: 1000) + } + } + + func applyModifiers() { + if let video = _player?.currentItem, + video == nil || video.status != AVPlayerItem.Status.readyToPlay { + return + } + if _muted { + if !_controls { + _player?.volume = 0 + } + _player?.isMuted = true + } else { + _player?.volume = _volume + _player?.isMuted = false } - } - if viewController != nil { - _presentingViewController = viewController - self.onVideoFullscreenPlayerWillPresent?(["target": reactTag as Any]) + if #available(iOS 12.0, tvOS 12.0, *) { + _player?.preventsDisplaySleepDuringVideoPlayback = _preventsDisplaySleepDuringVideoPlayback + } else { + // Fallback on earlier versions + } - if let playerViewController = _playerViewController { - if _controls { - // prevents crash https://github.com/react-native-video/react-native-video/issues/3040 - self._playerViewController?.removeFromParent() - } + if let _maxBitRate = _maxBitRate { + setMaxBitRate(_maxBitRate) + } - viewController.present(playerViewController, animated: true, completion: { [weak self] in - guard let self = self else { return } - // In fullscreen we must display controls - self._playerViewController?.showsPlaybackControls = true - self._fullscreenPlayerPresented = fullscreen - self._playerViewController?.autorotate = self._fullscreenAutorotate + setAudioOutput(_audioOutput) + setSelectedAudioTrack(_selectedAudioTrackCriteria) + setSelectedTextTrack(_selectedTextTrackCriteria) + setResizeMode(_resizeMode) + setRepeat(_repeat) + setControls(_controls) + setPaused(_paused) + setAllowsExternalPlayback(_allowsExternalPlayback) + } + + @objc + func setRepeat(_ repeat: Bool) { + _repeat = `repeat` + } + + @objc + func setSelectedAudioTrack(_ selectedAudioTrack: NSDictionary?) { + setSelectedAudioTrack(SelectedTrackCriteria(selectedAudioTrack)) + } - self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag]) - }) + func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) { + _selectedAudioTrackCriteria = selectedAudioTrack + RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible, + criteria: _selectedAudioTrackCriteria) + } + + @objc + func setSelectedTextTrack(_ selectedTextTrack: NSDictionary?) { + setSelectedTextTrack(SelectedTrackCriteria(selectedTextTrack)) + } + + func setSelectedTextTrack(_ selectedTextTrack: SelectedTrackCriteria?) 
{ + _selectedTextTrackCriteria = selectedTextTrack + if _textTracks != nil { // sideloaded text tracks + RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks, criteria: _selectedTextTrackCriteria) + } else { // text tracks included in the HLS playlist + RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible, + criteria: _selectedTextTrackCriteria) } - } - } else if !fullscreen && _fullscreenPlayerPresented, let _playerViewController = _playerViewController { - self.videoPlayerViewControllerWillDismiss(playerViewController: _playerViewController) - _presentingViewController?.dismiss(animated: true, completion: { [weak self] in - self?.videoPlayerViewControllerDidDismiss(playerViewController: _playerViewController) - }) - } - } - - @objc - func setFullscreenAutorotate(_ autorotate: Bool) { - _fullscreenAutorotate = autorotate - if _fullscreenPlayerPresented { - _playerViewController?.autorotate = autorotate - } - } - - @objc - func setFullscreenOrientation(_ orientation: String?) { - _fullscreenOrientation = orientation - if _fullscreenPlayerPresented { - _playerViewController?.preferredOrientation = orientation - } - } - - func usePlayerViewController() { - guard let _player = _player, let _playerItem = _playerItem else { return } - - if _playerViewController == nil { - _playerViewController = createPlayerViewController(player: _player, withPlayerItem: _playerItem) - } - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before subview is added - setResizeMode(_resizeMode) - - guard let _playerViewController = _playerViewController else { return } - - if _controls { - let viewController: UIViewController! = self.reactViewController() - viewController?.addChild(_playerViewController) - self.addSubview(_playerViewController.view) - } - - _playerObserver.playerViewController = _playerViewController - } - - func createPlayerViewController(player: AVPlayer, withPlayerItem _: AVPlayerItem) -> RCTVideoPlayerViewController { - let viewController = RCTVideoPlayerViewController() - viewController.showsPlaybackControls = self._controls - viewController.rctDelegate = self - viewController.preferredOrientation = _fullscreenOrientation - - viewController.view.frame = self.bounds - viewController.player = player - if #available(tvOS 14.0, *) { - viewController.allowsPictureInPicturePlayback = true - } - return viewController - } - - func usePlayerLayer() { - if let _player = _player { - _playerLayer = AVPlayerLayer(player: _player) - _playerLayer?.frame = self.bounds - _playerLayer?.needsDisplayOnBoundsChange = true - - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before layer is added - setResizeMode(_resizeMode) - _playerObserver.playerLayer = _playerLayer - - if let _playerLayer = _playerLayer { - self.layer.addSublayer(_playerLayer) - } - self.layer.needsDisplayOnBoundsChange = true - #if os(iOS) - _pip?.setupPipController(_playerLayer) - #endif - } - } - - @objc - func setControls(_ controls: Bool) { - if _controls != controls || ((_playerLayer == nil) && (_playerViewController == nil)) { - _controls = controls - if _controls { - self.removePlayerLayer() - self.usePlayerViewController() - } else { - _playerViewController?.view.removeFromSuperview() - _playerViewController?.removeFromParent() - _playerViewController = nil - _playerObserver.playerViewController = nil - self.usePlayerLayer() - } - } - } - - @objc - 
func setProgressUpdateInterval(_ progressUpdateInterval: Float) { - _playerObserver.replaceTimeObserverIfSet(Float64(progressUpdateInterval)) - } - - func removePlayerLayer() { - _playerLayer?.removeFromSuperlayer() - _playerLayer = nil - _playerObserver.playerLayer = nil - } - - // MARK: - RCTVideoPlayerViewControllerDelegate - - func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) { - if _playerViewController == playerViewController - && _fullscreenPlayerPresented, - let onVideoFullscreenPlayerWillDismiss = onVideoFullscreenPlayerWillDismiss { - _playerObserver.removePlayerViewControllerObservers() - onVideoFullscreenPlayerWillDismiss(["target": reactTag as Any]) - } - } - - func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController) { - if _playerViewController == playerViewController && _fullscreenPlayerPresented { - _fullscreenPlayerPresented = false - _presentingViewController = nil - _playerViewController = nil - _playerObserver.playerViewController = nil - self.applyModifiers() - - onVideoFullscreenPlayerDidDismiss?(["target": reactTag as Any]) - } - } - - @objc - func setFilter(_ filterName: String!) { - _filterName = filterName - - if !_filterEnabled { - return - } else if let uri = _source?.uri, uri.contains("m3u8") { - return // filters don't work for HLS... return - } else if _playerItem?.asset == nil { - return - } - - let filter: CIFilter! = CIFilter(name: filterName) - if #available(iOS 9.0, *), let _playerItem = _playerItem { - self._playerItem?.videoComposition = AVVideoComposition( - asset: _playerItem.asset, - applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in - if filter == nil { - request.finish(with: request.sourceImage, context: nil) - } else { - let image: CIImage! = request.sourceImage.clampedToExtent() - filter.setValue(image, forKey: kCIInputImageKey) - let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) - request.finish(with: output, context: nil) - } + } + + @objc + func setTextTracks(_ textTracks: [NSDictionary]?) { + setTextTracks(textTracks?.map { TextTrack($0) }) + } + + func setTextTracks(_ textTracks: [TextTrack]?) { + _textTracks = textTracks + + // in case textTracks was set after selectedTextTrack + if _selectedTextTrackCriteria != nil { setSelectedTextTrack(_selectedTextTrackCriteria) } + } + + @objc + func setChapters(_ chapters: [NSDictionary]?) { + setChapters(chapters?.map { Chapter($0) }) + } + + func setChapters(_ chapters: [Chapter]?) { + _chapters = chapters + } + + @objc + func setFullscreen(_ fullscreen: Bool) { + if fullscreen && !_fullscreenPlayerPresented && _player != nil { + // Ensure player view controller is not null + // Controls will be displayed even if it is disabled in configuration + if _playerViewController == nil { + self.usePlayerViewController() + } + + // Set presentation style to fullscreen + _playerViewController?.modalPresentationStyle = .fullScreen + + // Find the nearest view controller + var viewController: UIViewController! = self.firstAvailableUIViewController() + if viewController == nil { + let keyWindow: UIWindow! 
= UIApplication.shared.keyWindow + viewController = keyWindow.rootViewController + if !viewController.children.isEmpty { + viewController = viewController.children.last + } + } + if viewController != nil { + _presentingViewController = viewController + + self.onVideoFullscreenPlayerWillPresent?(["target": reactTag as Any]) + + if let playerViewController = _playerViewController { + if _controls { + // prevents crash https://github.com/react-native-video/react-native-video/issues/3040 + self._playerViewController?.removeFromParent() + } + + viewController.present(playerViewController, animated: true, completion: { [weak self] in + guard let self = self else { return } + // In fullscreen we must display controls + self._playerViewController?.showsPlaybackControls = true + self._fullscreenPlayerPresented = fullscreen + self._playerViewController?.autorotate = self._fullscreenAutorotate + + self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag]) + }) + } + } + } else if !fullscreen && _fullscreenPlayerPresented, let _playerViewController = _playerViewController { + self.videoPlayerViewControllerWillDismiss(playerViewController: _playerViewController) + _presentingViewController?.dismiss(animated: true, completion: { [weak self] in + self?.videoPlayerViewControllerDidDismiss(playerViewController: _playerViewController) + }) + } + } + + @objc + func setFullscreenAutorotate(_ autorotate: Bool) { + _fullscreenAutorotate = autorotate + if _fullscreenPlayerPresented { + _playerViewController?.autorotate = autorotate + } + } + + @objc + func setFullscreenOrientation(_ orientation: String?) { + _fullscreenOrientation = orientation + if _fullscreenPlayerPresented { + _playerViewController?.preferredOrientation = orientation + } + } + + func usePlayerViewController() { + guard let _player = _player, let _playerItem = _playerItem else { return } + + if _playerViewController == nil { + _playerViewController = createPlayerViewController(player: _player, withPlayerItem: _playerItem) } - ) - } else { - // Fallback on earlier versions - } - } - - @objc - func setFilterEnabled(_ filterEnabled: Bool) { - _filterEnabled = filterEnabled - } - - // MARK: - RCTIMAAdsManager - - func getAdTagUrl() -> String? { - return _adTagUrl - } - - @objc - func setAdTagUrl(_ adTagUrl: String!) { - _adTagUrl = adTagUrl - } - - #if USE_GOOGLE_IMA - func getContentPlayhead() -> IMAAVPlayerContentPlayhead? { - return _contentPlayhead - } - #endif - func setAdPlaying(_ adPlaying: Bool) { - _adPlaying = adPlaying - } - - // MARK: - React View Management - - func insertReactSubview(view: UIView!, atIndex: Int) { - if _controls { - view.frame = self.bounds - _playerViewController?.contentOverlayView?.insertSubview(view, at: atIndex) - } else { - RCTLogError("video cannot have any subviews") - } - return - } - - func removeReactSubview(subview: UIView!) { - if _controls { - subview.removeFromSuperview() - } else { - RCTLog("video cannot have any subviews") - } - return - } - - override func layoutSubviews() { - super.layoutSubviews() - if _controls, let _playerViewController = _playerViewController { - _playerViewController.view.frame = bounds - - // also adjust all subviews of contentOverlayView - for subview in _playerViewController.contentOverlayView?.subviews ?? 
[] { - subview.frame = bounds - } - } else { - CATransaction.begin() - CATransaction.setAnimationDuration(0) - _playerLayer?.frame = bounds - CATransaction.commit() - } - } - - // MARK: - Lifecycle + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before subview is added + setResizeMode(_resizeMode) - override func removeFromSuperview() { - _player?.pause() - _player = nil - _resouceLoaderDelegate = nil - _playerObserver.clearPlayer() + guard let _playerViewController = _playerViewController else { return } - self.removePlayerLayer() + if _controls { + let viewController: UIViewController! = self.reactViewController() + viewController?.addChild(_playerViewController) + self.addSubview(_playerViewController.view) + } - if let _playerViewController = _playerViewController { - _playerViewController.view.removeFromSuperview() - _playerViewController.removeFromParent() - _playerViewController.rctDelegate = nil - _playerViewController.player = nil - self._playerViewController = nil - _playerObserver.playerViewController = nil + _playerObserver.playerViewController = _playerViewController } - _eventDispatcher = nil - // swiftlint:disable:next notification_center_detachment - NotificationCenter.default.removeObserver(self) - - super.removeFromSuperview() - } - - // MARK: - Export - - @objc - func save(options: NSDictionary!, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { - RCTVideoSave.save( - options: options, - resolve: resolve, - reject: reject, - playerItem: _playerItem - ) - } - - func setLicenseResult(_ license: String!, _ licenseUrl: String!) { - _resouceLoaderDelegate?.setLicenseResult(license, licenseUrl) - } - - func setLicenseResultError(_ error: String!, _ licenseUrl: String!) { - _resouceLoaderDelegate?.setLicenseResultError(error, licenseUrl) - } - - func dismissFullscreenPlayer() { - setFullscreen(false) - } - - func presentFullscreenPlayer() { - setFullscreen(true) - } - - // MARK: - RCTPlayerObserverHandler - - func handleTimeUpdate(time _: CMTime) { - sendProgressUpdate() - } - - func handleReadyForDisplay(changeObject _: Any, change _: NSKeyValueObservedChange) { - onReadyForDisplay?([ - "target": reactTag, - ]) - } - - // When timeMetadata is read the event onTimedMetadata is triggered - func handleTimeMetadataChange(playerItem _: AVPlayerItem, change: NSKeyValueObservedChange<[AVMetadataItem]?>) { - guard let newValue = change.newValue, let _items = newValue, !_items.isEmpty else { - return - } - - var metadata: [[String: String?]?] = [] - for item in _items { - let value = item.value as? String - let identifier = item.identifier?.rawValue - - if let value = value { - metadata.append(["value": value, "identifier": identifier]) - } - } - - onTimedMetadata?([ - "target": reactTag, - "metadata": metadata, - ]) - } - - // Handle player item status change. - func handlePlayerItemStatusChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { - guard let _playerItem = _playerItem else { - return - } - - if _playerItem.status == .readyToPlay { - handleReadyToPlay() - } else if _playerItem.status == .failed { - handlePlaybackFailed() - } - } - - func handleReadyToPlay() { - guard let _playerItem = _playerItem else { return } - var duration = Float(CMTimeGetSeconds(_playerItem.asset.duration)) - - if duration.isNaN { - duration = 0.0 - } - - var width: Float? - var height: Float? 
- var orientation = "undefined" - - if !_playerItem.asset.tracks(withMediaType: AVMediaType.video).isEmpty { - let videoTrack = _playerItem.asset.tracks(withMediaType: .video)[0] - width = Float(videoTrack.naturalSize.width) - height = Float(videoTrack.naturalSize.height) - let preferredTransform = videoTrack.preferredTransform - - if (videoTrack.naturalSize.width == preferredTransform.tx - && videoTrack.naturalSize.height == preferredTransform.ty) - || (preferredTransform.tx == 0 && preferredTransform.ty == 0) { - orientation = "landscape" - } else { - orientation = "portrait" - } - } else if _playerItem.presentationSize.height != 0.0 { - width = Float(_playerItem.presentationSize.width) - height = Float(_playerItem.presentationSize.height) - orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? "landscape" : "portrait" - } - - if _pendingSeek { - setSeek([ - "time": NSNumber(value: _pendingSeekTime), - "tolerance": NSNumber(value: 100), - ]) - _pendingSeek = false - } - - if _startPosition >= 0 { - setSeek([ - "time": NSNumber(value: _startPosition), - "tolerance": NSNumber(value: 100), - ]) - _startPosition = -1 - } - - if _videoLoadStarted { - let audioTracks = RCTVideoUtils.getAudioTrackInfo(_player) - let textTracks = RCTVideoUtils.getTextTrackInfo(_player).map(\.json) - onVideoLoad?(["duration": NSNumber(value: duration), - "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))), - "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse), - "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward), - "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward), - "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse), - "canStepBackward": NSNumber(value: _playerItem.canStepBackward), - "canStepForward": NSNumber(value: _playerItem.canStepForward), - "naturalSize": [ - "width": width != nil ? NSNumber(value: width!) : "undefinded", - "height": width != nil ? NSNumber(value: height!) : "undefinded", - "orientation": orientation, - ], - "audioTracks": audioTracks, - "textTracks": textTracks, - "target": reactTag as Any]) - } - _videoLoadStarted = false - _playerObserver.attachPlayerEventListeners() - applyModifiers() - } - - func handlePlaybackFailed() { - guard let _playerItem = _playerItem else { return } - onVideoError?( - [ - "error": [ - "code": NSNumber(value: (_playerItem.error! as NSError).code), - "localizedDescription": _playerItem.error?.localizedDescription == nil ? "" : _playerItem.error?.localizedDescription, - "localizedFailureReason": ((_playerItem.error! as NSError).localizedFailureReason == nil ? - "" : (_playerItem.error! as NSError).localizedFailureReason) ?? "", - "localizedRecoverySuggestion": ((_playerItem.error! as NSError).localizedRecoverySuggestion == nil ? - "" : (_playerItem.error! as NSError).localizedRecoverySuggestion) ?? "", - "domain": (_playerItem.error as! NSError).domain, - ], - "target": reactTag, - ] - ) - } - - func handlePlaybackBufferKeyEmpty(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { - _playerBufferEmpty = true - onVideoBuffer?(["isBuffering": true, "target": reactTag as Any]) - } - - // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. 
- func handlePlaybackLikelyToKeepUp(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { - if (!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && ((_playerItem?.isPlaybackLikelyToKeepUp) == true) { - setPaused(_paused) - } - _playerBufferEmpty = false - onVideoBuffer?(["isBuffering": false, "target": reactTag as Any]) - } - - func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) { - guard let _player = _player else { return } - - if player.rate == change.oldValue && change.oldValue != nil { - return - } - - onPlaybackRateChange?(["playbackRate": NSNumber(value: _player.rate), - "target": reactTag as Any]) - - onVideoPlaybackStateChanged?(["isPlaying": _player.rate != 0, - "target": reactTag as Any]) - - if _playbackStalled && _player.rate > 0 { - onPlaybackResume?(["playbackRate": NSNumber(value: _player.rate), - "target": reactTag as Any]) - _playbackStalled = false + func createPlayerViewController(player: AVPlayer, withPlayerItem _: AVPlayerItem) -> RCTVideoPlayerViewController { + let viewController = RCTVideoPlayerViewController() + viewController.showsPlaybackControls = self._controls + viewController.rctDelegate = self + viewController.preferredOrientation = _fullscreenOrientation + + viewController.view.frame = self.bounds + viewController.player = player + if #available(tvOS 14.0, *) { + viewController.allowsPictureInPicturePlayback = true + } + return viewController } - } - func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) { - guard let _player = _player else { return } + func usePlayerLayer() { + if let _player = _player { + _playerLayer = AVPlayerLayer(player: _player) + _playerLayer?.frame = self.bounds + _playerLayer?.needsDisplayOnBoundsChange = true - if player.rate == change.oldValue && change.oldValue != nil { - return + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before layer is added + setResizeMode(_resizeMode) + _playerObserver.playerLayer = _playerLayer + + if let _playerLayer = _playerLayer { + self.layer.addSublayer(_playerLayer) + } + self.layer.needsDisplayOnBoundsChange = true + #if os(iOS) + _pip?.setupPipController(_playerLayer) + #endif + } } - onVolumeChange?(["volume": NSNumber(value: _player.volume), - "target": reactTag as Any]) - } + @objc + func setControls(_ controls: Bool) { + if _controls != controls || ((_playerLayer == nil) && (_playerViewController == nil)) { + _controls = controls + if _controls { + self.removePlayerLayer() + self.usePlayerViewController() + } else { + _playerViewController?.view.removeFromSuperview() + _playerViewController?.removeFromParent() + _playerViewController = nil + _playerObserver.playerViewController = nil + self.usePlayerLayer() + } + } + } + + @objc + func setProgressUpdateInterval(_ progressUpdateInterval: Float) { + _playerObserver.replaceTimeObserverIfSet(Float64(progressUpdateInterval)) + } + + func removePlayerLayer() { + _playerLayer?.removeFromSuperlayer() + _playerLayer = nil + _playerObserver.playerLayer = nil + } - func handleExternalPlaybackActiveChange(player _: AVPlayer, change _: NSKeyValueObservedChange) { - guard let _player = _player else { return } - onVideoExternalPlaybackChange?(["isExternalPlaybackActive": NSNumber(value: _player.isExternalPlaybackActive), - "target": reactTag as Any]) - } + // MARK: - RCTVideoPlayerViewControllerDelegate - func handleViewControllerOverlayViewFrameChange(overlayView _: UIView, change: NSKeyValueObservedChange) { - let oldRect = 
change.oldValue - let newRect = change.newValue - if !oldRect!.equalTo(newRect!) { - // https://github.com/react-native-video/react-native-video/issues/3085#issuecomment-1557293391 - if newRect!.equalTo(UIScreen.main.bounds) { - RCTLog("in fullscreen") - if !_fullscreenUncontrolPlayerPresented { - _fullscreenUncontrolPlayerPresented = true + func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) { + if _playerViewController == playerViewController + && _fullscreenPlayerPresented, + let onVideoFullscreenPlayerWillDismiss = onVideoFullscreenPlayerWillDismiss { + _playerObserver.removePlayerViewControllerObservers() + onVideoFullscreenPlayerWillDismiss(["target": reactTag as Any]) + } + } + + func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController) { + if _playerViewController == playerViewController && _fullscreenPlayerPresented { + _fullscreenPlayerPresented = false + _presentingViewController = nil + _playerViewController = nil + _playerObserver.playerViewController = nil + self.applyModifiers() - self.onVideoFullscreenPlayerWillPresent?(["target": self.reactTag as Any]) - self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag as Any]) + onVideoFullscreenPlayerDidDismiss?(["target": reactTag as Any]) } - } else { - NSLog("not fullscreen") - if _fullscreenUncontrolPlayerPresented { - _fullscreenUncontrolPlayerPresented = false + } + + @objc + func setFilter(_ filterName: String!) { + _filterName = filterName - self.onVideoFullscreenPlayerWillDismiss?(["target": self.reactTag as Any]) - self.onVideoFullscreenPlayerDidDismiss?(["target": self.reactTag as Any]) + if !_filterEnabled { + return + } else if let uri = _source?.uri, uri.contains("m3u8") { + return // filters don't work for HLS... return + } else if _playerItem?.asset == nil { + return } - } - - self.reactViewController().view.frame = UIScreen.main.bounds - self.reactViewController().view.setNeedsLayout() - } - } - - @objc - func handleDidFailToFinishPlaying(notification: NSNotification!) { - let error: NSError! = notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError - onVideoError?( - [ - "error": [ - "code": NSNumber(value: (error as NSError).code), - "localizedDescription": error.localizedDescription ?? "", - "localizedFailureReason": (error as NSError).localizedFailureReason ?? "", - "localizedRecoverySuggestion": (error as NSError).localizedRecoverySuggestion ?? "", - "domain": (error as NSError).domain, - ], - "target": reactTag, - ] - ) - } - - @objc - func handlePlaybackStalled(notification _: NSNotification!) { - onPlaybackStalled?(["target": reactTag as Any]) - _playbackStalled = true - } - - @objc - func handlePlayerItemDidReachEnd(notification: NSNotification!) { - onVideoEnd?(["target": reactTag as Any]) + + let filter: CIFilter! = CIFilter(name: filterName) + if #available(iOS 9.0, *), let _playerItem = _playerItem { + self._playerItem?.videoComposition = AVVideoComposition( + asset: _playerItem.asset, + applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in + if filter == nil { + request.finish(with: request.sourceImage, context: nil) + } else { + let image: CIImage! = request.sourceImage.clampedToExtent() + filter.setValue(image, forKey: kCIInputImageKey) + let output: CIImage! 
= filter.outputImage?.cropped(to: request.sourceImage.extent) + request.finish(with: output, context: nil) + } + } + ) + } else { + // Fallback on earlier versions + } + } + + @objc + func setFilterEnabled(_ filterEnabled: Bool) { + _filterEnabled = filterEnabled + } + + // MARK: - RCTIMAAdsManager + + func getAdTagUrl() -> String? { + return _adTagUrl + } + + @objc + func setAdTagUrl(_ adTagUrl: String!) { + _adTagUrl = adTagUrl + } + #if USE_GOOGLE_IMA - if notification.object as? AVPlayerItem == _player?.currentItem { - _imaAdsManager.getAdsLoader()?.contentComplete() - } + func getContentPlayhead() -> IMAAVPlayerContentPlayhead? { + return _contentPlayhead + } #endif - if _repeat { - let item: AVPlayerItem! = notification.object as? AVPlayerItem - item.seek(to: CMTime.zero, completionHandler: nil) - self.applyModifiers() - } else { - self.setPaused(true) - _playerObserver.removePlayerTimeObserver() - } - } - - @objc - func handleAVPlayerAccess(notification: NSNotification!) { - let accessLog: AVPlayerItemAccessLog! = (notification.object as! AVPlayerItem).accessLog() - let lastEvent: AVPlayerItemAccessLogEvent! = accessLog.events.last - - onVideoBandwidthUpdate?(["bitrate": lastEvent.observedBitrate, "target": reactTag]) - } + func setAdPlaying(_ adPlaying: Bool) { + _adPlaying = adPlaying + } + + // MARK: - React View Management + + func insertReactSubview(view: UIView!, atIndex: Int) { + if _controls { + view.frame = self.bounds + _playerViewController?.contentOverlayView?.insertSubview(view, at: atIndex) + } else { + RCTLogError("video cannot have any subviews") + } + return + } + + func removeReactSubview(subview: UIView!) { + if _controls { + subview.removeFromSuperview() + } else { + RCTLog("video cannot have any subviews") + } + return + } + + override func layoutSubviews() { + super.layoutSubviews() + if _controls, let _playerViewController = _playerViewController { + _playerViewController.view.frame = bounds + + // also adjust all subviews of contentOverlayView + for subview in _playerViewController.contentOverlayView?.subviews ?? [] { + subview.frame = bounds + } + } else { + CATransaction.begin() + CATransaction.setAnimationDuration(0) + _playerLayer?.frame = bounds + CATransaction.commit() + } + } + + // MARK: - Lifecycle + + override func removeFromSuperview() { + _player?.pause() + _player = nil + _resouceLoaderDelegate = nil + _playerObserver.clearPlayer() + + self.removePlayerLayer() + + if let _playerViewController = _playerViewController { + _playerViewController.view.removeFromSuperview() + _playerViewController.removeFromParent() + _playerViewController.rctDelegate = nil + _playerViewController.player = nil + self._playerViewController = nil + _playerObserver.playerViewController = nil + } + + _eventDispatcher = nil + // swiftlint:disable:next notification_center_detachment + NotificationCenter.default.removeObserver(self) + + super.removeFromSuperview() + } + + // MARK: - Export + + @objc + func save(options: NSDictionary!, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { + RCTVideoSave.save( + options: options, + resolve: resolve, + reject: reject, + playerItem: _playerItem + ) + } + + func setLicenseResult(_ license: String!, _ licenseUrl: String!) { + _resouceLoaderDelegate?.setLicenseResult(license, licenseUrl) + } + + func setLicenseResultError(_ error: String!, _ licenseUrl: String!) 
{ + _resouceLoaderDelegate?.setLicenseResultError(error, licenseUrl) + } + + func dismissFullscreenPlayer() { + setFullscreen(false) + } + + func presentFullscreenPlayer() { + setFullscreen(true) + } + + // MARK: - RCTPlayerObserverHandler + + func handleTimeUpdate(time _: CMTime) { + sendProgressUpdate() + } + + func handleReadyForDisplay(changeObject _: Any, change _: NSKeyValueObservedChange) { + onReadyForDisplay?([ + "target": reactTag, + ]) + } + + // When timeMetadata is read the event onTimedMetadata is triggered + func handleTimeMetadataChange(playerItem _: AVPlayerItem, change: NSKeyValueObservedChange<[AVMetadataItem]?>) { + guard let newValue = change.newValue, let _items = newValue, !_items.isEmpty else { + return + } + + var metadata: [[String: String?]?] = [] + for item in _items { + let value = item.value as? String + let identifier = item.identifier?.rawValue + + if let value = value { + metadata.append(["value": value, "identifier": identifier]) + } + } + + onTimedMetadata?([ + "target": reactTag, + "metadata": metadata, + ]) + } + + // Handle player item status change. + func handlePlayerItemStatusChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { + guard let _playerItem = _playerItem else { + return + } + + if _playerItem.status == .readyToPlay { + handleReadyToPlay() + } else if _playerItem.status == .failed { + handlePlaybackFailed() + } + } + + func handleReadyToPlay() { + guard let _playerItem = _playerItem else { return } + var duration = Float(CMTimeGetSeconds(_playerItem.asset.duration)) + + if duration.isNaN { + duration = 0.0 + } + + var width: Float? + var height: Float? + var orientation = "undefined" + + if !_playerItem.asset.tracks(withMediaType: AVMediaType.video).isEmpty { + let videoTrack = _playerItem.asset.tracks(withMediaType: .video)[0] + width = Float(videoTrack.naturalSize.width) + height = Float(videoTrack.naturalSize.height) + let preferredTransform = videoTrack.preferredTransform + + if (videoTrack.naturalSize.width == preferredTransform.tx + && videoTrack.naturalSize.height == preferredTransform.ty) + || (preferredTransform.tx == 0 && preferredTransform.ty == 0) { + orientation = "landscape" + } else { + orientation = "portrait" + } + } else if _playerItem.presentationSize.height != 0.0 { + width = Float(_playerItem.presentationSize.width) + height = Float(_playerItem.presentationSize.height) + orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? 
"landscape" : "portrait" + } + + if _pendingSeek { + setSeek([ + "time": NSNumber(value: _pendingSeekTime), + "tolerance": NSNumber(value: 100), + ]) + _pendingSeek = false + } + + if _startPosition >= 0 { + setSeek([ + "time": NSNumber(value: _startPosition), + "tolerance": NSNumber(value: 100), + ]) + _startPosition = -1 + } + + if _videoLoadStarted { + let audioTracks = RCTVideoUtils.getAudioTrackInfo(_player) + let textTracks = RCTVideoUtils.getTextTrackInfo(_player).map(\.json) + onVideoLoad?(["duration": NSNumber(value: duration), + "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))), + "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse), + "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward), + "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward), + "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse), + "canStepBackward": NSNumber(value: _playerItem.canStepBackward), + "canStepForward": NSNumber(value: _playerItem.canStepForward), + "naturalSize": [ + "width": width != nil ? NSNumber(value: width!) : "undefinded", + "height": width != nil ? NSNumber(value: height!) : "undefinded", + "orientation": orientation, + ], + "audioTracks": audioTracks, + "textTracks": textTracks, + "target": reactTag as Any]) + } + _videoLoadStarted = false + _playerObserver.attachPlayerEventListeners() + applyModifiers() + } + + func handlePlaybackFailed() { + guard let _playerItem = _playerItem else { return } + onVideoError?( + [ + "error": [ + "code": NSNumber(value: (_playerItem.error! as NSError).code), + "localizedDescription": _playerItem.error?.localizedDescription == nil ? "" : _playerItem.error?.localizedDescription, + "localizedFailureReason": ((_playerItem.error! as NSError).localizedFailureReason == nil ? + "" : (_playerItem.error! as NSError).localizedFailureReason) ?? "", + "localizedRecoverySuggestion": ((_playerItem.error! as NSError).localizedRecoverySuggestion == nil ? + "" : (_playerItem.error! as NSError).localizedRecoverySuggestion) ?? "", + "domain": (_playerItem.error as! NSError).domain, + ], + "target": reactTag, + ] + ) + } + + func handlePlaybackBufferKeyEmpty(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { + _playerBufferEmpty = true + onVideoBuffer?(["isBuffering": true, "target": reactTag as Any]) + } + + // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. 
+ func handlePlaybackLikelyToKeepUp(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange) { + if (!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && ((_playerItem?.isPlaybackLikelyToKeepUp) == true) { + setPaused(_paused) + } + _playerBufferEmpty = false + onVideoBuffer?(["isBuffering": false, "target": reactTag as Any]) + } + + func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange) { + guard let _player = _player else { return } + + if player.rate == change.oldValue && change.oldValue != nil { + return + } + + onPlaybackRateChange?(["playbackRate": NSNumber(value: _player.rate), + "target": reactTag as Any]) + + onVideoPlaybackStateChanged?(["isPlaying": _player.rate != 0, + "target": reactTag as Any]) + + if _playbackStalled && _player.rate > 0 { + onPlaybackResume?(["playbackRate": NSNumber(value: _player.rate), + "target": reactTag as Any]) + _playbackStalled = false + } + } + + func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange) { + guard let _player = _player else { return } + + if player.rate == change.oldValue && change.oldValue != nil { + return + } + + onVolumeChange?(["volume": NSNumber(value: _player.volume), + "target": reactTag as Any]) + } + + func handleExternalPlaybackActiveChange(player _: AVPlayer, change _: NSKeyValueObservedChange) { + guard let _player = _player else { return } + onVideoExternalPlaybackChange?(["isExternalPlaybackActive": NSNumber(value: _player.isExternalPlaybackActive), + "target": reactTag as Any]) + } + + func handleViewControllerOverlayViewFrameChange(overlayView _: UIView, change: NSKeyValueObservedChange) { + let oldRect = change.oldValue + let newRect = change.newValue + if !oldRect!.equalTo(newRect!) { + // https://github.com/react-native-video/react-native-video/issues/3085#issuecomment-1557293391 + if newRect!.equalTo(UIScreen.main.bounds) { + RCTLog("in fullscreen") + if !_fullscreenUncontrolPlayerPresented { + _fullscreenUncontrolPlayerPresented = true + + self.onVideoFullscreenPlayerWillPresent?(["target": self.reactTag as Any]) + self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag as Any]) + } + } else { + NSLog("not fullscreen") + if _fullscreenUncontrolPlayerPresented { + _fullscreenUncontrolPlayerPresented = false + + self.onVideoFullscreenPlayerWillDismiss?(["target": self.reactTag as Any]) + self.onVideoFullscreenPlayerDidDismiss?(["target": self.reactTag as Any]) + } + } + + self.reactViewController().view.frame = UIScreen.main.bounds + self.reactViewController().view.setNeedsLayout() + } + } + + @objc + func handleDidFailToFinishPlaying(notification: NSNotification!) { + let error: NSError! = notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError + onVideoError?( + [ + "error": [ + "code": NSNumber(value: (error as NSError).code), + "localizedDescription": error.localizedDescription ?? "", + "localizedFailureReason": (error as NSError).localizedFailureReason ?? "", + "localizedRecoverySuggestion": (error as NSError).localizedRecoverySuggestion ?? "", + "domain": (error as NSError).domain, + ], + "target": reactTag, + ] + ) + } + + @objc + func handlePlaybackStalled(notification _: NSNotification!) { + onPlaybackStalled?(["target": reactTag as Any]) + _playbackStalled = true + } + + @objc + func handlePlayerItemDidReachEnd(notification: NSNotification!) { + onVideoEnd?(["target": reactTag as Any]) + #if USE_GOOGLE_IMA + if notification.object as? 
AVPlayerItem == _player?.currentItem { + _imaAdsManager.getAdsLoader()?.contentComplete() + } + #endif + if _repeat { + let item: AVPlayerItem! = notification.object as? AVPlayerItem + item.seek(to: CMTime.zero, completionHandler: nil) + self.applyModifiers() + } else { + self.setPaused(true) + _playerObserver.removePlayerTimeObserver() + } + } + + @objc + func handleAVPlayerAccess(notification: NSNotification!) { + let accessLog: AVPlayerItemAccessLog! = (notification.object as! AVPlayerItem).accessLog() + let lastEvent: AVPlayerItemAccessLogEvent! = accessLog.events.last + + onVideoBandwidthUpdate?(["bitrate": lastEvent.observedBitrate, "target": reactTag]) + } } diff --git a/ios/Video/RCTVideoManager.swift b/ios/Video/RCTVideoManager.swift index 2839e61d00..bfc398d7b4 100644 --- a/ios/Video/RCTVideoManager.swift +++ b/ios/Video/RCTVideoManager.swift @@ -3,88 +3,88 @@ import React @objc(RCTVideoManager) class RCTVideoManager: RCTViewManager { - override func view() -> UIView { - return RCTVideo(eventDispatcher: bridge.eventDispatcher() as! RCTEventDispatcher) - } + override func view() -> UIView { + return RCTVideo(eventDispatcher: bridge.eventDispatcher() as! RCTEventDispatcher) + } - func methodQueue() -> DispatchQueue { - return bridge.uiManager.methodQueue - } + func methodQueue() -> DispatchQueue { + return bridge.uiManager.methodQueue + } - @objc(save:reactTag:resolver:rejecter:) - func save(options: NSDictionary, reactTag: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { - bridge.uiManager.prependUIBlock { _, viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.save(options: options, resolve: resolve, reject: reject) - } + @objc(save:reactTag:resolver:rejecter:) + func save(options: NSDictionary, reactTag: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.save(options: options, resolve: resolve, reject: reject) + } + } } - } - @objc(setLicenseResult:licenseUrl:reactTag:) - func setLicenseResult(license: NSString, licenseUrl: NSString, reactTag: NSNumber) { - bridge.uiManager.prependUIBlock { _, viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.setLicenseResult(license as String, licenseUrl as String) - } + @objc(setLicenseResult:licenseUrl:reactTag:) + func setLicenseResult(license: NSString, licenseUrl: NSString, reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? 
RCTVideo { + view.setLicenseResult(license as String, licenseUrl as String) + } + } } - } - @objc(setLicenseResultError:licenseUrl:reactTag:) - func setLicenseResultError(error: NSString, licenseUrl: NSString, reactTag: NSNumber) { - bridge.uiManager.prependUIBlock { _, viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.setLicenseResultError(error as String, licenseUrl as String) - } + @objc(setLicenseResultError:licenseUrl:reactTag:) + func setLicenseResultError(error: NSString, licenseUrl: NSString, reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.setLicenseResultError(error as String, licenseUrl as String) + } + } } - } - @objc(dismissFullscreenPlayer:) - func dismissFullscreenPlayer(_ reactTag: NSNumber) { - bridge.uiManager.prependUIBlock { _, viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.dismissFullscreenPlayer() - } + @objc(dismissFullscreenPlayer:) + func dismissFullscreenPlayer(_ reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.dismissFullscreenPlayer() + } + } } - } - @objc(presentFullscreenPlayer:) - func presentFullscreenPlayer(_ reactTag: NSNumber) { - bridge.uiManager.prependUIBlock { _, viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - view.presentFullscreenPlayer() - } + @objc(presentFullscreenPlayer:) + func presentFullscreenPlayer(_ reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? RCTVideo { + view.presentFullscreenPlayer() + } + } } - } - @objc(setPlayerPauseState:reactTag:) - func setPlayerPauseState(paused: NSNumber, reactTag: NSNumber) { - bridge.uiManager.prependUIBlock { _, viewRegistry in - let view = viewRegistry?[reactTag] - if !(view is RCTVideo) { - RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) - } else if let view = view as? RCTVideo { - let paused = paused.boolValue - view.setPaused(paused) - } + @objc(setPlayerPauseState:reactTag:) + func setPlayerPauseState(paused: NSNumber, reactTag: NSNumber) { + bridge.uiManager.prependUIBlock { _, viewRegistry in + let view = viewRegistry?[reactTag] + if !(view is RCTVideo) { + RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view)) + } else if let view = view as? 
RCTVideo { + let paused = paused.boolValue + view.setPaused(paused) + } + } } - } - override class func requiresMainQueueSetup() -> Bool { - return true - } + override class func requiresMainQueueSetup() -> Bool { + return true + } } diff --git a/ios/Video/RCTVideoPlayerViewController.swift b/ios/Video/RCTVideoPlayerViewController.swift index 4cc175710a..eef65a3691 100644 --- a/ios/Video/RCTVideoPlayerViewController.swift +++ b/ios/Video/RCTVideoPlayerViewController.swift @@ -1,44 +1,44 @@ import AVKit class RCTVideoPlayerViewController: AVPlayerViewController { - weak var rctDelegate: RCTVideoPlayerViewControllerDelegate? + weak var rctDelegate: RCTVideoPlayerViewControllerDelegate? - // Optional paramters - var preferredOrientation: String? - var autorotate: Bool? + // Optional paramters + var preferredOrientation: String? + var autorotate: Bool? - func shouldAutorotate() -> Bool { - if autorotate! || preferredOrientation == nil || (preferredOrientation!.lowercased() == "all") { - return true - } - - return false - } - - override func viewDidDisappear(_ animated: Bool) { - super.viewDidDisappear(animated) + func shouldAutorotate() -> Bool { + if autorotate! || preferredOrientation == nil || (preferredOrientation!.lowercased() == "all") { + return true + } - rctDelegate?.videoPlayerViewControllerWillDismiss(playerViewController: self) - rctDelegate?.videoPlayerViewControllerDidDismiss(playerViewController: self) - } - - #if !os(tvOS) - - func supportedInterfaceOrientations() -> UIInterfaceOrientationMask { - return .all + return false } - func preferredInterfaceOrientationForPresentation() -> UIInterfaceOrientation { - if preferredOrientation?.lowercased() == "landscape" { - return .landscapeRight - } else if preferredOrientation?.lowercased() == "portrait" { - return .portrait - } else { - // default case - let orientation = UIApplication.shared.statusBarOrientation - return orientation - } + override func viewDidDisappear(_ animated: Bool) { + super.viewDidDisappear(animated) + + rctDelegate?.videoPlayerViewControllerWillDismiss(playerViewController: self) + rctDelegate?.videoPlayerViewControllerDidDismiss(playerViewController: self) } - #endif + #if !os(tvOS) + + func supportedInterfaceOrientations() -> UIInterfaceOrientationMask { + return .all + } + + func preferredInterfaceOrientationForPresentation() -> UIInterfaceOrientation { + if preferredOrientation?.lowercased() == "landscape" { + return .landscapeRight + } else if preferredOrientation?.lowercased() == "portrait" { + return .portrait + } else { + // default case + let orientation = UIApplication.shared.statusBarOrientation + return orientation + } + } + + #endif } diff --git a/ios/Video/RCTVideoPlayerViewControllerDelegate.swift b/ios/Video/RCTVideoPlayerViewControllerDelegate.swift index 7d1bbc6c4f..2f9fec3fbd 100644 --- a/ios/Video/RCTVideoPlayerViewControllerDelegate.swift +++ b/ios/Video/RCTVideoPlayerViewControllerDelegate.swift @@ -2,6 +2,6 @@ import AVKit import Foundation protocol RCTVideoPlayerViewControllerDelegate: class { - func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) - func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController) + func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController) + func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController) } diff --git a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift index 
361ee7d722..36111725c2 100644 --- a/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift +++ b/ios/Video/RCTVideoSwiftLog/RCTVideoSwiftLog.swift @@ -28,27 +28,27 @@ let logHeader: String = "RNV:" func RCTLogError(_ message: String, _ file: String = #file, _ line: UInt = #line) { - RCTVideoSwiftLog.error(logHeader + message, file: file, line: line) + RCTVideoSwiftLog.error(logHeader + message, file: file, line: line) } func RCTLogWarn(_ message: String, _ file: String = #file, _ line: UInt = #line) { - RCTVideoSwiftLog.warn(logHeader + message, file: file, line: line) + RCTVideoSwiftLog.warn(logHeader + message, file: file, line: line) } func RCTLogInfo(_ message: String, _ file: String = #file, _ line: UInt = #line) { - RCTVideoSwiftLog.info(logHeader + message, file: file, line: line) + RCTVideoSwiftLog.info(logHeader + message, file: file, line: line) } func RCTLog(_ message: String, _ file: String = #file, _ line: UInt = #line) { - RCTVideoSwiftLog.log(logHeader + message, file: file, line: line) + RCTVideoSwiftLog.log(logHeader + message, file: file, line: line) } func RCTLogTrace(_ message: String, _ file: String = #file, _ line: UInt = #line) { - RCTVideoSwiftLog.trace(logHeader + message, file: file, line: line) + RCTVideoSwiftLog.trace(logHeader + message, file: file, line: line) } func DebugLog(_ message: String) { - #if DEBUG - print(logHeader + message) - #endif + #if DEBUG + print(logHeader + message) + #endif } diff --git a/ios/Video/UIView+FindUIViewController.swift b/ios/Video/UIView+FindUIViewController.swift index 3adc298a11..2947e1e384 100644 --- a/ios/Video/UIView+FindUIViewController.swift +++ b/ios/Video/UIView+FindUIViewController.swift @@ -1,18 +1,18 @@ // Source: http://stackoverflow.com/a/3732812/1123156 extension UIView { - func firstAvailableUIViewController() -> UIViewController? { - // convenience function for casting and to "mask" the recursive function - return traverseResponderChainForUIViewController() - } + func firstAvailableUIViewController() -> UIViewController? { + // convenience function for casting and to "mask" the recursive function + return traverseResponderChainForUIViewController() + } - func traverseResponderChainForUIViewController() -> UIViewController? { - if let nextUIViewController = next as? UIViewController { - return nextUIViewController - } else if let nextUIView = next as? UIView { - return nextUIView.traverseResponderChainForUIViewController() - } else { - return nil + func traverseResponderChainForUIViewController() -> UIViewController? { + if let nextUIViewController = next as? UIViewController { + return nextUIViewController + } else if let nextUIView = next as? UIView { + return nextUIView.traverseResponderChainForUIViewController() + } else { + return nil + } } - } } diff --git a/ios/VideoCaching/RCTVideoCachingHandler.swift b/ios/VideoCaching/RCTVideoCachingHandler.swift index 7a795468f3..8ccc4aaae5 100644 --- a/ios/VideoCaching/RCTVideoCachingHandler.swift +++ b/ios/VideoCaching/RCTVideoCachingHandler.swift @@ -4,94 +4,94 @@ import Foundation import Promises class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate { - private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance() - var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)? + private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance() + var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)? 
- override init() { - super.init() - } + override init() { + super.init() + } - func shouldCache(source: VideoSource, textTracks: [TextTrack]?) -> Bool { - if source.isNetwork && source.shouldCache && ((textTracks == nil) || (textTracks!.isEmpty)) { - /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying - * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. - * Until this is fixed, we need to bypass caching when text tracks are specified. - */ - DebugLog(""" - Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache. - Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md - """) - return true + func shouldCache(source: VideoSource, textTracks: [TextTrack]?) -> Bool { + if source.isNetwork && source.shouldCache && ((textTracks == nil) || (textTracks!.isEmpty)) { + /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying + * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. + * Until this is fixed, we need to bypass caching when text tracks are specified. + */ + DebugLog(""" + Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) + return true + } + return false } - return false - } - func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise { - let url = URL(string: uri) - return getItemForUri(uri) - .then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> AVPlayerItem in - guard let self = self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) } - switch videoCacheStatus { - case .missingFileExtension: - DebugLog(""" - Could not generate cache key for uri '\(uri)'. - It is currently not supported to cache urls that do not include a file extension. - The video file will not be cached. - Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md - """) - let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) - return playerItemPrepareText(asset, options, "") + func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise { + let url = URL(string: uri) + return getItemForUri(uri) + .then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> AVPlayerItem in + guard let self = self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) } + switch videoCacheStatus { + case .missingFileExtension: + DebugLog(""" + Could not generate cache key for uri '\(uri)'. + It is currently not supported to cache urls that do not include a file extension. + The video file will not be cached. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) + let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) + return playerItemPrepareText(asset, options, "") - case .unsupportedFileExtension: - DebugLog(""" - Could not generate cache key for uri '\(uri)'. - The file extension of that uri is currently not supported. - The video file will not be cached. 
- Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md - """) - let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) - return playerItemPrepareText(asset, options, "") + case .unsupportedFileExtension: + DebugLog(""" + Could not generate cache key for uri '\(uri)'. + The file extension of that uri is currently not supported. + The video file will not be cached. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) + let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) + return playerItemPrepareText(asset, options, "") - default: - if let cachedAsset = cachedAsset { - DebugLog("Playing back uri '\(uri)' from cache") - // See note in playerItemForSource about not being able to support text tracks & caching - return AVPlayerItem(asset: cachedAsset) - } - } + default: + if let cachedAsset = cachedAsset { + DebugLog("Playing back uri '\(uri)' from cache") + // See note in playerItemForSource about not being able to support text tracks & caching + return AVPlayerItem(asset: cachedAsset) + } + } - let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000) - asset.loaderDelegate = self + let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000) + asset.loaderDelegate = self - /* More granular code to have control over the DVURLAsset - let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url) - resourceLoaderDelegate.delegate = self - let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false) - components?.scheme = DVAssetLoaderDelegate.scheme() - var asset: AVURLAsset? = nil - if let url = components?.url { - asset = AVURLAsset(url: url, options: options) - } - asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main) - */ + /* More granular code to have control over the DVURLAsset + let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url) + resourceLoaderDelegate.delegate = self + let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false) + components?.scheme = DVAssetLoaderDelegate.scheme() + var asset: AVURLAsset? = nil + if let url = components?.url { + asset = AVURLAsset(url: url, options: options) + } + asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main) + */ - return AVPlayerItem(asset: asset) - } - } + return AVPlayerItem(asset: asset) + } + } - func getItemForUri(_ uri: String) -> Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { - return Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { fulfill, _ in - self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in - fulfill((videoCacheStatus, cachedAsset)) - }) + func getItemForUri(_ uri: String) -> Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { + return Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { fulfill, _ in + self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in + fulfill((videoCacheStatus, cachedAsset)) + }) + } } - } - // MARK: - DVAssetLoaderDelegate + // MARK: - DVAssetLoaderDelegate - func dvAssetLoaderDelegate(_: DVAssetLoaderDelegate!, didLoad data: Data!, for url: URL!) 
{ - _videoCache.storeItem(data as Data?, forUri: url.absoluteString, withCallback: { (_: Bool) in - DebugLog("Cache data stored successfully 🎉") - }) - } + func dvAssetLoaderDelegate(_: DVAssetLoaderDelegate!, didLoad data: Data!, for url: URL!) { + _videoCache.storeItem(data as Data?, forUri: url.absoluteString, withCallback: { (_: Bool) in + DebugLog("Cache data stored successfully 🎉") + }) + } } From 2ef3fec7d3111e7f85db016655faeabcdf6c91cc Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Wed, 6 Dec 2023 21:47:37 +0100 Subject: [PATCH 09/10] tmp rename to fix git no diff --- .../java/com/brentvatne/common/{API => api-tmp}/ResizeMode.kt | 0 .../java/com/brentvatne/common/{API => api-tmp}/SubtitleStyle.kt | 0 .../java/com/brentvatne/common/{API => api-tmp}/TimedMetadata.kt | 0 .../src/main/java/com/brentvatne/common/{API => api-tmp}/Track.kt | 0 .../java/com/brentvatne/common/{API => api-tmp}/VideoTrack.kt | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename android/src/main/java/com/brentvatne/common/{API => api-tmp}/ResizeMode.kt (100%) rename android/src/main/java/com/brentvatne/common/{API => api-tmp}/SubtitleStyle.kt (100%) rename android/src/main/java/com/brentvatne/common/{API => api-tmp}/TimedMetadata.kt (100%) rename android/src/main/java/com/brentvatne/common/{API => api-tmp}/Track.kt (100%) rename android/src/main/java/com/brentvatne/common/{API => api-tmp}/VideoTrack.kt (100%) diff --git a/android/src/main/java/com/brentvatne/common/API/ResizeMode.kt b/android/src/main/java/com/brentvatne/common/api-tmp/ResizeMode.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/API/ResizeMode.kt rename to android/src/main/java/com/brentvatne/common/api-tmp/ResizeMode.kt diff --git a/android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt b/android/src/main/java/com/brentvatne/common/api-tmp/SubtitleStyle.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/API/SubtitleStyle.kt rename to android/src/main/java/com/brentvatne/common/api-tmp/SubtitleStyle.kt diff --git a/android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt b/android/src/main/java/com/brentvatne/common/api-tmp/TimedMetadata.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/API/TimedMetadata.kt rename to android/src/main/java/com/brentvatne/common/api-tmp/TimedMetadata.kt diff --git a/android/src/main/java/com/brentvatne/common/API/Track.kt b/android/src/main/java/com/brentvatne/common/api-tmp/Track.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/API/Track.kt rename to android/src/main/java/com/brentvatne/common/api-tmp/Track.kt diff --git a/android/src/main/java/com/brentvatne/common/API/VideoTrack.kt b/android/src/main/java/com/brentvatne/common/api-tmp/VideoTrack.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/API/VideoTrack.kt rename to android/src/main/java/com/brentvatne/common/api-tmp/VideoTrack.kt From 6a91dd205fc0a2b8c2a092f10dc34854bbfc37b6 Mon Sep 17 00:00:00 2001 From: Krzysztof Moch Date: Wed, 6 Dec 2023 21:48:23 +0100 Subject: [PATCH 10/10] refactor: rename folder "API" to "api" --- .../java/com/brentvatne/common/{api-tmp => api}/ResizeMode.kt | 0 .../java/com/brentvatne/common/{api-tmp => api}/SubtitleStyle.kt | 0 .../java/com/brentvatne/common/{api-tmp => api}/TimedMetadata.kt | 0 .../src/main/java/com/brentvatne/common/{api-tmp => api}/Track.kt | 0 .../java/com/brentvatne/common/{api-tmp => api}/VideoTrack.kt | 0 5 files changed, 
0 insertions(+), 0 deletions(-) rename android/src/main/java/com/brentvatne/common/{api-tmp => api}/ResizeMode.kt (100%) rename android/src/main/java/com/brentvatne/common/{api-tmp => api}/SubtitleStyle.kt (100%) rename android/src/main/java/com/brentvatne/common/{api-tmp => api}/TimedMetadata.kt (100%) rename android/src/main/java/com/brentvatne/common/{api-tmp => api}/Track.kt (100%) rename android/src/main/java/com/brentvatne/common/{api-tmp => api}/VideoTrack.kt (100%) diff --git a/android/src/main/java/com/brentvatne/common/api-tmp/ResizeMode.kt b/android/src/main/java/com/brentvatne/common/api/ResizeMode.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/api-tmp/ResizeMode.kt rename to android/src/main/java/com/brentvatne/common/api/ResizeMode.kt diff --git a/android/src/main/java/com/brentvatne/common/api-tmp/SubtitleStyle.kt b/android/src/main/java/com/brentvatne/common/api/SubtitleStyle.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/api-tmp/SubtitleStyle.kt rename to android/src/main/java/com/brentvatne/common/api/SubtitleStyle.kt diff --git a/android/src/main/java/com/brentvatne/common/api-tmp/TimedMetadata.kt b/android/src/main/java/com/brentvatne/common/api/TimedMetadata.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/api-tmp/TimedMetadata.kt rename to android/src/main/java/com/brentvatne/common/api/TimedMetadata.kt diff --git a/android/src/main/java/com/brentvatne/common/api-tmp/Track.kt b/android/src/main/java/com/brentvatne/common/api/Track.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/api-tmp/Track.kt rename to android/src/main/java/com/brentvatne/common/api/Track.kt diff --git a/android/src/main/java/com/brentvatne/common/api-tmp/VideoTrack.kt b/android/src/main/java/com/brentvatne/common/api/VideoTrack.kt similarity index 100% rename from android/src/main/java/com/brentvatne/common/api-tmp/VideoTrack.kt rename to android/src/main/java/com/brentvatne/common/api/VideoTrack.kt