Adapt pixelbuffer width/height in custom frame capturer #53

Merged 2 commits on Jun 1, 2024
Changes from all commits
4 changes: 3 additions & 1 deletion ScreenShare/ScreenShare.entitlements
@@ -3,6 +3,8 @@
 <plist version="1.0">
 <dict>
 	<key>com.apple.security.application-groups</key>
-	<array/>
+	<array>
+		<string>group.io.antmedia.ios.webrtc.sample</string>
+	</array>
 </dict>
 </plist>
4 changes: 3 additions & 1 deletion WebRTC-Sample-App/WebRTC-Sample-App.entitlements
@@ -3,6 +3,8 @@
 <plist version="1.0">
 <dict>
 	<key>com.apple.security.application-groups</key>
-	<array/>
+	<array>
+		<string>group.io.antmedia.ios.webrtc.sample</string>
+	</array>
 </dict>
 </plist>
4 changes: 3 additions & 1 deletion WebRTC-Sample-App/WebRTC-Sample-AppDebug.entitlements
@@ -3,6 +3,8 @@
 <plist version="1.0">
 <dict>
 	<key>com.apple.security.application-groups</key>
-	<array/>
+	<array>
+		<string>group.io.antmedia.ios.webrtc.sample</string>
+	</array>
 </dict>
 </plist>
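All three entitlements files now declare the same application group, which is what lets the host app and the ScreenShare broadcast extension exchange data through a shared container. Below is a minimal sketch of how such a group is typically used, assuming the group id from the diff and a hypothetical "streamId" key; the merged change itself only adds the entitlement.

// Illustration only (not part of this PR): sharing a value between the app
// and the broadcast extension via the app group declared above.
import Foundation

let appGroupId = "group.io.antmedia.ios.webrtc.sample"

// Host app writes a value into the shared suite:
let shared = UserDefaults(suiteName: appGroupId)
shared?.set("stream1", forKey: "streamId")          // "streamId" is an assumed key

// ScreenShare extension reads it back:
let streamId = UserDefaults(suiteName: appGroupId)?.string(forKey: "streamId")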
8 changes: 8 additions & 0 deletions WebRTCiOSSDK/api/AntMediaClient.swift
@@ -54,6 +54,7 @@ open class AntMediaClient: NSObject, AntMediaClientProtocol {
 
 
     internal static var isDebug: Bool = false
+    internal static var isVerbose: Bool = false
     public weak var delegate: AntMediaClientDelegate?
 
     private var wsUrl: String!
@@ -1073,6 +1074,13 @@ open class AntMediaClient: NSObject, AntMediaClientProtocol {
         }
     }
 
+    public static func verbose(_ msg:String) {
+        if (AntMediaClient.isVerbose) {
+            debugPrint("--> AntMediaSDK[verbose]: " + msg);
+        }
+
+    }
+
     public func getStreamInfo()
     {
         if (self.isWebSocketConnected)
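The new verbose(_:) helper complements the existing printf logging: messages routed through it are printed with debugPrint only while the isVerbose flag is set, so per-frame noise stays off by default. A short sketch of the gating behaviour, assuming code that lives inside the WebRTCiOSSDK module (isVerbose is internal, so it cannot be set from an app target):

// Sketch of the verbose gate (illustration, not part of the diff).
AntMediaClient.isVerbose = true
AntMediaClient.verbose("frame dropped")      // printed: --> AntMediaSDK[verbose]: frame dropped

AntMediaClient.isVerbose = false
AntMediaClient.verbose("frame dropped")      // suppressed: the gate is closed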
64 changes: 28 additions & 36 deletions WebRTCiOSSDK/api/webrtc/RTCCustomFrameCapturer.swift
@@ -54,20 +54,36 @@ class RTCCustomFrameCapturer: RTCVideoCapturer {
     public func capture(_ pixelBuffer: CVPixelBuffer, rotation:RTCVideoRotation, timeStampNs: Int64 )
     {
         if ((Double(timeStampNs) - Double(lastSentFrameTimeStampNanoSeconds)) < frameRateIntervalNanoSeconds ) {
-            AntMediaClient.printf("Dropping frame because high fps than the configured fps: \(fps). Incoming timestampNs:\(timeStampNs) last sent timestampNs:\(lastSentFrameTimeStampNanoSeconds) frameRateIntervalNs:\(frameRateIntervalNanoSeconds)");
+            AntMediaClient.verbose("Dropping frame because high fps than the configured fps: \(fps). Incoming timestampNs:\(timeStampNs) last sent timestampNs:\(lastSentFrameTimeStampNanoSeconds) frameRateIntervalNs:\(frameRateIntervalNanoSeconds)");
             return;
             
         }
         
+        let width = Int32(CVPixelBufferGetWidth(pixelBuffer))
+        let height = Int32(CVPixelBufferGetHeight(pixelBuffer))
+        
+        var scaledWidth = (width * Int32(self.targetHeight)) / height;
+        if (scaledWidth % 2 == 1) {
+            scaledWidth+=1;
+        }
+        
         let rtcPixelBuffer = RTCCVPixelBuffer(
-            pixelBuffer: pixelBuffer)
-        
-        let rtcVideoFrame = RTCVideoFrame(buffer: rtcPixelBuffer,
-                                          
-                                          rotation: rotation, timeStampNs: Int64(timeStampNs))
+            pixelBuffer: pixelBuffer,
+            adaptedWidth:scaledWidth,
+            adaptedHeight: Int32(self.targetHeight),
+            cropWidth: width,
+            cropHeight: height,
+            cropX: 0,
+            cropY: 0)
         
-        self.delegate?.capturer(self, didCapture: rtcVideoFrame.newI420())
-        lastSentFrameTimeStampNanoSeconds = timeStampNs;
+        let rtcVideoFrame = RTCVideoFrame(
+            buffer: rtcPixelBuffer,
+            rotation: rotation,
+            timeStampNs: Int64(timeStampNs)
+        )
+        
+        self.delegate?.capturer(self, didCapture: rtcVideoFrame.newI420())
+        lastSentFrameTimeStampNanoSeconds = Int64(timeStampNs);
     }
     
     public func capture(_ sampleBuffer: CMSampleBuffer, externalRotation:Int = -1) {
@@ -83,25 +99,16 @@
             kNanosecondsPerSecond;
         
         if ((Double(timeStampNs) - Double(lastSentFrameTimeStampNanoSeconds)) < frameRateIntervalNanoSeconds ) {
-            AntMediaClient.printf("Dropping frame because high fps than the configured fps: \(fps). Incoming timestampNs:\(timeStampNs) last sent timestampNs:\(lastSentFrameTimeStampNanoSeconds) frameRateIntervalNs:\(frameRateIntervalNanoSeconds)");
+            AntMediaClient.verbose("Dropping frame because high fps than the configured fps: \(fps). Incoming timestampNs:\(timeStampNs) last sent timestampNs:\(lastSentFrameTimeStampNanoSeconds) frameRateIntervalNs:\(frameRateIntervalNanoSeconds)");
             return;
             
         }
         
         let _pixelBuffer:CVPixelBuffer? = CMSampleBufferGetImageBuffer(sampleBuffer);
         
         
         if let pixelBuffer = _pixelBuffer
         {
-            
-            let width = Int32(CVPixelBufferGetWidth(pixelBuffer))
-            let height = Int32(CVPixelBufferGetHeight(pixelBuffer))
-            
-            var scaledWidth = (width * Int32(self.targetHeight)) / height;
-            if (scaledWidth % 2 == 1) {
-                scaledWidth+=1;
-            }
-            
             //NSLog("Incoming frame width:\(width) height:\(height) adapted width:\(scaledWidth) height:\(self.targetHeight)")
             
             var rotation = RTCVideoRotation._0;
@@ -140,24 +147,9 @@
                 rotation = RTCVideoRotation(rawValue:externalRotation) ?? RTCVideoRotation._0;
             }
             
-            //NSLog("Device orientation width: %d, height:%d ", width, height);
-            
-            let rtcPixelBuffer = RTCCVPixelBuffer(
-                pixelBuffer: pixelBuffer,
-                adaptedWidth:scaledWidth,
-                adaptedHeight: Int32(self.targetHeight),
-                cropWidth: width,
-                cropHeight: height,
-                cropX: 0,
-                cropY: 0)
-            
-            let rtcVideoFrame = RTCVideoFrame(buffer: rtcPixelBuffer,
-                                              
-                                              rotation: rotation, timeStampNs: Int64(timeStampNs))
-            
-            self.delegate?.capturer(self, didCapture: rtcVideoFrame.newI420())
-            lastSentFrameTimeStampNanoSeconds = Int64(timeStampNs);
-            
+            capture(pixelBuffer, rotation: rotation, timeStampNs: Int64(timeStampNs))
+            //NSLog("Device orientation width: %d, height:%d ", width, height);
+            
         }
         else {
             NSLog("Cannot get image buffer");
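With this change both capture paths produce the same output geometry: the CMSampleBuffer overload now only extracts the CVPixelBuffer and resolves the rotation, then forwards to capture(_:rotation:timeStampNs:), where the frame is throttled to the configured fps and adapted so that its height matches targetHeight, the aspect ratio is preserved, and the width is rounded up to an even value (even dimensions are expected by the I420/NV12 pixel formats). A standalone sketch of that sizing rule, with the function name and the example numbers chosen here purely for illustration:

// Sketch of the adapted-size rule used by the capturer (not SDK code).
func adaptedSize(width: Int32, height: Int32, targetHeight: Int32) -> (width: Int32, height: Int32) {
    // Scale the width so the aspect ratio is kept for the fixed target height.
    var scaledWidth = (width * targetHeight) / height
    // Bump odd results to the next even number so the encoder gets even dimensions.
    if scaledWidth % 2 == 1 {
        scaledWidth += 1
    }
    return (scaledWidth, targetHeight)
}

// Example: a 1080x1920 portrait frame adapted to a 720-pixel-high stream.
let size = adaptedSize(width: 1080, height: 1920, targetHeight: 720)
print(size)  // (width: 406, height: 720): 1080 * 720 / 1920 = 405, rounded up to 406

Keeping the adaptation in the CVPixelBuffer overload also means custom capturers that feed raw pixel buffers directly, rather than ReplayKit sample buffers, now get the same downscaling instead of sending full-resolution frames.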