Commit 187d1e0

Video processing example (#55)
SDK code not published yet. livekit/client-sdk-swift#530
1 parent 7228481 commit 187d1e0
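
The pattern this example exercises: a type conforming to VideoProcessor is assigned to the camera track's capturer.processor and gets a chance to transform (or drop) every outgoing VideoFrame. As rough orientation before the diff, here is a minimal sketch that is not part of this commit, assuming the VideoProcessor protocol and capturer.processor property from livekit/client-sdk-swift#530 behave as they are used below; the PassthroughProcessor and ProcessorToggle names are hypothetical.

import LiveKit

// Hypothetical no-op processor: returning the frame unchanged keeps the
// pipeline intact; returning nil would drop the frame.
final class PassthroughProcessor: VideoProcessor {
    func process(frame: VideoFrame) -> VideoFrame? {
        frame
    }
}

// Hypothetical helper mirroring what RoomContext does in the diff below.
final class ProcessorToggle {
    // Keep a strong reference in case the capturer holds its processor weakly.
    private let processor = PassthroughProcessor()

    func setProcessing(_ enabled: Bool, in room: Room) {
        guard let track = room.localParticipant.firstCameraVideoTrack as? LocalVideoTrack else { return }
        track.capturer.processor = enabled ? processor : nil
    }
}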

2 files changed: +103 -0 lines changed


Multiplatform/Controllers/RoomContext.swift (+99)
@@ -79,6 +79,14 @@ final class RoomContext: ObservableObject {
 
     @Published var textFieldString: String = ""
 
+    @Published var isVideoProcessingEnabled: Bool = false {
+        didSet {
+            if let track = room.localParticipant.firstCameraVideoTrack as? LocalVideoTrack {
+                track.capturer.processor = isVideoProcessingEnabled ? self : nil
+            }
+        }
+    }
+
     var _connectTask: Task<Void, Error>?
 
     public init(store: ValueStore<Preferences>) {
@@ -289,4 +297,95 @@ extension RoomContext: RoomDelegate {
     func room(_: Room, trackPublication _: TrackPublication, didUpdateE2EEState state: E2EEState) {
         print("didUpdateE2EEState: \(state)")
     }
+
+    func room(_: Room, participant _: LocalParticipant, didPublishTrack publication: LocalTrackPublication) {
+        print("didPublishTrack: \(publication)")
+        guard let localVideoTrack = publication.track as? LocalVideoTrack, localVideoTrack.source == .camera else { return }
+
+        // Attach example processor.
+        localVideoTrack.capturer.processor = isVideoProcessingEnabled ? self : nil
+    }
+}
+
+extension RoomContext: VideoProcessor {
+    func process(frame: VideoFrame) -> VideoFrame? {
+        guard let pixelBuffer = frame.toCVPixelBuffer() else {
+            print("Failed to get pixel buffer")
+            return nil
+        }
+
+        // Do something with the pixel buffer.
+        guard let newPixelBuffer = processPixelBuffer(pixelBuffer) else {
+            print("Failed to process the pixel buffer")
+            return nil
+        }
+
+        // Re-construct a VideoFrame
+        return VideoFrame(dimensions: frame.dimensions,
+                          rotation: frame.rotation,
+                          timeStampNs: frame.timeStampNs,
+                          buffer: CVPixelVideoBuffer(pixelBuffer: newPixelBuffer))
+    }
+}
+
+// Processing example
+func processPixelBuffer(_ pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? {
+    // Lock the buffer for reading
+    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
+
+    // Create CIImage from the pixel buffer
+    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
+
+    // Create Core Image context
+    let device = MTLCreateSystemDefaultDevice()!
+    let context = CIContext(mtlDevice: device, options: nil)
+
+    // Apply dramatic filters
+
+    // 1. Gaussian blur effect
+    let blurFilter = CIFilter(name: "CIGaussianBlur")!
+    blurFilter.setValue(ciImage, forKey: kCIInputImageKey)
+    blurFilter.setValue(8.0, forKey: kCIInputRadiusKey) // Larger radius = more blur
+
+    // 2. Color inversion
+    // let colorInvertFilter = CIFilter(name: "CIColorInvert")!
+    // colorInvertFilter.setValue(blurFilter.outputImage, forKey: kCIInputImageKey)
+
+    // 3. Add a sepia tone effect
+    // let sepiaFilter = CIFilter(name: "CISepiaTone")!
+    // sepiaFilter.setValue(ciImage, forKey: kCIInputImageKey)
+    // sepiaFilter.setValue(0.8, forKey: kCIInputIntensityKey)
+
+    let pixelBufferAttributes: [String: Any] = [
+        kCVPixelBufferMetalCompatibilityKey as String: true,
+    ]
+
+    // Create output pixel buffer
+    var outputPixelBuffer: CVPixelBuffer?
+    let status = CVPixelBufferCreate(
+        kCFAllocatorDefault,
+        CVPixelBufferGetWidth(pixelBuffer),
+        CVPixelBufferGetHeight(pixelBuffer),
+        CVPixelBufferGetPixelFormatType(pixelBuffer),
+        pixelBufferAttributes as CFDictionary,
+        &outputPixelBuffer
+    )
+
+    guard status == kCVReturnSuccess, let outputBuffer = outputPixelBuffer else {
+        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
+        return nil
+    }
+
+    // Render the processed image to the output buffer
+    context.render(
+        blurFilter.outputImage!,
+        to: outputBuffer,
+        bounds: ciImage.extent,
+        colorSpace: CGColorSpaceCreateDeviceRGB()
+    )
+
+    // Unlock the original buffer
+    CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
+
+    return outputBuffer
 }
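
One performance note on the example above: processPixelBuffer builds a new MTLDevice and CIContext on every frame, and CIContext creation is expensive, so for real per-frame work the context should be created once and reused (the force-unwrapped CIFilter lookups could likewise become guard lets). A small sketch of that idea, not part of the commit; the FilterContext name is hypothetical.

import CoreImage
import Metal

// Hypothetical cache: create the Core Image context once and reuse it for
// every frame instead of allocating a device and context per call.
enum FilterContext {
    static let shared: CIContext = {
        if let device = MTLCreateSystemDefaultDevice() {
            return CIContext(mtlDevice: device, options: nil)
        }
        return CIContext(options: nil) // software fallback when Metal is unavailable
    }()
}

// Inside processPixelBuffer, FilterContext.shared would replace the per-call
// MTLCreateSystemDefaultDevice() / CIContext(mtlDevice:) pair.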

Multiplatform/RoomView.swift (+4)
@@ -492,6 +492,10 @@ struct RoomView: View {
             Divider()
         }
 
+        Group {
+            Toggle("Video processing", isOn: $roomCtx.isVideoProcessingEnabled)
+        }
+
         #if os(macOS)
         Group {
             Picker("Output device", selection: $appCtx.outputDevice) {
