Skip to content

Commit

Permalink
Video filters
Browse files Browse the repository at this point in the history
  • Loading branch information
Brazol committed Nov 25, 2024
1 parent 3e2fe07 commit a90358f
Show file tree
Hide file tree
Showing 25 changed files with 1,434 additions and 22 deletions.
Binary file added dogfooding/assets/bg1.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added dogfooding/assets/bg2.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added dogfooding/assets/bg3.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
166 changes: 165 additions & 1 deletion dogfooding/lib/widgets/settings_menu.dart
Original file line number Diff line number Diff line change
Expand Up @@ -53,19 +53,26 @@ class SettingsMenu extends StatefulWidget {
class _SettingsMenuState extends State<SettingsMenu> {
final _deviceNotifier = RtcMediaDeviceNotifier.instance;
StreamSubscription<List<RtcMediaDevice>>? _deviceChangeSubscription;
late StreamVideoEffectsManager _videoEffectsManager;

var _audioOutputs = <RtcMediaDevice>[];
var _audioInputs = <RtcMediaDevice>[];

bool showAudioOutputs = false;
bool showAudioInputs = false;
bool showIncomingQuality = false;
bool showBackgroundEffects = false;

bool get showMainSettings =>
!showAudioOutputs && !showAudioInputs && !showIncomingQuality;
!showAudioOutputs &&
!showAudioInputs &&
!showIncomingQuality &&
!showBackgroundEffects;

@override
void initState() {
super.initState();
_videoEffectsManager = StreamVideoEffectsManager(widget.call);
_deviceChangeSubscription = _deviceNotifier.onDeviceChange.listen(
(devices) {
_audioOutputs = devices
Expand Down Expand Up @@ -105,6 +112,7 @@ class _SettingsMenuState extends State<SettingsMenu> {
if (showAudioOutputs) ..._buildAudioOutputsMenu(),
if (showAudioInputs) ..._buildAudioInputsMenu(),
if (showIncomingQuality) ..._buildIncomingQualityMenu(),
if (showBackgroundEffects) ..._buildBackgroundFiltersMenu(),
]),
);
}
Expand Down Expand Up @@ -182,6 +190,24 @@ class _SettingsMenuState extends State<SettingsMenu> {
},
),
const SizedBox(height: 16),
StandardActionMenuItem(
icon: Icons.auto_awesome,
label: 'Set Background Effect',
trailing: Text(
_videoEffectsManager.currentEffect != null ? 'On' : 'Off',
style: TextStyle(
color: _videoEffectsManager.currentEffect != null
? AppColorPalette.appGreen
: null,
),
),
onPressed: () {
setState(() {
showBackgroundEffects = true;
});
},
),
const SizedBox(height: 16),
StandardActionMenuItem(
icon: Icons.high_quality_sharp,
label: 'Incoming video quality',
Expand Down Expand Up @@ -322,6 +348,144 @@ class _SettingsMenuState extends State<SettingsMenu> {
];
}

/// Builds the "background effects" sub-menu: a header with a back arrow and
/// a 'Clear' action, a row of three blur intensities, and a row of three
/// image-background thumbnails.
///
/// The blur options and image tiles were previously three copy-pasted widget
/// subtrees each; they are now produced by the [_buildBlurOption] and
/// [_buildImageBackgroundOption] helpers below.
List<Widget> _buildBackgroundFiltersMenu() {
  return [
    Row(
      mainAxisAlignment: MainAxisAlignment.spaceBetween,
      children: [
        // Back arrow: return to the main settings list.
        GestureDetector(
          onTap: () {
            setState(() {
              showBackgroundEffects = false;
            });
          },
          child: const Align(
            alignment: Alignment.centerLeft,
            child: Icon(Icons.arrow_back, size: 24),
          ),
        ),
        // Removes whatever filter is currently active.
        // NOTE(review): no setState here — the 'On'/'Off' indicator in the
        // main menu only refreshes on the next rebuild (e.g. after tapping
        // back). Confirm whether an immediate rebuild is wanted.
        TextButton(
          child: const Text('Clear'),
          onPressed: () {
            _videoEffectsManager.disableAllFilters();
          },
        )
      ],
    ),
    const SizedBox(height: 16),
    const Text('Background Blur',
        style: TextStyle(fontWeight: FontWeight.bold)),
    const SizedBox(height: 16),
    Row(
      mainAxisAlignment: MainAxisAlignment.spaceEvenly,
      children: [
        // Icon size grows with blur strength as a visual hint.
        _buildBlurOption('Light', 30, BlurIntensity.light),
        _buildBlurOption('Medium', 40, BlurIntensity.medium),
        _buildBlurOption('Heavy', 50, BlurIntensity.heavy),
      ],
    ),
    const SizedBox(height: 16),
    const Text('Image Background',
        style: TextStyle(fontWeight: FontWeight.bold)),
    const SizedBox(height: 16),
    Row(
      mainAxisAlignment: MainAxisAlignment.spaceEvenly,
      children: [
        _buildImageBackgroundOption('assets/bg1.jpg'),
        _buildImageBackgroundOption('assets/bg2.jpg'),
        _buildImageBackgroundOption('assets/bg3.jpg'),
      ],
    ),
  ];
}

/// One blur-intensity choice: a blur icon of [iconSize] with [label] below.
/// Tapping applies a background blur of the given [intensity].
Widget _buildBlurOption(
  String label,
  double iconSize,
  BlurIntensity intensity,
) {
  return Column(
    children: [
      SizedBox(
        height: 60,
        child: Center(
          child: IconButton(
            icon: Icon(
              Icons.blur_on,
              size: iconSize,
            ),
            onPressed: () =>
                _videoEffectsManager.applyBackgroundBlurFilter(intensity),
          ),
        ),
      ),
      Text(label),
    ],
  );
}

/// A rounded, tappable thumbnail for [assetPath]; tapping applies that image
/// as the virtual background.
Widget _buildImageBackgroundOption(String assetPath) {
  return InkWell(
    onTap: () => _videoEffectsManager.applyBackgroundImageFilter(assetPath),
    child: ClipRRect(
      borderRadius: BorderRadius.circular(8.0),
      child: Image.asset(
        assetPath,
        fit: BoxFit.cover,
        width: 72,
        height: 102,
      ),
    ),
  );
}

VideoResolution? getIncomingVideoResolution(IncomingVideoQuality quality) {
switch (quality) {
case IncomingVideoQuality.auto:
Expand Down
3 changes: 3 additions & 0 deletions packages/stream_video/lib/src/webrtc/peer_connection.dart
Original file line number Diff line number Diff line change
Expand Up @@ -348,6 +348,8 @@ class StreamPeerConnection extends Disposable {
Duration(milliseconds: _reportingIntervalMs),
(_) async {
try {
if (_statsController.isClosed) return;

final stats = await pc.getStats();
final rtcPrintableStats = stats.toPrintableRtcStats();
final rawStats = stats.toRawStats();
Expand Down Expand Up @@ -386,6 +388,7 @@ class StreamPeerConnection extends Disposable {
onIceCandidate = null;
onTrack = null;
_pendingCandidates.clear();
await _statsController.close();
await pc.dispose();
return await super.dispose();
}
Expand Down
6 changes: 6 additions & 0 deletions packages/stream_video_flutter/android/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,12 @@ android {
implementation 'androidx.media:media:1.1.0'
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.2'
implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
implementation "io.github.crow-misia.libyuv:libyuv-android:0.34.0"
implementation "androidx.annotation:annotation:1.8.0"
implementation 'com.google.mlkit:segmentation-selfie:16.0.0-beta5'
implementation "com.github.android:renderscript-intrinsics-replacement-toolkit:344be3f"
implementation 'io.github.webrtc-sdk:android:125.6422.03'
}

testOptions {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ import io.getstream.video.flutter.stream_video_flutter.service.StreamCallService
import io.getstream.video.flutter.stream_video_flutter.service.StreamScreenShareService
import io.getstream.video.flutter.stream_video_flutter.service.notification.NotificationPayload
import io.getstream.video.flutter.stream_video_flutter.service.utils.putBoolean
import com.cloudwebrtc.webrtc.videoEffects.ProcessorProvider
import io.getstream.video.flutter.stream_video_flutter.videoFilters.factories.BackgroundBlurFactory
import io.getstream.video.flutter.stream_video_flutter.videoFilters.factories.BlurIntensity
import io.getstream.video.flutter.stream_video_flutter.videoFilters.factories.VirtualBackgroundFactory

class MethodCallHandlerImpl(
appContext: Context,
Expand All @@ -34,6 +38,7 @@ class MethodCallHandlerImpl(
private val logger by taggedLogger(tag = "StreamMethodHandler")

private val serviceManager: ServiceManager = ServiceManagerImpl(appContext.applicationContext)
private val applicationContext = appContext.applicationContext

private var permissionCallback: ((Result<Unit>) -> Unit)? = null

Expand Down Expand Up @@ -68,6 +73,38 @@ class MethodCallHandlerImpl(
override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
logger.d { "[onMethodCall] method: ${call.method}" }
when (call.method) {
"isBackgroundEffectSupported" -> {
result.success(true)
}
"registerBlurEffectProcessors" -> {
ProcessorProvider.addProcessor(
"BackgroundBlurLight",
BackgroundBlurFactory(BlurIntensity.LIGHT)
)

ProcessorProvider.addProcessor(
"BackgroundBlurMedium",
BackgroundBlurFactory(BlurIntensity.MEDIUM)
)

ProcessorProvider.addProcessor(
"BackgroundBlurHeavy",
BackgroundBlurFactory(BlurIntensity.HEAVY)
)

result.success(null)
}
"registerImageEffectProcessors" -> {
val backgroundImageUrl = call.argument<String>("backgroundImageUrl")
backgroundImageUrl?.let {
ProcessorProvider.addProcessor(
"VirtualBackground-$backgroundImageUrl",
VirtualBackgroundFactory(applicationContext, backgroundImageUrl)
)
}

result.success(null)
}
"enablePictureInPictureMode" -> {
val activity = getActivity()
putBoolean(activity, PictureInPictureHelper.PIP_ENABLED_PREF_KEY, true)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
package io.getstream.video.flutter.stream_video_flutter.videoFilters.common

import android.graphics.Bitmap

/**
 * Base class for video filters that operate on a [Bitmap] of each frame.
 *
 * Less performant than a raw-frame filter because the pipeline performs
 * YUV <-> ARGB conversions internally for every frame to provide the bitmap.
 */
abstract class BitmapVideoFilter {
    /**
     * Applies the filter to a single frame. Implementations are expected to
     * modify [videoFrameBitmap] in place (the function returns nothing, so
     * mutation is the only way the result can be observed).
     */
    abstract fun applyFilter(videoFrameBitmap: Bitmap)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
package io.getstream.video.flutter.stream_video_flutter.videoFilters.common

import android.graphics.Bitmap
import android.graphics.Matrix
import com.google.mlkit.vision.segmentation.SegmentationMask

/**
 * Copies the pixels of [source] that belong to the requested [segment] into
 * [destination], guided by the per-pixel confidence values in [segmentationMask].
 *
 * The mask buffer is consumed sequentially (one float per mask pixel, row-major),
 * so it is rewound before reading. Pixels not belonging to [segment] are left
 * as 0 (transparent black) in the destination.
 *
 * NOTE(review): destination pixels are indexed with mask coordinates
 * (`y * destination.width + x`), which assumes the destination bitmap has the
 * same dimensions as the mask — confirm at the call sites.
 *
 * @param segment which side of the segmentation to copy (foreground or background).
 * @param source the full camera frame to copy pixels from.
 * @param destination receives only the selected segment's pixels.
 * @param segmentationMask ML Kit mask holding a foreground confidence per pixel.
 * @param confidenceThreshold confidence at or above which a pixel counts as foreground.
 */
internal fun copySegment(
    segment: Segment,
    source: Bitmap,
    destination: Bitmap,
    segmentationMask: SegmentationMask,
    confidenceThreshold: Double,
) {
    // The mask may be lower resolution than the source; these factors map
    // mask coordinates onto source coordinates.
    val scaleBetweenSourceAndMask = getScalingFactors(
        widths = Pair(source.width, segmentationMask.width),
        heights = Pair(source.height, segmentationMask.height),
    )

    // Reset the read position — the buffer is read float-by-float below.
    segmentationMask.buffer.rewind()

    val sourcePixels = IntArray(source.width * source.height)
    source.getPixels(sourcePixels, 0, source.width, 0, 0, source.width, source.height)
    // Entries never written stay 0, clearing the non-selected segment.
    val destinationPixels = IntArray(destination.width * destination.height)

    for (y in 0 until segmentationMask.height) {
        for (x in 0 until segmentationMask.width) {
            // Foreground confidence for this mask pixel (sequential read).
            val confidence = segmentationMask.buffer.float

            // Copy only pixels in the requested segment:
            // BACKGROUND -> below threshold, FOREGROUND -> at/above threshold.
            if (((segment == Segment.BACKGROUND) && confidence < confidenceThreshold) ||
                ((segment == Segment.FOREGROUND) && confidence >= confidenceThreshold)
            ) {
                // Nearest-neighbour sample from the (possibly larger) source.
                val scaledX = (x * scaleBetweenSourceAndMask.first).toInt()
                val scaledY = (y * scaleBetweenSourceAndMask.second).toInt()
                destinationPixels[y * destination.width + x] =
                    sourcePixels[scaledY * source.width + scaledX]
            }
        }
    }

    destination.setPixels(
        destinationPixels,
        0,
        destination.width,
        0,
        0,
        destination.width,
        destination.height,
    )
}

/** Which part of a segmented frame a pixel belongs to. */
internal enum class Segment {
    FOREGROUND, BACKGROUND
}

/**
 * Returns the (x, y) scale factors mapping mask coordinates onto source
 * coordinates. Each pair holds (source dimension, mask dimension); the factor
 * is source / mask as a Float.
 */
private fun getScalingFactors(widths: Pair<Int, Int>, heights: Pair<Int, Int>): Pair<Float, Float> {
    val (sourceWidth, maskWidth) = widths
    val (sourceHeight, maskHeight) = heights
    return Pair(
        sourceWidth.toFloat() / maskWidth,
        sourceHeight.toFloat() / maskHeight,
    )
}

/**
 * Builds the transform that maps [mask] coordinates onto [bitmap] coordinates.
 *
 * When the mask already matches the bitmap's dimensions the identity matrix is
 * returned; otherwise the matrix pre-scales by the width/height ratios.
 */
internal fun newSegmentationMaskMatrix(bitmap: Bitmap, mask: SegmentationMask): Matrix {
    val matrix = Matrix()
    val sizesDiffer = mask.width != bitmap.width || mask.height != bitmap.height
    if (sizesDiffer) {
        val (scaleX, scaleY) = getScalingFactors(
            Pair(bitmap.width, mask.width),
            Pair(bitmap.height, mask.height),
        )
        matrix.preScale(scaleX, scaleY)
    }
    return matrix
}
Loading

0 comments on commit a90358f

Please sign in to comment.