diff --git a/README.md b/README.md
index 019372b51d..81f4305f83 100644
--- a/README.md
+++ b/README.md
@@ -115,6 +115,7 @@ Video roadmap and changelog is available [here](https://github.com/GetStream/pro
### 0.4.0 milestone
+- [X] Screensharing from mobile
- [ ] Complete Livestreaming APIs and Tutorials for hosting & watching
- [ ] Android SDK development.md cleanup (Daniel)
- [ ] Upgrade to more recent versions of webrtc (Kanat)
@@ -131,7 +132,6 @@ Video roadmap and changelog is available [here](https://github.com/GetStream/pro
- [ ] Testing on more devices
- [ ] Enable SFU switching
-- [ ] Screensharing from mobile
- [ ] Camera controls
- [ ] Tap to focus
- [ ] H264 workaround on Samsung 23 (see https://github.com/livekit/client-sdk-android/blob/main/livekit-android-sdk/src/main/java/io/livekit/android/webrtc/SimulcastVideoEncoderFactoryWrapper.kt#L34 and
diff --git a/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt b/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt
index 80701ff629..143a783e84 100644
--- a/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt
+++ b/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt
@@ -6,9 +6,9 @@ object Configuration {
const val minSdk = 24
const val majorVersion = 0
const val minorVersion = 3
- const val patchVersion = 2
+ const val patchVersion = 3
const val versionName = "$majorVersion.$minorVersion.$patchVersion"
- const val versionCode = 7
+ const val versionCode = 8
const val snapshotVersionName = "$majorVersion.$minorVersion.${patchVersion + 1}-SNAPSHOT"
const val artifactGroup = "io.getstream"
const val streamVideoCallGooglePlayVersion = "1.0.0"
diff --git a/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx b/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx
index b14fdc813c..b1a3df70a1 100644
--- a/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx
+++ b/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx
@@ -31,7 +31,7 @@ If you're new to android, note that there are 2 `build.gradle` files, you want t
```kotlin
dependencies {
// Stream Video Compose SDK
- implementation("io.getstream:stream-video-android-compose:0.3.2")
+ implementation("io.getstream:stream-video-android-compose:0.3.3")
// Optionally add Jetpack Compose if Android studio didn't automatically include them
implementation(platform("androidx.compose:compose-bom:2023.08.00"))
diff --git a/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx b/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx
index 8022966901..199abce08e 100644
--- a/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx
+++ b/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx
@@ -35,7 +35,7 @@ If you're new to android, note that there are 2 `build.gradle` files, you want t
```groovy
dependencies {
// Stream Video Compose SDK
- implementation("io.getstream:stream-video-android-compose:0.3.2")
+ implementation("io.getstream:stream-video-android-compose:0.3.3")
// Jetpack Compose (optional/ android studio typically adds them when you create a new project)
implementation(platform("androidx.compose:compose-bom:2023.08.00"))
diff --git a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx b/docusaurus/docs/Android/02-tutorials/03-livestream.mdx
index 793568d6ab..80418cc4d4 100644
--- a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx
+++ b/docusaurus/docs/Android/02-tutorials/03-livestream.mdx
@@ -35,7 +35,7 @@ If you're new to android, note that there are 2 `build.gradle` files, you want t
```kotlin
dependencies {
// Stream Video Compose SDK
- implementation("io.getstream:stream-video-android-compose:0.3.2")
+ implementation("io.getstream:stream-video-android-compose:0.3.3")
// Jetpack Compose (optional/ android studio typically adds them when you create a new project)
implementation(platform("androidx.compose:compose-bom:2023.08.00"))
diff --git a/docusaurus/docs/Android/06-advanced/04-screen-sharing.mdx b/docusaurus/docs/Android/06-advanced/04-screen-sharing.mdx
new file mode 100644
index 0000000000..8611dc9bc2
--- /dev/null
+++ b/docusaurus/docs/Android/06-advanced/04-screen-sharing.mdx
@@ -0,0 +1,57 @@
+---
+title: Screen sharing
+description: Setup for screen sharing
+---
+
+## Introduction
+
+The Stream Video Android SDK has support for screen sharing from an Android device. The SDK is using the [Android Media Projection API](https://developer.android.com/guide/topics/large-screens/media-projection) for the capture.
+
+In order for a user to be able to share their screen, they must have the `screenshare` capability configured for the call they are in.
+
+## How to start sharing your screen
+
+You need to be in an active call (have a `Call` instance in Active call state) to start screen sharing.
+
+You must ask the user for screen sharing permission before you can start sharing the screen. The permission is requested by using the [Media Projection API](https://developer.android.com/guide/topics/large-screens/media-projection). And then use the returned intent data from the permission result and call `Call.startScreenSharing(intentData)`.
+
+An example implementation:
+
+```kotlin
+val startMediaProjection = registerForActivityResult(StartActivityForResult()) { result ->
+    if (result.resultCode == Activity.RESULT_OK && result.data != null) {
+        call.startScreenSharing(result.data!!)
+    }
+}
+
+val mediaProjectionManager = context.getSystemService(MediaProjectionManager::class.java)
+startMediaProjection.launch(mediaProjectionManager.createScreenCaptureIntent())
+```
+
+You can check if screen sharing is currently active by observing `call.screenShare.isEnabled`.
+
+## Stopping screen sharing
+
+Screen sharing can be stopped with `Call.stopScreenSharing()`. It is automatically stopped if the call state goes into Inactive state.
+
+The user can also disable screen sharing directly in the system settings (depending on the OEM there is usually a button in the notification bar for disabling screen sharing).
+
+And the screen sharing can also be disabled through the screen sharing notification action button (described in next section).
+
+## Screen sharing notification
+
+A notification is always displayed to the user when the screen sharing is active. The notification itself can't be hidden and is required by the Android OS. The notification title and description can be customised.
+
+Override string `stream_video_screen_sharing_notification_title` and `stream_video_screen_sharing_notification_description` to customise the notification text.
+
+There is also a "Stop screen sharing" action button on the notification, the text of the button can be modified by overriding `stream_video_screen_sharing_notification_action_stop`.
+
+All notifications in Android need to have a notification channel. The Stream Video Android SDK will automatically create a new channel for the screen sharing notification. You can customise the channel title and description (this is visible to the user in the system application settings). Override `stream_video_screen_sharing_notification_channel_title` and `stream_video_screen_sharing_notification_channel_description`.
+
+```xml
+You are screen sharing
+
+Stop screen sharing
+Screen-sharing
+Required to be enabled for screen sharing
+```
diff --git a/docusaurus/docs/Android/06-advanced/04-chat-with-video.mdx b/docusaurus/docs/Android/06-advanced/05-chat-with-video.mdx
similarity index 99%
rename from docusaurus/docs/Android/06-advanced/04-chat-with-video.mdx
rename to docusaurus/docs/Android/06-advanced/05-chat-with-video.mdx
index 306cae83be..0cd918b5d8 100644
--- a/docusaurus/docs/Android/06-advanced/04-chat-with-video.mdx
+++ b/docusaurus/docs/Android/06-advanced/05-chat-with-video.mdx
@@ -31,7 +31,7 @@ Let the project sync. It should have all the dependencies required for you to fi
```groovy
dependencies {
// Stream Video Compose SDK
- implementation("io.getstream:stream-video-android-compose:0.3.2")
+ implementation("io.getstream:stream-video-android-compose:0.3.3")
// Stream Chat
implementation(libs.stream.chat.compose)
diff --git a/docusaurus/docs/Android/06-advanced/06-recording.mdx b/docusaurus/docs/Android/06-advanced/07-recording.mdx
similarity index 100%
rename from docusaurus/docs/Android/06-advanced/06-recording.mdx
rename to docusaurus/docs/Android/06-advanced/07-recording.mdx
diff --git a/docusaurus/docs/Android/06-advanced/08-datastore.mdx b/docusaurus/docs/Android/06-advanced/09-datastore.mdx
similarity index 100%
rename from docusaurus/docs/Android/06-advanced/08-datastore.mdx
rename to docusaurus/docs/Android/06-advanced/09-datastore.mdx
diff --git a/dogfooding/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt b/dogfooding/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt
index ac2c9f6c7f..b029bfedfc 100644
--- a/dogfooding/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt
+++ b/dogfooding/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt
@@ -16,7 +16,11 @@
package io.getstream.video.android.ui.call
+import android.app.Activity
+import android.media.projection.MediaProjectionManager
import android.widget.Toast
+import androidx.activity.compose.rememberLauncherForActivityResult
+import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.foundation.background
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.Column
@@ -29,6 +33,8 @@ import androidx.compose.material.Card
import androidx.compose.material.Icon
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
+import androidx.compose.runtime.collectAsState
+import androidx.compose.runtime.getValue
import androidx.compose.runtime.rememberCoroutineScope
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
@@ -54,6 +60,21 @@ internal fun SettingsMenu(
val reactions =
listOf(":fireworks:", ":hello:", ":raise-hand:", ":like:", ":hate:", ":smile:", ":heart:")
+ val screenSharePermissionResult = rememberLauncherForActivityResult(
+ contract = ActivityResultContracts.StartActivityForResult(),
+ onResult = {
+ if (it.resultCode == Activity.RESULT_OK && it.data != null) {
+ call.startScreenSharing(it.data!!)
+ }
+ onDismissed.invoke()
+ },
+ )
+
+ val isScreenSharing by call.screenShare.isEnabled.collectAsState()
+ val screenShareButtonText = if (isScreenSharing) {
+ "Stop screen-sharing"
+ } else { "Start screen-sharing" }
+
Popup(
alignment = Alignment.BottomStart,
offset = IntOffset(30, -200),
@@ -92,6 +113,37 @@ internal fun SettingsMenu(
Spacer(modifier = Modifier.height(12.dp))
+ Row(
+ modifier = Modifier.clickable {
+ if (!isScreenSharing) {
+ scope.launch {
+ val mediaProjectionManager = context.getSystemService(
+ MediaProjectionManager::class.java,
+ )
+ screenSharePermissionResult.launch(
+ mediaProjectionManager.createScreenCaptureIntent(),
+ )
+ }
+ } else {
+ call.stopScreenSharing()
+ }
+ },
+ ) {
+ Icon(
+ painter = painterResource(id = R.drawable.stream_video_ic_screensharing),
+ tint = VideoTheme.colors.textHighEmphasis,
+ contentDescription = null,
+ )
+
+ Text(
+ modifier = Modifier.padding(start = 20.dp),
+ text = screenShareButtonText,
+ color = VideoTheme.colors.textHighEmphasis,
+ )
+ }
+
+ Spacer(modifier = Modifier.height(12.dp))
+
if (showDebugOptions) {
Row(
modifier = Modifier.clickable {
diff --git a/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/pip/PictureInPicture.kt b/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/pip/PictureInPicture.kt
index 16360d5a92..fafa562436 100644
--- a/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/pip/PictureInPicture.kt
+++ b/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/pip/PictureInPicture.kt
@@ -34,7 +34,7 @@ internal fun enterPictureInPicture(context: Context, call: Call) {
val screenSharing = call.state.screenSharingSession.value
val aspect =
- if (currentOrientation == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT && screenSharing == null) {
+ if (currentOrientation == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT && (screenSharing == null || screenSharing.participant.isLocal)) {
Rational(9, 16)
} else {
Rational(16, 9)
diff --git a/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/activecall/CallContent.kt b/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/activecall/CallContent.kt
index 252e81ad5d..a95ed59144 100644
--- a/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/activecall/CallContent.kt
+++ b/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/activecall/CallContent.kt
@@ -212,7 +212,7 @@ internal fun DefaultPictureInPictureContent(call: Call) {
val video = session?.participant?.video?.collectAsStateWithLifecycle()
val pictureInPictureAspectRatio: Float = 16f / 9f
- if (session != null) {
+ if (session != null && !session.participant.isLocal) {
VideoRenderer(
modifier = Modifier.aspectRatio(pictureInPictureAspectRatio, false),
call = call,
diff --git a/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsGrid.kt b/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsGrid.kt
index a71f245673..2f2b65ea95 100644
--- a/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsGrid.kt
+++ b/stream-video-android-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsGrid.kt
@@ -69,7 +69,8 @@ public fun ParticipantsGrid(
val screenSharingSession = call.state.screenSharingSession.collectAsStateWithLifecycle()
val screenSharing = screenSharingSession.value
- if (screenSharing == null) {
+ // We do not display our own screen-sharing session
+ if (screenSharing == null || screenSharing.participant.isLocal) {
ParticipantsRegularGrid(
call = call,
modifier = modifier,
diff --git a/stream-video-android-core/api/stream-video-android-core.api b/stream-video-android-core/api/stream-video-android-core.api
index ddde26682f..ff72dce037 100644
--- a/stream-video-android-core/api/stream-video-android-core.api
+++ b/stream-video-android-core/api/stream-video-android-core.api
@@ -15,6 +15,7 @@ public final class io/getstream/video/android/core/Call {
public final fun getLocalMicrophoneAudioLevel ()Lkotlinx/coroutines/flow/StateFlow;
public final fun getMicrophone ()Lio/getstream/video/android/core/MicrophoneManager;
public final fun getMonitor ()Lio/getstream/video/android/core/CallHealthMonitor;
+ public final fun getScreenShare ()Lio/getstream/video/android/core/ScreenShareManager;
public final fun getSessionId ()Ljava/lang/String;
public final fun getSpeaker ()Lio/getstream/video/android/core/SpeakerManager;
public final fun getState ()Lio/getstream/video/android/core/CallState;
@@ -51,9 +52,11 @@ public final class io/getstream/video/android/core/Call {
public final fun setVisibility (Ljava/lang/String;Lstream/video/sfu/models/TrackType;Z)V
public final fun startHLS (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public final fun startRecording (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
+ public final fun startScreenSharing (Landroid/content/Intent;)V
public final fun stopHLS (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public final fun stopLive (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public final fun stopRecording (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
+ public final fun stopScreenSharing ()V
public final fun subscribe (Lio/getstream/video/android/core/events/VideoEventListener;)Lio/getstream/video/android/core/EventSubscription;
public final fun subscribeFor ([Ljava/lang/Class;Lio/getstream/video/android/core/events/VideoEventListener;)Lio/getstream/video/android/core/EventSubscription;
public final fun switchSfu (ZLkotlin/coroutines/Continuation;)Ljava/lang/Object;
@@ -392,6 +395,8 @@ public final class io/getstream/video/android/core/MediaManagerImpl {
public final fun getContext ()Landroid/content/Context;
public final fun getEglBaseContext ()Lorg/webrtc/EglBase$Context;
public final fun getScope ()Lkotlinx/coroutines/CoroutineScope;
+ public final fun getScreenShareTrack ()Lorg/webrtc/VideoTrack;
+ public final fun getScreenShareVideoSource ()Lorg/webrtc/VideoSource;
public final fun getVideoSource ()Lorg/webrtc/VideoSource;
public final fun getVideoTrack ()Lorg/webrtc/VideoTrack;
}
@@ -663,6 +668,22 @@ public final class io/getstream/video/android/core/RingingState$TimeoutNoAnswer
public fun toString ()Ljava/lang/String;
}
+public final class io/getstream/video/android/core/ScreenShareManager {
+ public static final field Companion Lio/getstream/video/android/core/ScreenShareManager$Companion;
+ public fun (Lio/getstream/video/android/core/MediaManagerImpl;Lorg/webrtc/EglBase$Context;)V
+ public final fun disable (Z)V
+ public static synthetic fun disable$default (Lio/getstream/video/android/core/ScreenShareManager;ZILjava/lang/Object;)V
+ public final fun enable (Landroid/content/Intent;Z)V
+ public static synthetic fun enable$default (Lio/getstream/video/android/core/ScreenShareManager;Landroid/content/Intent;ZILjava/lang/Object;)V
+ public final fun getEglBaseContext ()Lorg/webrtc/EglBase$Context;
+ public final fun getMediaManager ()Lio/getstream/video/android/core/MediaManagerImpl;
+ public final fun getStatus ()Lkotlinx/coroutines/flow/StateFlow;
+ public final fun isEnabled ()Lkotlinx/coroutines/flow/StateFlow;
+}
+
+public final class io/getstream/video/android/core/ScreenShareManager$Companion {
+}
+
public final class io/getstream/video/android/core/SpeakerManager {
public fun (Lio/getstream/video/android/core/MediaManagerImpl;Lio/getstream/video/android/core/MicrophoneManager;Ljava/lang/Integer;)V
public synthetic fun (Lio/getstream/video/android/core/MediaManagerImpl;Lio/getstream/video/android/core/MicrophoneManager;Ljava/lang/Integer;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
@@ -874,6 +895,7 @@ public final class io/getstream/video/android/core/call/RtcSession {
public final fun reconnect (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public final fun setLocalTrack (Lstream/video/sfu/models/TrackType;Lio/getstream/video/android/core/model/MediaTrack;)V
public final fun setMuteState (ZLstream/video/sfu/models/TrackType;)V
+ public final fun setScreenShareTrack ()V
public final fun setSubscriber (Lio/getstream/video/android/core/call/connection/StreamPeerConnection;)V
public final fun setTrack (Ljava/lang/String;Lstream/video/sfu/models/TrackType;Lio/getstream/video/android/core/model/MediaTrack;)V
public final fun setTracks (Ljava/util/Map;)V
@@ -902,7 +924,7 @@ public final class io/getstream/video/android/core/call/connection/StreamPeerCon
public fun (Lkotlinx/coroutines/CoroutineScope;Lio/getstream/video/android/core/model/StreamPeerType;Lorg/webrtc/MediaConstraints;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function2;Lkotlin/jvm/functions/Function2;I)V
public final fun addAudioTransceiver (Lorg/webrtc/MediaStreamTrack;Ljava/util/List;)V
public final fun addIceCandidate (Lio/getstream/video/android/core/model/IceCandidate;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
- public final fun addVideoTransceiver (Lorg/webrtc/MediaStreamTrack;Ljava/util/List;)V
+ public final fun addVideoTransceiver (Lorg/webrtc/MediaStreamTrack;Ljava/util/List;Z)V
public final fun createAnswer (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public final fun createOffer (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public final fun getAudioTransceiver ()Lorg/webrtc/RtpTransceiver;
diff --git a/stream-video-android-core/src/main/AndroidManifest.xml b/stream-video-android-core/src/main/AndroidManifest.xml
index 423a9c1124..bd1c6b0ce3 100644
--- a/stream-video-android-core/src/main/AndroidManifest.xml
+++ b/stream-video-android-core/src/main/AndroidManifest.xml
@@ -59,5 +59,15 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt
index 6f33cc5054..2831d95de7 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt
@@ -16,6 +16,7 @@
package io.getstream.video.android.core
+import android.content.Intent
import android.view.View
import androidx.annotation.VisibleForTesting
import io.getstream.log.taggedLogger
@@ -57,6 +58,7 @@ import org.openapitools.client.models.JoinCallResponse
import org.openapitools.client.models.ListRecordingsResponse
import org.openapitools.client.models.MemberRequest
import org.openapitools.client.models.MuteUsersResponse
+import org.openapitools.client.models.OwnCapability
import org.openapitools.client.models.RejectCallResponse
import org.openapitools.client.models.SendEventResponse
import org.openapitools.client.models.SendReactionResponse
@@ -117,6 +119,7 @@ public class Call(
val camera by lazy { mediaManager.camera }
val microphone by lazy { mediaManager.microphone }
val speaker by lazy { mediaManager.speaker }
+ val screenShare by lazy { mediaManager.screenShare }
/** The cid is type:id */
val cid = "$type:$id"
@@ -516,6 +519,7 @@ public class Call(
} else {
RealtimeConnection.Disconnected
}
+ stopScreenSharing()
client.state.removeActiveCall()
client.state.removeRingingCall()
(client as StreamVideoImpl).onCallCleanUp(this)
@@ -662,6 +666,28 @@ public class Call(
return clientImpl.stopRecording(type, id)
}
+ /**
+ * User needs to have [OwnCapability.Screenshare] capability in order to start screen
+ * sharing.
+ *
+ * @param mediaProjectionPermissionResultData - intent data returned from the
+ * activity result after asking for screen sharing permission by launching
+ * MediaProjectionManager.createScreenCaptureIntent().
+ * See https://developer.android.com/guide/topics/large-screens/media-projection#recommended_approach
+ */
+ fun startScreenSharing(mediaProjectionPermissionResultData: Intent) {
+ if (state.ownCapabilities.value.contains(OwnCapability.Screenshare)) {
+ session?.setScreenShareTrack()
+ screenShare.enable(mediaProjectionPermissionResultData)
+ } else {
+            logger.w { "Can't start screen sharing - user doesn't have OwnCapability.Screenshare permission" }
+ }
+ }
+
+ fun stopScreenSharing() {
+ screenShare.disable(fromUser = true)
+ }
+
suspend fun startHLS(): Result {
return clientImpl.startBroadcasting(type, id)
.onSuccess {
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt
index 64cb5ab001..a55b1ac4c0 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt
@@ -16,30 +16,40 @@
package io.getstream.video.android.core
+import android.content.ComponentName
import android.content.Context
+import android.content.Intent
+import android.content.ServiceConnection
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.media.AudioAttributes
import android.media.AudioManager
+import android.media.projection.MediaProjection
import android.os.Build
+import android.os.IBinder
+import androidx.core.content.ContextCompat
import androidx.core.content.getSystemService
import io.getstream.log.taggedLogger
import io.getstream.video.android.core.audio.AudioSwitchHandler
import io.getstream.video.android.core.audio.StreamAudioDevice
import io.getstream.video.android.core.audio.StreamAudioDevice.Companion.fromAudio
import io.getstream.video.android.core.audio.StreamAudioDevice.Companion.toAudioDevice
+import io.getstream.video.android.core.screenshare.StreamScreenShareService
import io.getstream.video.android.core.utils.buildAudioConstraints
import io.getstream.video.android.core.utils.mapState
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
+import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import org.openapitools.client.models.VideoSettings
import org.webrtc.Camera2Capturer
import org.webrtc.Camera2Enumerator
import org.webrtc.CameraEnumerationAndroid
import org.webrtc.EglBase
+import org.webrtc.ScreenCapturerAndroid
import org.webrtc.SurfaceTextureHelper
+import stream.video.sfu.models.VideoDimension
import java.util.UUID
sealed class DeviceStatus {
@@ -157,6 +167,130 @@ class SpeakerManager(
}
}
+class ScreenShareManager(
+ val mediaManager: MediaManagerImpl,
+ val eglBaseContext: EglBase.Context,
+) {
+
+ companion object {
+ // TODO: This could be configurable by the client
+ internal val screenShareResolution = VideoDimension(1920, 1080)
+ internal val screenShareBitrate = 1_000_000
+ internal val screenShareFps = 15
+ }
+
+ private val logger by taggedLogger("Media:ScreenShareManager")
+
+ private val _status = MutableStateFlow(DeviceStatus.NotSelected)
+ val status: StateFlow = _status
+
+ public val isEnabled: StateFlow = _status.mapState { it is DeviceStatus.Enabled }
+
+ private lateinit var screenCapturerAndroid: ScreenCapturerAndroid
+ private lateinit var surfaceTextureHelper: SurfaceTextureHelper
+ private var setupCompleted = false
+ private var isScreenSharing = false
+ private var mediaProjectionPermissionResultData: Intent? = null
+
+ /**
+ * The [ServiceConnection.onServiceConnected] is called when our [StreamScreenShareService]
+ * has started. At this point we can start screen-sharing. Starting the screen-sharing without
+     * waiting for the Service to start would throw an exception.
+ */
+ private val connection: ServiceConnection = object : ServiceConnection {
+ override fun onServiceConnected(name: ComponentName, service: IBinder) {
+ if (isScreenSharing) {
+ logger.w { "We are already screen-sharing - ignoring call to start another screenshare" }
+ return
+ }
+
+ // Create the ScreenCapturerAndroid from webrtc-android
+ screenCapturerAndroid =
+ ScreenCapturerAndroid(
+ mediaProjectionPermissionResultData,
+ object : MediaProjection.Callback() {
+ override fun onStop() {
+ super.onStop()
+ // User can also disable screen sharing from the system menu
+ disable()
+ }
+ },
+ )
+
+ // initialize it
+ screenCapturerAndroid.initialize(
+ surfaceTextureHelper,
+ mediaManager.context,
+ mediaManager.screenShareVideoSource.capturerObserver,
+ )
+
+ // start
+ screenCapturerAndroid.startCapture(
+ screenShareResolution.width,
+ screenShareResolution.height,
+ 0,
+ )
+
+ isScreenSharing = true
+ }
+
+ override fun onServiceDisconnected(name: ComponentName) {}
+ }
+
+ fun enable(mediaProjectionPermissionResultData: Intent, fromUser: Boolean = true) {
+ mediaManager.screenShareTrack.setEnabled(true)
+ if (fromUser) {
+ _status.value = DeviceStatus.Enabled
+ }
+ setup()
+ startScreenShare(mediaProjectionPermissionResultData)
+ }
+
+ fun disable(fromUser: Boolean = true) {
+ if (fromUser) {
+ _status.value = DeviceStatus.Disabled
+ }
+
+ if (isScreenSharing) {
+ mediaManager.screenShareTrack.setEnabled(false)
+ screenCapturerAndroid.stopCapture()
+ mediaManager.context.stopService(
+ Intent(mediaManager.context, StreamScreenShareService::class.java),
+ )
+ isScreenSharing = false
+ }
+ }
+
+ private fun startScreenShare(mediaProjectionPermissionResultData: Intent) {
+ mediaManager.scope.launch {
+ this@ScreenShareManager.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData
+
+ // Screen sharing requires a foreground service with foregroundServiceType "mediaProjection" to be started first.
+ // We can wait for the service to be ready by binding to it and then starting the
+ // media projection in onServiceConnected.
+ val intent = StreamScreenShareService.createIntent(
+ mediaManager.context,
+ mediaManager.call.cid,
+ )
+ ContextCompat.startForegroundService(
+ mediaManager.context,
+ StreamScreenShareService.createIntent(mediaManager.context, mediaManager.call.cid),
+ )
+ mediaManager.context.bindService(intent, connection, 0)
+ }
+ }
+
+ private fun setup() {
+ if (setupCompleted) {
+ return
+ }
+
+ surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext)
+
+ setupCompleted = true
+ }
+}
+
/**
* The Microphone manager makes it easy to use your microphone in a call
*
@@ -610,12 +744,25 @@ class MediaManagerImpl(
// source & tracks
val videoSource = call.clientImpl.peerConnectionFactory.makeVideoSource(false)
+ val screenShareVideoSource by lazy {
+ call.clientImpl.peerConnectionFactory.makeVideoSource(
+ true,
+ )
+ }
+
// for track ids we emulate the browser behaviour of random UUIDs, doing something different would be confusing
val videoTrack = call.clientImpl.peerConnectionFactory.makeVideoTrack(
source = videoSource,
trackId = UUID.randomUUID().toString(),
)
+ val screenShareTrack by lazy {
+ call.clientImpl.peerConnectionFactory.makeVideoTrack(
+ source = screenShareVideoSource,
+ trackId = UUID.randomUUID().toString(),
+ )
+ }
+
val audioSource = call.clientImpl.peerConnectionFactory.makeAudioSource(buildAudioConstraints())
// for track ids we emulate the browser behaviour of random UUIDs, doing something different would be confusing
@@ -627,9 +774,11 @@ class MediaManagerImpl(
internal val camera = CameraManager(this, eglBaseContext)
internal val microphone = MicrophoneManager(this, preferSpeakerphone = true)
internal val speaker = SpeakerManager(this, microphone)
+ internal val screenShare = ScreenShareManager(this, eglBaseContext)
fun cleanup() {
videoSource.dispose()
+ screenShareVideoSource.dispose()
videoTrack.dispose()
audioSource.dispose()
audioTrack.dispose()
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt
index 31ce1f676e..d00358dbe8 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt
@@ -23,6 +23,8 @@ import io.getstream.result.Result.Failure
import io.getstream.result.Result.Success
import io.getstream.video.android.core.Call
import io.getstream.video.android.core.DeviceStatus
+import io.getstream.video.android.core.MediaManagerImpl
+import io.getstream.video.android.core.ScreenShareManager
import io.getstream.video.android.core.StreamVideo
import io.getstream.video.android.core.StreamVideoImpl
import io.getstream.video.android.core.call.connection.StreamPeerConnection
@@ -76,6 +78,7 @@ import kotlinx.serialization.json.Json
import okio.IOException
import org.openapitools.client.models.OwnCapability
import org.openapitools.client.models.VideoEvent
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat
import org.webrtc.MediaConstraints
import org.webrtc.MediaStream
import org.webrtc.MediaStreamTrack
@@ -165,6 +168,7 @@ public class RtcSession internal constructor(
private var videoTransceiverInitialized: Boolean = false
private var audioTransceiverInitialized: Boolean = false
+ private var screenshareTransceiverInitialized: Boolean = false
private var errorJob: Job? = null
private var eventJob: Job? = null
internal val socket by lazy { sfuConnectionModule.sfuSocket }
@@ -374,12 +378,26 @@ public class RtcSession internal constructor(
it.addVideoTransceiver(
call.mediaManager.videoTrack,
listOf(buildTrackId(TrackType.TRACK_TYPE_VIDEO)),
+ isScreenShare = false,
)
videoTransceiverInitialized = true
}
}
}
+ private fun initializeScreenshareTransceiver() {
+ if (!screenshareTransceiverInitialized) {
+ publisher?.let {
+ it.addVideoTransceiver(
+ call.mediaManager.screenShareTrack,
+ listOf(buildTrackId(TrackType.TRACK_TYPE_SCREEN_SHARE)),
+ isScreenShare = true,
+ )
+ screenshareTransceiverInitialized = true
+ }
+ }
+ }
+
private fun initialiseAudioTransceiver() {
if (!audioTransceiverInitialized) {
publisher?.let {
@@ -414,6 +432,20 @@ public class RtcSession internal constructor(
}
}
}
+
+ coroutineScope.launch {
+ call.mediaManager.screenShare.status.collectLatest {
+ // set the mute / unmute status
+ setMuteState(
+ isEnabled = it == DeviceStatus.Enabled,
+ TrackType.TRACK_TYPE_SCREEN_SHARE,
+ )
+
+ if (it == DeviceStatus.Enabled) {
+ initializeScreenshareTransceiver()
+ }
+ }
+ }
}
/**
@@ -545,6 +577,7 @@ public class RtcSession internal constructor(
video = call.mediaManager.videoTrack,
),
)
+
// render it on the surface. but we need to start this before forwarding it to the publisher
logger.v { "[createUserTracks] #sfu; videoTrack: ${call.mediaManager.videoTrack.stringify()}" }
if (call.mediaManager.camera.status.value == DeviceStatus.Enabled) {
@@ -553,6 +586,9 @@ public class RtcSession internal constructor(
if (call.mediaManager.microphone.status.value == DeviceStatus.Enabled) {
initialiseAudioTransceiver()
}
+ if (call.mediaManager.screenShare.status.value == DeviceStatus.Enabled) {
+ initializeScreenshareTransceiver()
+ }
}
}
@@ -565,6 +601,16 @@ public class RtcSession internal constructor(
return
}
+ fun setScreenShareTrack() {
+ setLocalTrack(
+ TrackType.TRACK_TYPE_SCREEN_SHARE,
+ VideoTrack(
+ streamId = buildTrackId(TrackType.TRACK_TYPE_SCREEN_SHARE),
+ video = call.mediaManager.screenShareTrack,
+ ),
+ )
+ }
+
/**
* Responds to TrackPublishedEvent event
* @see TrackPublishedEvent
@@ -1199,6 +1245,7 @@ public class RtcSession internal constructor(
)
val result = setPublisher(request)
// step 5 - set the remote description
+
peerConnection.setRemoteDescription(
SessionDescription(
SessionDescription.Type.ANSWER, result.getOrThrow().sdp,
@@ -1226,6 +1273,7 @@ public class RtcSession internal constructor(
private fun getPublisherTracks(): List {
val captureResolution = call.camera.resolution.value
+ val screenShareTrack = getLocalTrack(TrackType.TRACK_TYPE_SCREEN_SHARE)
val transceivers = publisher?.connection?.transceivers?.toList() ?: emptyList()
val tracks = transceivers.filter {
@@ -1236,53 +1284,29 @@ public class RtcSession internal constructor(
val trackType = when (track.kind()) {
"audio" -> TrackType.TRACK_TYPE_AUDIO
"screen" -> TrackType.TRACK_TYPE_SCREEN_SHARE
- "video" -> TrackType.TRACK_TYPE_VIDEO
+ "video" -> {
+ // video tracks and screenshare tracks in webrtc are both video
+ // (the "screen" track type doesn't seem to be used).
+ if (screenShareTrack?.asVideoTrack()?.video?.id() == track.id()) {
+ TrackType.TRACK_TYPE_SCREEN_SHARE
+ } else {
+ TrackType.TRACK_TYPE_VIDEO
+ }
+ }
else -> TrackType.TRACK_TYPE_UNSPECIFIED
}
- if (trackType == TrackType.TRACK_TYPE_VIDEO && captureResolution == null) {
- throw IllegalStateException(
- "video capture needs to be enabled before adding the local track",
- )
- }
-
- val layers: List = if (trackType != TrackType.TRACK_TYPE_VIDEO) {
- emptyList()
- } else {
- // we tell the Sfu which resolutions we're sending
- transceiver.sender.parameters.encodings.map {
- val scaleBy = it.scaleResolutionDownBy ?: 1.0
- val width = captureResolution?.width?.div(scaleBy) ?: 0
- val height = captureResolution?.height?.div(scaleBy) ?: 0
- val quality = when (it.rid) {
- "f" -> {
- VideoQuality.VIDEO_QUALITY_HIGH
- }
-
- "h" -> {
- VideoQuality.VIDEO_QUALITY_MID
- }
-
- else -> {
- VideoQuality.VIDEO_QUALITY_LOW_UNSPECIFIED
- }
- }
-
- // We need to divide by 1000 because the the FramerateRange is multiplied
- // by 1000 (see javadoc).
- val fps = (captureResolution?.framerate?.max ?: 0).div(1000)
-
- VideoLayer(
- rid = it.rid ?: "",
- video_dimension = VideoDimension(
- width = width.toInt(),
- height = height.toInt(),
- ),
- bitrate = it.maxBitrateBps ?: 0,
- fps = fps,
- quality = quality,
+ val layers: List = if (trackType == TrackType.TRACK_TYPE_VIDEO) {
+ checkNotNull(captureResolution) {
+ throw IllegalStateException(
+ "video capture needs to be enabled before adding the local track",
)
}
+ createVideoLayers(transceiver, captureResolution)
+ } else if (trackType == TrackType.TRACK_TYPE_SCREEN_SHARE) {
+ createScreenShareLayers(transceiver)
+ } else {
+ emptyList()
}
TrackInfo(
@@ -1294,6 +1318,64 @@ public class RtcSession internal constructor(
return tracks
}
+ private fun createVideoLayers(transceiver: RtpTransceiver, captureResolution: CaptureFormat): List {
+ // we tell the Sfu which resolutions we're sending
+ return transceiver.sender.parameters.encodings.map {
+ val scaleBy = it.scaleResolutionDownBy ?: 1.0
+ val width = captureResolution.width.div(scaleBy) ?: 0
+ val height = captureResolution.height.div(scaleBy) ?: 0
+ val quality = ridToVideoQuality(it.rid)
+
+ // We need to divide by 1000 because the FramerateRange is multiplied
+ // by 1000 (see javadoc).
+ val fps = (captureResolution.framerate?.max ?: 0).div(1000)
+
+ VideoLayer(
+ rid = it.rid ?: "",
+ video_dimension = VideoDimension(
+ width = width.toInt(),
+ height = height.toInt(),
+ ),
+ bitrate = it.maxBitrateBps ?: 0,
+ fps = fps,
+ quality = quality,
+ )
+ }
+ }
+
+ private fun createScreenShareLayers(transceiver: RtpTransceiver): List {
+ return transceiver.sender.parameters.encodings.map {
+ // So far we use hardcoded parameters for screen-sharing. This is aligned
+ // with iOS.
+
+ VideoLayer(
+ rid = "q",
+ video_dimension = VideoDimension(
+ width = ScreenShareManager.screenShareResolution.width,
+ height = ScreenShareManager.screenShareResolution.height,
+ ),
+ bitrate = ScreenShareManager.screenShareBitrate,
+ fps = ScreenShareManager.screenShareFps,
+ quality = VideoQuality.VIDEO_QUALITY_LOW_UNSPECIFIED,
+ )
+ }
+ }
+
+ private fun ridToVideoQuality(rid: String?) =
+ when (rid) {
+ "f" -> {
+ VideoQuality.VIDEO_QUALITY_HIGH
+ }
+
+ "h" -> {
+ VideoQuality.VIDEO_QUALITY_MID
+ }
+
+ else -> {
+ VideoQuality.VIDEO_QUALITY_LOW_UNSPECIFIED
+ }
+ }
+
/**
* @return [StateFlow] that holds [RTCStatsReport] that the publisher exposes.
*/
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt
index faa7a32cd4..ed2e47921e 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt
@@ -292,11 +292,15 @@ public class StreamPeerConnection(
* @param track The track that contains video.
* @param streamIds The IDs that represent the stream tracks.
*/
- public fun addVideoTransceiver(track: MediaStreamTrack, streamIds: List) {
+ public fun addVideoTransceiver(
+ track: MediaStreamTrack,
+ streamIds: List,
+ isScreenShare: Boolean,
+ ) {
logger.d {
"[addVideoTransceiver] #sfu; #$typeTag; track: ${track.stringify()}, streamIds: $streamIds"
}
- val transceiverInit = buildVideoTransceiverInit(streamIds)
+ val transceiverInit = buildVideoTransceiverInit(streamIds, isScreenShare)
videoTransceiver = connection.addTransceiver(track, transceiverInit)
}
@@ -306,43 +310,59 @@ public class StreamPeerConnection(
*
* @param streamIds The list of stream IDs to bind to this transceiver.
*/
- private fun buildVideoTransceiverInit(streamIds: List): RtpTransceiverInit {
- /**
- * We create different RTP encodings for the transceiver.
- * Full quality, represented by "f" ID.
- * Half quality, represented by "h" ID.
- * Quarter quality, represented by "q" ID.
- *
- * Their bitrate is also roughly as the name states - maximum for "full", ~half of that
- * for "half" and another half, or total quarter of maximum, for "quarter".
- */
- val quarterQuality = RtpParameters.Encoding(
- "q",
- true,
- 4.0,
- ).apply {
- maxBitrateBps = maxBitRate / 4
- }
+ private fun buildVideoTransceiverInit(
+ streamIds: List,
+ isScreenShare: Boolean,
+ ): RtpTransceiverInit {
+ val encodings = if (!isScreenShare) {
+ /**
+ * We create different RTP encodings for the transceiver.
+ * Full quality, represented by "f" ID.
+ * Half quality, represented by "h" ID.
+ * Quarter quality, represented by "q" ID.
+ *
+ * Their bitrate is also roughly as the name states - maximum for "full", ~half of that
+ * for "half" and another half, or total quarter of maximum, for "quarter".
+ */
+ val quarterQuality = RtpParameters.Encoding(
+ "q",
+ true,
+ 4.0,
+ ).apply {
+ maxBitrateBps = maxBitRate / 4
+ }
- val halfQuality = RtpParameters.Encoding(
- "h",
- true,
- 2.0,
- ).apply {
- maxBitrateBps = maxBitRate / 2
- }
+ val halfQuality = RtpParameters.Encoding(
+ "h",
+ true,
+ 2.0,
+ ).apply {
+ maxBitrateBps = maxBitRate / 2
+ }
- val fullQuality = RtpParameters.Encoding(
- "f",
- true,
- 1.0,
- ).apply {
- maxBitrateBps = maxBitRate
+ val fullQuality = RtpParameters.Encoding(
+ "f",
+ true,
+ 1.0,
+ ).apply {
+ maxBitrateBps = maxBitRate
// networkPriority = 3
// bitratePriority = 4.0
- }
+ }
- val encodings = listOf(quarterQuality, halfQuality, fullQuality)
+ listOf(quarterQuality, halfQuality, fullQuality)
+ } else {
+ // this is aligned with iOS
+ val screenshareQuality = RtpParameters.Encoding(
+ "q",
+ true,
+ 1.0,
+ ).apply {
+ maxBitrateBps = 1_000_000
+ }
+
+ listOf(screenshareQuality)
+ }
return RtpTransceiverInit(
RtpTransceiver.RtpTransceiverDirection.SEND_ONLY,
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StopScreenshareBroadcastReceiver.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StopScreenshareBroadcastReceiver.kt
new file mode 100644
index 0000000000..57b75cd964
--- /dev/null
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StopScreenshareBroadcastReceiver.kt
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.core.notifications.internal
+
+import android.content.BroadcastReceiver
+import android.content.Context
+import android.content.Intent
+import androidx.core.app.NotificationManagerCompat
+import io.getstream.log.taggedLogger
+import io.getstream.video.android.core.StreamVideo
+import io.getstream.video.android.core.notifications.NotificationHandler.Companion.ACTION_REJECT_CALL
+import io.getstream.video.android.core.screenshare.StreamScreenShareService
+import io.getstream.video.android.model.StreamCallId
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.launch
+
+/**
+ * Used for handling the "Stop screen sharing" action button on the notification displayed
+ * by [StreamScreenShareService]
+ */
+internal class StopScreenshareBroadcastReceiver : BroadcastReceiver() {
+
+ val logger by taggedLogger("StopScreenshareBroadcastReceiver")
+
+ override fun onReceive(context: Context?, intent: Intent?) {
+ logger.d { "[onReceive] context: $context, intent: $intent" }
+
+ if (context != null && intent?.action == StreamScreenShareService.BROADCAST_CANCEL_ACTION) {
+ val callCid = StreamCallId.fromCallCid(
+ intent.getStringExtra(StreamScreenShareService.INTENT_EXTRA_CALL_ID)!!,
+ )
+
+ CoroutineScope(Dispatchers.IO).launch {
+ val streamVideo: StreamVideo? = StreamVideo.instanceOrNull()
+
+ if (streamVideo == null) {
+ logger.e {
+ "Received ${ACTION_REJECT_CALL} but StreamVideo is not initialised. " +
+ "Handling notifications requires to initialise StreamVideo in Application.onCreate"
+ }
+ return@launch
+ }
+
+ streamVideo.call(callCid.type, callCid.id).stopScreenSharing()
+ NotificationManagerCompat.from(
+ context,
+ ).cancel(StreamScreenShareService.NOTIFICATION_ID)
+ }
+ }
+ }
+}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/screenshare/StreamScreenShareService.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/screenshare/StreamScreenShareService.kt
new file mode 100644
index 0000000000..659c9d583e
--- /dev/null
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/screenshare/StreamScreenShareService.kt
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.core.screenshare
+
+import android.app.NotificationManager
+import android.app.PendingIntent
+import android.app.Service
+import android.content.Context
+import android.content.Intent
+import android.os.Binder
+import android.os.IBinder
+import androidx.core.app.NotificationChannelCompat
+import androidx.core.app.NotificationCompat
+import androidx.core.app.NotificationManagerCompat
+import io.getstream.video.android.core.R
+import io.getstream.video.android.core.notifications.internal.StopScreenshareBroadcastReceiver
+
+/**
+ * Screen-sharing in Android requires a ForegroundService (with type foregroundServiceType set to "mediaProjection").
+ * The Stream SDK will start this [StreamScreenShareService] once screen-sharing is enabled and then
+ * will stop it when screen-sharing is either stopped by the user or we get a callback that the
+ * screen-sharing was stopped by the system.
+ *
+ * This Service isn't doing any long-running operations. It's just an empty Service to meet the platform
+ * requirement (https://developer.android.com/reference/android/media/projection/MediaProjectionManager).
+ */
+internal class StreamScreenShareService : Service() {
+
+ private val channelId = "StreamScreenShareService"
+
+ private val binder = LocalBinder()
+
+ /**
+ * This Binder is only used to be able to wait for the service until it's started
+ * in [ScreenShareManager]
+ */
+ inner class LocalBinder : Binder()
+
+ override fun onBind(p0: Intent?): IBinder {
+ return binder
+ }
+
+ override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
+ val callId = intent?.getStringExtra(EXTRA_CALL_ID)!!
+ val cancelScreenShareIntent = Intent(
+ this,
+ StopScreenshareBroadcastReceiver::class.java,
+ ).apply {
+ action = BROADCAST_CANCEL_ACTION
+ putExtra(INTENT_EXTRA_CALL_ID, callId)
+ }
+ val cancelScreenSharePendingIntent: PendingIntent =
+ PendingIntent.getBroadcast(
+ this,
+ 0,
+ cancelScreenShareIntent,
+ PendingIntent.FLAG_IMMUTABLE,
+ )
+
+ val builder = NotificationCompat.Builder(applicationContext, channelId).apply {
+ priority = NotificationCompat.PRIORITY_HIGH
+ setAutoCancel(false)
+ setOngoing(true)
+ setSmallIcon(R.drawable.stream_video_ic_screenshare)
+ setContentTitle(getString(R.string.stream_video_screen_sharing_notification_title))
+ setContentText(getString(R.string.stream_video_screen_sharing_notification_description))
+ setAllowSystemGeneratedContextualActions(false)
+ addAction(
+ R.drawable.stream_video_ic_cancel_screenshare,
+ getString(R.string.stream_video_screen_sharing_notification_action_stop),
+ cancelScreenSharePendingIntent,
+ )
+ }
+
+ NotificationManagerCompat.from(application).also {
+ it.createNotificationChannel(
+ NotificationChannelCompat
+ .Builder(channelId, NotificationManager.IMPORTANCE_DEFAULT)
+ .setName(
+ getString(R.string.stream_video_screen_sharing_notification_channel_title),
+ )
+ .setDescription(
+ getString(
+ R.string.stream_video_screen_sharing_notification_channel_description,
+ ),
+ )
+ .build(),
+ )
+ }
+
+ startForeground(NOTIFICATION_ID, builder.build())
+ return super.onStartCommand(intent, flags, startId)
+ }
+
+ companion object {
+ internal const val NOTIFICATION_ID = 43534
+ internal const val EXTRA_CALL_ID = "EXTRA_CALL_ID"
+ internal const val BROADCAST_CANCEL_ACTION = "io.getstream.video.android.action.CANCEL_SCREEN_SHARE"
+ internal const val INTENT_EXTRA_CALL_ID = "io.getstream.video.android.intent-extra.call_cid"
+
+ fun createIntent(context: Context, callId: String) =
+ Intent(context, StreamScreenShareService::class.java).apply {
+ putExtra(EXTRA_CALL_ID, callId)
+ }
+ }
+}
diff --git a/stream-video-android-core/src/main/res/drawable/stream_video_ic_cancel_screenshare.xml b/stream-video-android-core/src/main/res/drawable/stream_video_ic_cancel_screenshare.xml
new file mode 100644
index 0000000000..0b1fce10ed
--- /dev/null
+++ b/stream-video-android-core/src/main/res/drawable/stream_video_ic_cancel_screenshare.xml
@@ -0,0 +1,21 @@
+
+
+
+
+
diff --git a/stream-video-android-core/src/main/res/drawable/stream_video_ic_screenshare.xml b/stream-video-android-core/src/main/res/drawable/stream_video_ic_screenshare.xml
new file mode 100644
index 0000000000..c143a0705e
--- /dev/null
+++ b/stream-video-android-core/src/main/res/drawable/stream_video_ic_screenshare.xml
@@ -0,0 +1,21 @@
+
+
+
+
+
diff --git a/stream-video-android-core/src/main/res/values/strings.xml b/stream-video-android-core/src/main/res/values/strings.xml
index f31931bf1d..aca4840d11 100644
--- a/stream-video-android-core/src/main/res/values/strings.xml
+++ b/stream-video-android-core/src/main/res/values/strings.xml
@@ -20,4 +20,9 @@
Reject
Cancel
%1$s presenting
+ You are screen sharing
+
+ Stop screen sharing
+ Screen-sharing
+ Required to be enabled for screen sharing
\ No newline at end of file
diff --git a/stream-video-android-ui-common/src/main/kotlin/io/getstream/video/android/ui/common/AbstractCallActivity.kt b/stream-video-android-ui-common/src/main/kotlin/io/getstream/video/android/ui/common/AbstractCallActivity.kt
index 2b923d5ae8..ee2d2259dd 100644
--- a/stream-video-android-ui-common/src/main/kotlin/io/getstream/video/android/ui/common/AbstractCallActivity.kt
+++ b/stream-video-android-ui-common/src/main/kotlin/io/getstream/video/android/ui/common/AbstractCallActivity.kt
@@ -134,7 +134,7 @@ public abstract class AbstractCallActivity : ComponentActivity() {
val screenSharing = call.state.screenSharingSession.value
val aspect =
- if (currentOrientation == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT && screenSharing == null) {
+ if (currentOrientation == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT && (screenSharing == null || screenSharing.participant.isLocal)) {
Rational(9, 16)
} else {
Rational(16, 9)