diff --git a/demo-app/.gitignore b/demo-app/.gitignore
index 42afabfd2a..43c1a66995 100644
--- a/demo-app/.gitignore
+++ b/demo-app/.gitignore
@@ -1 +1,2 @@
-/build
\ No newline at end of file
+/build
+!/libs/**
\ No newline at end of file
diff --git a/demo-app/README.md b/demo-app/README.md
index cd75d75d4f..399d837bc7 100644
--- a/demo-app/README.md
+++ b/demo-app/README.md
@@ -47,9 +47,7 @@ If you want to build and run the [dogfooding app](https://github.com/GetStream/s
```
# Environment Variable for dogfooding app
-DOGFOODING_BUILD_CONFIG_API_KEY=YOUR_STREAM_API_KEY
-PRODUCTION_BUILD_CONFIG_API_KEY=YOUR_STREAM_API_KEY
-DOGFOODING_RES_CONFIG_DEEPLINKING_HOST=stream-calls-dogfood.vercel.app
+DOGFOODING_RES_CONFIG_DEEPLINKING_HOST=pronto.getstream.io
PRODUCTION_RES_CONFIG_DEEPLINKING_HOST=getstream.io
DOGFOODING_RES_CONFIG_DEEPLINKING_PATH_PREFIX=/
PRODUCTION_RES_CONFIG_DEEPLINKING_PATH_PREFIX=/video/demos/
diff --git a/demo-app/build.gradle.kts b/demo-app/build.gradle.kts
index d30f21e28a..7a622b7407 100644
--- a/demo-app/build.gradle.kts
+++ b/demo-app/build.gradle.kts
@@ -130,6 +130,10 @@ android {
baseline = file("lint-baseline.xml")
}
+ packaging {
+ jniLibs.pickFirsts.add("lib/*/librenderscript-toolkit.so")
+ }
+
baselineProfile {
mergeIntoMain = true
}
@@ -260,6 +264,10 @@ dependencies {
implementation(libs.play.auth)
implementation(libs.play.app.update.ktx)
+ // Video Filters
+ implementation(libs.google.mlkit.selfie.segmentation)
+ implementation(files("libs/renderscript-toolkit.aar"))
+
// Memory detection
debugImplementation(libs.leakCanary)
diff --git a/demo-app/libs/renderscript-toolkit.aar b/demo-app/libs/renderscript-toolkit.aar
new file mode 100644
index 0000000000..daf90d952d
Binary files /dev/null and b/demo-app/libs/renderscript-toolkit.aar differ
diff --git a/demo-app/src/development/res/drawable/ic_launcher_background.xml b/demo-app/src/development/res/drawable/ic_launcher_background.xml
new file mode 100644
index 0000000000..f766419288
--- /dev/null
+++ b/demo-app/src/development/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/demo-app/src/development/res/mipmap-hdpi/ic_launcher_round.png b/demo-app/src/development/res/mipmap-hdpi/ic_launcher_round.png
new file mode 100644
index 0000000000..71fb898c7e
Binary files /dev/null and b/demo-app/src/development/res/mipmap-hdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/development/res/mipmap-mdpi/ic_launcher_round.png b/demo-app/src/development/res/mipmap-mdpi/ic_launcher_round.png
new file mode 100644
index 0000000000..5567403877
Binary files /dev/null and b/demo-app/src/development/res/mipmap-mdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/development/res/mipmap-xhdpi/ic_launcher_round.png b/demo-app/src/development/res/mipmap-xhdpi/ic_launcher_round.png
new file mode 100644
index 0000000000..6e2f762d48
Binary files /dev/null and b/demo-app/src/development/res/mipmap-xhdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/development/res/mipmap-xxhdpi/ic_launcher_round.png b/demo-app/src/development/res/mipmap-xxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000000..5feb7e0bdf
Binary files /dev/null and b/demo-app/src/development/res/mipmap-xxhdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/development/res/mipmap-xxxhdpi/ic_launcher_round.png b/demo-app/src/development/res/mipmap-xxxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000000..c71563fdaa
Binary files /dev/null and b/demo-app/src/development/res/mipmap-xxxhdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/dogfooding/res/values/strings.xml b/demo-app/src/development/res/values/strings.xml
similarity index 92%
rename from demo-app/src/dogfooding/res/values/strings.xml
rename to demo-app/src/development/res/values/strings.xml
index 801b847c55..10c01a3f8b 100644
--- a/demo-app/src/dogfooding/res/values/strings.xml
+++ b/demo-app/src/development/res/values/strings.xml
@@ -15,7 +15,7 @@
limitations under the License.
-->
- Dogfooding
+ Stream Video Calls (Development)
getstream.io
/video/demos
\ No newline at end of file
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/App.kt b/demo-app/src/main/kotlin/io/getstream/video/android/App.kt
index 36b3511140..6a3d0127f5 100644
--- a/demo-app/src/main/kotlin/io/getstream/video/android/App.kt
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/App.kt
@@ -55,6 +55,6 @@ class App : Application() {
}
}
-val STREAM_SDK_ENVIRONMENT = if (BuildConfig.FLAVOR == StreamFlavors.production) "pronto" else "demo"
+val STREAM_SDK_ENVIRONMENT = if (BuildConfig.FLAVOR == StreamFlavors.production) "demo" else "pronto"
val Context.app get() = applicationContext as App
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/DirectCallActivity.kt b/demo-app/src/main/kotlin/io/getstream/video/android/DirectCallActivity.kt
index 0d3da53a37..29a3b8e5da 100644
--- a/demo-app/src/main/kotlin/io/getstream/video/android/DirectCallActivity.kt
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/DirectCallActivity.kt
@@ -32,6 +32,7 @@ import io.getstream.result.Result
import io.getstream.video.android.compose.theme.VideoTheme
import io.getstream.video.android.compose.ui.components.call.activecall.CallContent
import io.getstream.video.android.compose.ui.components.call.ringing.RingingCallContent
+import io.getstream.video.android.core.Call
import io.getstream.video.android.core.StreamVideo
import io.getstream.video.android.core.call.state.AcceptCall
import io.getstream.video.android.core.call.state.CallAction
@@ -45,7 +46,10 @@ import io.getstream.video.android.datastore.delegate.StreamUserDataStore
import io.getstream.video.android.model.mapper.isValidCallId
import io.getstream.video.android.model.mapper.toTypeAndId
import io.getstream.video.android.util.StreamVideoInitHelper
+import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
+import kotlinx.coroutines.withContext
+import org.openapitools.client.models.CallRejectedEvent
import java.util.UUID
import javax.inject.Inject
@@ -54,6 +58,7 @@ class DirectCallActivity : ComponentActivity() {
@Inject
lateinit var dataStore: StreamUserDataStore
+ private lateinit var call: Call
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
@@ -73,7 +78,7 @@ class DirectCallActivity : ComponentActivity() {
}
// Create call object
- val call = StreamVideo.instance().call(type, id)
+ call = StreamVideo.instance().call(type, id)
// Get list of members
val members: List = intent.getStringArrayExtra(EXTRA_MEMBERS_ARRAY)?.asList() ?: emptyList()
@@ -84,6 +89,18 @@ class DirectCallActivity : ComponentActivity() {
// Ring the members
val result = call.create(ring = true, memberIds = membersWithMe)
+ // Update the call
+ call.get()
+
+ call.subscribe {
+ when (it) {
+ // Finish this activity if ever a call.reject is received
+ is CallRejectedEvent -> {
+ finish()
+ }
+ }
+ }
+
if (result is Result.Failure) {
// Failed to recover the current state of the call
// TODO: Automaticly call this in the SDK?
@@ -101,20 +118,19 @@ class DirectCallActivity : ComponentActivity() {
val onCallAction: (CallAction) -> Unit = { callAction ->
when (callAction) {
is ToggleCamera -> call.camera.setEnabled(callAction.isEnabled)
- is ToggleMicrophone -> call.microphone.setEnabled(callAction.isEnabled)
+ is ToggleMicrophone -> call.microphone.setEnabled(
+ callAction.isEnabled,
+ )
is ToggleSpeakerphone -> call.speaker.setEnabled(callAction.isEnabled)
is LeaveCall -> {
call.leave()
finish()
}
is DeclineCall -> {
- // Not needed. this activity is only used for outgoing calls.
+ reject(call)
}
is CancelCall -> {
- lifecycleScope.launch {
- call.leave()
- finish()
- }
+ reject(call)
}
is AcceptCall -> {
lifecycleScope.launch {
@@ -131,8 +147,7 @@ class DirectCallActivity : ComponentActivity() {
modifier = Modifier.background(color = VideoTheme.colors.appBackground),
call = call,
onBackPressed = {
- call.leave()
- finish()
+ reject(call)
},
onAcceptedContent = {
CallContent(
@@ -142,8 +157,7 @@ class DirectCallActivity : ComponentActivity() {
)
},
onRejectedContent = {
- call.leave()
- finish()
+ reject(call)
},
onCallAction = onCallAction,
)
@@ -152,6 +166,22 @@ class DirectCallActivity : ComponentActivity() {
}
}
+ override fun onStop() {
+ super.onStop()
+ if (::call.isInitialized) {
+ reject(call)
+ }
+ }
+
+ private fun reject(call: Call) {
+ lifecycleScope.launch(Dispatchers.IO) {
+ call.reject()
+ withContext(Dispatchers.Main) {
+ finish()
+ }
+ }
+ }
+
companion object {
const val EXTRA_CID: String = "EXTRA_CID"
const val EXTRA_MEMBERS_ARRAY: String = "EXTRA_MEMBERS_ARRAY"
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/IncomingCallActivity.kt b/demo-app/src/main/kotlin/io/getstream/video/android/IncomingCallActivity.kt
index 29cde224a0..f939d3abd2 100644
--- a/demo-app/src/main/kotlin/io/getstream/video/android/IncomingCallActivity.kt
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/IncomingCallActivity.kt
@@ -25,6 +25,7 @@ import androidx.activity.ComponentActivity
import androidx.activity.compose.setContent
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.fillMaxSize
+import androidx.compose.runtime.LaunchedEffect
import androidx.compose.ui.Modifier
import androidx.lifecycle.lifecycleScope
import dagger.hilt.android.AndroidEntryPoint
@@ -135,8 +136,10 @@ class IncomingCallActivity : ComponentActivity() {
)
},
onRejectedContent = {
- call.leave()
- finish()
+ LaunchedEffect(key1 = call) {
+ call.reject()
+ finish()
+ }
},
onCallAction = onCallAction,
)
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt
index 9f6cfd6e16..31547b34c2 100644
--- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt
@@ -22,6 +22,7 @@ import android.widget.Toast
import androidx.compose.animation.Crossfade
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.Box
+import androidx.compose.foundation.layout.BoxWithConstraints
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
@@ -33,6 +34,7 @@ import androidx.compose.material.Text
import androidx.compose.material.rememberModalBottomSheetState
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
+import androidx.compose.runtime.collectAsState
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableIntStateOf
import androidx.compose.runtime.mutableStateListOf
@@ -42,13 +44,16 @@ import androidx.compose.runtime.rememberCoroutineScope
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
+import androidx.compose.ui.draw.clip
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.tooling.preview.Preview
+import androidx.compose.ui.unit.IntSize
import androidx.compose.ui.unit.dp
import androidx.lifecycle.compose.collectAsStateWithLifecycle
import io.getstream.chat.android.ui.common.state.messages.list.MessageItemState
import io.getstream.video.android.BuildConfig
+import io.getstream.video.android.compose.theme.StreamDimens
import io.getstream.video.android.compose.theme.VideoTheme
import io.getstream.video.android.compose.ui.components.call.activecall.CallContent
import io.getstream.video.android.compose.ui.components.call.controls.ControlActions
@@ -59,7 +64,11 @@ import io.getstream.video.android.compose.ui.components.call.controls.actions.Fl
import io.getstream.video.android.compose.ui.components.call.controls.actions.SettingsAction
import io.getstream.video.android.compose.ui.components.call.controls.actions.ToggleCameraAction
import io.getstream.video.android.compose.ui.components.call.controls.actions.ToggleMicrophoneAction
+import io.getstream.video.android.compose.ui.components.call.renderer.FloatingParticipantVideo
import io.getstream.video.android.compose.ui.components.call.renderer.LayoutType
+import io.getstream.video.android.compose.ui.components.call.renderer.ParticipantVideo
+import io.getstream.video.android.compose.ui.components.call.renderer.RegularVideoRendererStyle
+import io.getstream.video.android.compose.ui.components.call.renderer.copy
import io.getstream.video.android.core.Call
import io.getstream.video.android.core.RealtimeConnection
import io.getstream.video.android.core.call.state.ChooseLayout
@@ -84,6 +93,7 @@ fun CallScreen(
var isShowingLayoutChooseMenu by remember { mutableStateOf(false) }
var isShowingReactionsMenu by remember { mutableStateOf(false) }
var isShowingAvailableDeviceMenu by remember { mutableStateOf(false) }
+ var isBackgroundBlurEnabled by remember { mutableStateOf(false) }
var layout by remember { mutableStateOf(LayoutType.DYNAMIC) }
var unreadCount by remember { mutableIntStateOf(0) }
val chatState = rememberModalBottomSheetState(
@@ -95,27 +105,30 @@ fun CallScreen(
val scope = rememberCoroutineScope()
val messageScope = rememberCoroutineScope()
- val callState by call.state.connection.collectAsStateWithLifecycle()
+ val connection by call.state.connection.collectAsStateWithLifecycle()
+ val me by call.state.me.collectAsState()
- LaunchedEffect(key1 = callState) {
- if (callState == RealtimeConnection.Disconnected) {
+ LaunchedEffect(key1 = connection) {
+ if (connection == RealtimeConnection.Disconnected) {
onCallDisconnected.invoke()
- } else if (callState is RealtimeConnection.Failed) {
+ } else if (connection is RealtimeConnection.Failed) {
Toast.makeText(
context,
- "Call connection failed (${(callState as RealtimeConnection.Failed).error}",
+ "Call connection failed (${(connection as RealtimeConnection.Failed).error}",
Toast.LENGTH_LONG,
).show()
onCallDisconnected.invoke()
}
}
- VideoTheme {
+ VideoTheme(
+ dimens = StreamDimens.defaultDimens().copy(reactionSize = 32.dp),
+ ) {
ChatDialog(
state = chatState,
call = call,
content = {
- Box(modifier = Modifier.fillMaxSize()) {
+ BoxWithConstraints(modifier = Modifier.fillMaxSize()) {
CallContent(
modifier = Modifier.background(color = VideoTheme.colors.appBackground),
call = call,
@@ -220,6 +233,52 @@ fun CallScreen(
),
)
},
+ videoRenderer = { modifier, call, participant, style ->
+ ParticipantVideo(
+ modifier = modifier,
+ call = call,
+ participant = participant,
+ style = style,
+ reactionContent = {
+ CustomReactionContent(
+ participant = participant,
+ style = style.copy(
+ reactionPosition = Alignment.TopCenter,
+ reactionDuration = 5000,
+ ),
+ )
+ },
+ )
+ },
+ floatingVideoRenderer = { _, _ ->
+ FloatingParticipantVideo(
+ call = call,
+ participant = me!!,
+ parentBounds = IntSize(
+ this@BoxWithConstraints.constraints.maxWidth,
+ this@BoxWithConstraints.constraints.maxHeight,
+ ),
+ videoRenderer = { participant ->
+ ParticipantVideo(
+ modifier = Modifier
+ .fillMaxSize()
+ .clip(VideoTheme.shapes.floatingParticipant),
+ call = call,
+ participant = participant,
+ reactionContent = {
+ CustomReactionContent(
+ participant = participant,
+ style = RegularVideoRendererStyle().copy(
+ isShowingConnectionQualityIndicator = false,
+ reactionPosition = Alignment.TopCenter,
+ reactionDuration = 5000,
+ ),
+ )
+ },
+ )
+ },
+ )
+ },
videoOverlayContent = {
Crossfade(
modifier = Modifier
@@ -266,9 +325,14 @@ fun CallScreen(
SettingsMenu(
call = call,
showDebugOptions = showDebugOptions,
+ isBackgroundBlurEnabled = isBackgroundBlurEnabled,
onDisplayAvailableDevice = { isShowingAvailableDeviceMenu = true },
onDismissed = { isShowingSettingMenu = false },
onShowReactionsMenu = { isShowingReactionsMenu = true },
+ onToggleBackgroundBlur = {
+ isBackgroundBlurEnabled = !isBackgroundBlurEnabled
+ isShowingSettingMenu = false
+ },
)
}
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CustomReactionContent.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CustomReactionContent.kt
new file mode 100644
index 0000000000..39ed8e4671
--- /dev/null
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CustomReactionContent.kt
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.ui.call
+
+import androidx.compose.foundation.layout.BoxScope
+import androidx.compose.foundation.layout.BoxWithConstraints
+import androidx.compose.foundation.layout.fillMaxSize
+import androidx.compose.foundation.layout.padding
+import androidx.compose.material.Text
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.LaunchedEffect
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.setValue
+import androidx.compose.ui.Modifier
+import androidx.compose.ui.unit.sp
+import androidx.lifecycle.compose.collectAsStateWithLifecycle
+import io.getstream.video.android.compose.theme.VideoTheme
+import io.getstream.video.android.compose.ui.components.call.renderer.VideoRendererStyle
+import io.getstream.video.android.core.ParticipantState
+import io.getstream.video.android.core.model.Reaction
+import io.getstream.video.android.core.model.ReactionState
+import kotlinx.coroutines.delay
+
+@Composable
+fun BoxScope.CustomReactionContent(
+ participant: ParticipantState,
+ style: VideoRendererStyle,
+) {
+ val reactions by participant.reactions.collectAsStateWithLifecycle()
+ val reaction = reactions.lastOrNull { it.createdAt + 3000 > System.currentTimeMillis() }
+ var currentReaction: Reaction? by remember { mutableStateOf(null) }
+ var reactionState: ReactionState by remember { mutableStateOf(ReactionState.Nothing) }
+
+ LaunchedEffect(key1 = reaction) {
+ if (reactionState == ReactionState.Nothing) {
+ currentReaction?.let { participant.consumeReaction(it) }
+ currentReaction = reaction
+
+ // deliberately execute this instead of animation finish listener to remove animation on the screen.
+ if (reaction != null) {
+ reactionState = ReactionState.Running
+ delay(style.reactionDuration * 2 - 50L)
+ participant.consumeReaction(reaction)
+ currentReaction = null
+ reactionState = ReactionState.Nothing
+ }
+ } else {
+ if (currentReaction != null) {
+ participant.consumeReaction(currentReaction!!)
+ reactionState = ReactionState.Nothing
+ currentReaction = null
+ delay(style.reactionDuration * 2 - 50L)
+ }
+ }
+ }
+
+ val emojiCode = currentReaction?.response?.emojiCode
+ if (currentReaction != null && emojiCode != null) {
+ var isEmojiVisible by remember { mutableStateOf(true) }
+ val emojiMapper = VideoTheme.reactionMapper
+ val emojiText = emojiMapper.map(emojiCode)
+
+ LaunchedEffect(key1 = Unit) {
+ delay(style.reactionDuration.toLong())
+ isEmojiVisible = false
+ }
+
+ if (isEmojiVisible) {
+ BoxWithConstraints(modifier = Modifier.fillMaxSize()) {
+ Text(
+ text = emojiText,
+ modifier = Modifier
+ .padding(top = maxHeight * 0.10f)
+ .align(style.reactionPosition),
+ fontSize = VideoTheme.dimens.reactionSize.value.sp,
+ )
+ }
+ }
+ }
+}
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/ReactionsMenu.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/ReactionsMenu.kt
index 87278f06d9..d093ec7e46 100644
--- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/ReactionsMenu.kt
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/ReactionsMenu.kt
@@ -66,9 +66,8 @@ private object DefaultReactionsMenuData {
val mainReaction = ReactionItemData("Raise hand", ":raise-hand:")
val defaultReactions = listOf(
ReactionItemData("Fireworks", ":fireworks:"),
- ReactionItemData("Wave", ":hello:"),
- ReactionItemData("Like", ":raise-hand:"),
- ReactionItemData("Dislike", ":hate:"),
+ ReactionItemData("Like", ":like:"),
+ ReactionItemData("Dislike", ":dislike:"),
ReactionItemData("Smile", ":smile:"),
ReactionItemData("Heart", ":heart:"),
)
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt
index 524fcbc8bc..c61d527842 100644
--- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/SettingsMenu.kt
@@ -22,6 +22,7 @@ import android.media.projection.MediaProjectionManager
import android.widget.Toast
import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.result.contract.ActivityResultContracts
+import androidx.annotation.DrawableRes
import androidx.compose.foundation.background
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.Column
@@ -29,6 +30,7 @@ import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.Spacer
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
+import androidx.compose.foundation.layout.width
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.Card
import androidx.compose.material.Icon
@@ -49,8 +51,8 @@ import io.getstream.video.android.core.Call
import io.getstream.video.android.core.call.audio.AudioFilter
import io.getstream.video.android.core.call.video.BitmapVideoFilter
import io.getstream.video.android.ui.common.R
+import io.getstream.video.android.util.BlurredBackgroundVideoFilter
import io.getstream.video.android.util.SampleAudioFilter
-import io.getstream.video.android.util.SampleVideoFilter
import kotlinx.coroutines.launch
import java.nio.ByteBuffer
@@ -58,9 +60,11 @@ import java.nio.ByteBuffer
internal fun SettingsMenu(
call: Call,
showDebugOptions: Boolean,
+ isBackgroundBlurEnabled: Boolean,
onDisplayAvailableDevice: () -> Unit,
onDismissed: () -> Unit,
onShowReactionsMenu: () -> Unit,
+ onToggleBackgroundBlur: () -> Unit,
) {
val context = LocalContext.current
val scope = rememberCoroutineScope()
@@ -84,7 +88,7 @@ internal fun SettingsMenu(
Popup(
alignment = Alignment.BottomStart,
- offset = IntOffset(30, -200),
+ offset = IntOffset(30, -210),
onDismissRequest = { onDismissed.invoke() },
) {
Card(
@@ -93,32 +97,25 @@ internal fun SettingsMenu(
) {
Column(
modifier = Modifier
+ .width(245.dp)
.background(VideoTheme.colors.appBackground)
.padding(12.dp),
) {
- Row(
- modifier = Modifier.clickable {
+ MenuEntry(
+ icon = R.drawable.stream_video_ic_reaction,
+ label = "Reactions",
+ onClick = {
onDismissed()
onShowReactionsMenu()
},
- ) {
- Icon(
- painter = painterResource(id = R.drawable.stream_video_ic_reaction),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Reactions",
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
+ )
Spacer(modifier = Modifier.height(12.dp))
- Row(
- modifier = Modifier.clickable {
+ MenuEntry(
+ icon = R.drawable.stream_video_ic_screensharing,
+ label = screenShareButtonText,
+ onClick = {
if (!isScreenSharing) {
scope.launch {
val mediaProjectionManager = context.getSystemService(
@@ -132,55 +129,56 @@ internal fun SettingsMenu(
call.stopScreenSharing()
}
},
- ) {
- Icon(
- painter = painterResource(id = R.drawable.stream_video_ic_screensharing),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
+ )
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = screenShareButtonText,
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
+ Spacer(modifier = Modifier.height(12.dp))
+
+ MenuEntry(
+ icon = io.getstream.video.android.R.drawable.ic_mic,
+ label = "Switch Microphone",
+ onClick = {
+ onDismissed.invoke()
+ onDisplayAvailableDevice.invoke()
+ },
+ )
Spacer(modifier = Modifier.height(12.dp))
- if (showDebugOptions) {
- Row(
- modifier = Modifier.clickable {
- if (call.videoFilter == null) {
- call.videoFilter = object : BitmapVideoFilter() {
- override fun filter(bitmap: Bitmap) {
- SampleVideoFilter.toGrayscale(bitmap)
- }
+ MenuEntry(
+ icon = if (isBackgroundBlurEnabled) {
+ io.getstream.video.android.R.drawable.ic_blur_off
+ } else {
+ io.getstream.video.android.R.drawable.ic_blur_on
+ },
+ label = if (isBackgroundBlurEnabled) {
+ "Disable background blur"
+ } else {
+ "Enable background blur (beta)"
+ },
+ onClick = {
+ onToggleBackgroundBlur()
+
+ if (call.videoFilter == null) {
+ call.videoFilter = object : BitmapVideoFilter() {
+ val filter = BlurredBackgroundVideoFilter()
+
+ override fun filter(bitmap: Bitmap) {
+ filter.applyFilter(bitmap)
}
- } else {
- call.videoFilter = null
}
- },
- ) {
- Icon(
- painter = painterResource(
- id = R.drawable.stream_video_ic_fullscreen_exit,
- ),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Toggle video filter",
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
+ } else {
+ call.videoFilter = null
+ }
+ },
+ )
+ if (showDebugOptions) {
Spacer(modifier = Modifier.height(12.dp))
- Row(
- modifier = Modifier.clickable {
+ MenuEntry(
+ icon = R.drawable.stream_video_ic_fullscreen_exit,
+ label = "Toggle audio filter",
+ onClick = {
if (call.audioFilter == null) {
call.audioFilter = object : AudioFilter {
override fun filter(
@@ -200,26 +198,14 @@ internal fun SettingsMenu(
call.audioFilter = null
}
},
- ) {
- Icon(
- painter = painterResource(
- id = R.drawable.stream_video_ic_fullscreen_exit,
- ),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Toggle audio filter",
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
+ )
Spacer(modifier = Modifier.height(12.dp))
- Row(
- modifier = Modifier.clickable {
+ MenuEntry(
+ icon = R.drawable.stream_video_ic_fullscreen_exit,
+ label = "Restart Subscriber Ice",
+ onClick = {
call.debug.restartSubscriberIce()
onDismissed.invoke()
Toast.makeText(
@@ -228,26 +214,14 @@ internal fun SettingsMenu(
Toast.LENGTH_SHORT,
).show()
},
- ) {
- Icon(
- painter = painterResource(
- id = R.drawable.stream_video_ic_fullscreen_exit,
- ),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Restart Subscriber Ice",
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
+ )
Spacer(modifier = Modifier.height(12.dp))
- Row(
- modifier = Modifier.clickable {
+ MenuEntry(
+ icon = R.drawable.stream_video_ic_fullscreen_exit,
+ label = "Restart Publisher Ice",
+ onClick = {
call.debug.restartPublisherIce()
onDismissed.invoke()
Toast.makeText(
@@ -256,26 +230,14 @@ internal fun SettingsMenu(
Toast.LENGTH_SHORT,
).show()
},
- ) {
- Icon(
- painter = painterResource(
- id = R.drawable.stream_video_ic_fullscreen_exit,
- ),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Restart Publisher Ice",
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
+ )
Spacer(modifier = Modifier.height(12.dp))
- Row(
- modifier = Modifier.clickable {
+ MenuEntry(
+ icon = R.drawable.stream_video_ic_fullscreen_exit,
+ label = "Kill SFU WS",
+ onClick = {
call.debug.doFullReconnection()
onDismissed.invoke()
Toast.makeText(
@@ -284,66 +246,41 @@ internal fun SettingsMenu(
Toast.LENGTH_SHORT,
).show()
},
- ) {
- Icon(
- painter = painterResource(
- id = R.drawable.stream_video_ic_fullscreen_exit,
- ),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Kill SFU WS",
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
+ )
Spacer(modifier = Modifier.height(12.dp))
- Row(
- modifier = Modifier.clickable {
+ MenuEntry(
+ icon = R.drawable.stream_video_ic_fullscreen,
+ label = "Switch sfu",
+ onClick = {
call.debug.switchSfu()
onDismissed.invoke()
Toast.makeText(context, "Switch sfu", Toast.LENGTH_SHORT).show()
},
- ) {
- Icon(
- painter = painterResource(id = R.drawable.stream_video_ic_fullscreen),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Switch sfu",
- color = VideoTheme.colors.textHighEmphasis,
- )
- }
- }
-
- Spacer(modifier = Modifier.height(12.dp))
-
- Row(
- modifier = Modifier.clickable {
- onDismissed.invoke()
- onDisplayAvailableDevice.invoke()
- },
- ) {
- Icon(
- painter = painterResource(id = R.drawable.stream_video_ic_mic_on),
- tint = VideoTheme.colors.textHighEmphasis,
- contentDescription = null,
- )
-
- Text(
- modifier = Modifier.padding(start = 20.dp),
- text = "Switch Microphone",
- color = VideoTheme.colors.textHighEmphasis,
)
}
}
}
}
}
+
+@Composable
+private fun MenuEntry(
+ @DrawableRes icon: Int,
+ label: String,
+ onClick: () -> Unit,
+) {
+ Row(modifier = Modifier.clickable(onClick = onClick)) {
+ Icon(
+ painter = painterResource(id = icon),
+ tint = VideoTheme.colors.textHighEmphasis,
+ contentDescription = null,
+ )
+ Text(
+ modifier = Modifier.padding(start = 12.dp, top = 2.dp),
+ text = label,
+ color = VideoTheme.colors.textHighEmphasis,
+ )
+ }
+}
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/barcode/BardcodeScanner.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/barcode/BardcodeScanner.kt
index d3f5d113ef..6c61ac7157 100644
--- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/barcode/BardcodeScanner.kt
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/barcode/BardcodeScanner.kt
@@ -30,6 +30,7 @@ import androidx.camera.view.PreviewView
import androidx.compose.foundation.Canvas
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.BoxScope
+import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
@@ -40,6 +41,7 @@ import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Cancel
import androidx.compose.material.icons.outlined.Cancel
import androidx.compose.runtime.Composable
+import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.remember
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
@@ -55,6 +57,9 @@ import androidx.compose.ui.tooling.preview.Preview
import androidx.compose.ui.unit.dp
import androidx.compose.ui.viewinterop.AndroidView
import androidx.core.content.ContextCompat
+import com.google.accompanist.permissions.ExperimentalPermissionsApi
+import com.google.accompanist.permissions.PermissionStatus
+import com.google.accompanist.permissions.rememberPermissionState
import com.google.android.gms.tasks.OnSuccessListener
import com.google.firebase.analytics.FirebaseAnalytics
import com.google.mlkit.vision.barcode.BarcodeScanner
@@ -66,9 +71,11 @@ import io.getstream.video.android.DeeplinkingActivity
import io.getstream.video.android.R
import io.getstream.video.android.analytics.FirebaseEvents
import io.getstream.video.android.compose.theme.VideoTheme
+import io.getstream.video.android.ui.theme.StreamButton
import java.util.concurrent.Executor
import java.util.concurrent.Executors
+@kotlin.OptIn(ExperimentalPermissionsApi::class)
@Composable
internal fun BarcodeScanner(navigateBack: () -> Unit = {}) {
val executor: Executor = Executors.newSingleThreadExecutor()
@@ -84,32 +91,72 @@ internal fun BarcodeScanner(navigateBack: () -> Unit = {}) {
processImageProxy(imageProxy, barcodeScanner, qrCodeCallback)
}
}
- val color = VideoTheme.colors.primaryAccent
- Box(modifier = Modifier.fillMaxSize()) {
- CameraPreview(imageAnalysis = imageAnalysis)
- CornerRectWithArcs(color = color, cornerRadius = 32f, strokeWidth = 12f)
- IconButton(
- modifier = Modifier
- .align(Alignment.TopStart)
- .padding(8.dp),
- onClick = {
- navigateBack()
- },
- ) {
- Icon(
- imageVector = Icons.Filled.Cancel,
- contentDescription = null,
- tint = Color.White,
- )
+
+ // Camera permission
+ val cameraPermissionState = rememberPermissionState(
+ android.Manifest.permission.CAMERA,
+ )
+
+ when (val cameraPermissionStatus = cameraPermissionState.status) {
+ PermissionStatus.Granted -> {
+ val color = VideoTheme.colors.primaryAccent
+ Box(modifier = Modifier.fillMaxSize()) {
+ CameraPreview(imageAnalysis = imageAnalysis)
+ CornerRectWithArcs(color = color, cornerRadius = 32f, strokeWidth = 12f)
+ IconButton(
+ modifier = Modifier
+ .align(Alignment.TopStart)
+ .padding(8.dp),
+ onClick = {
+ navigateBack()
+ },
+ ) {
+ Icon(
+ imageVector = Icons.Filled.Cancel,
+ contentDescription = null,
+ tint = Color.White,
+ )
+ }
+ Text(
+ modifier = Modifier
+ .align(Alignment.TopCenter)
+ .padding(8.dp),
+ textAlign = TextAlign.Center,
+ color = Color.White,
+ text = stringResource(id = R.string.scan_qr_code_to_enter),
+ )
+ }
+ }
+
+ is PermissionStatus.Denied -> {
+ Box(
+ modifier = Modifier.fillMaxSize(),
+ ) {
+ Column(modifier = Modifier.align(Alignment.Center)) {
+ if (cameraPermissionStatus.shouldShowRationale) {
+ Text(
+ modifier = Modifier
+ .align(Alignment.CenterHorizontally)
+ .padding(8.dp),
+ textAlign = TextAlign.Center,
+ color = Color.White,
+ text = stringResource(
+ id = io.getstream.video.android.ui.common.R.string.stream_video_permissions_title,
+ ),
+ )
+ StreamButton(
+ modifier = Modifier.align(Alignment.CenterHorizontally),
+ text = "Request permission",
+ onClick = { cameraPermissionState.launchPermissionRequest() },
+ )
+ } else {
+ LaunchedEffect(key1 = "") {
+ cameraPermissionState.launchPermissionRequest()
+ }
+ }
+ }
+ }
}
- Text(
- modifier = Modifier
- .align(Alignment.TopCenter)
- .padding(8.dp),
- textAlign = TextAlign.Center,
- color = Color.White,
- text = stringResource(id = R.string.scan_qr_code_to_enter),
- )
}
}
diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/util/BlurredBackgroundVideoFilter.kt b/demo-app/src/main/kotlin/io/getstream/video/android/util/BlurredBackgroundVideoFilter.kt
new file mode 100644
index 0000000000..71a18a5a0b
--- /dev/null
+++ b/demo-app/src/main/kotlin/io/getstream/video/android/util/BlurredBackgroundVideoFilter.kt
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.util
+
+import android.graphics.Bitmap
+import android.graphics.Canvas
+import android.graphics.Matrix
+import com.google.android.gms.tasks.Tasks
+import com.google.android.renderscript.Toolkit
+import com.google.mlkit.vision.common.InputImage
+import com.google.mlkit.vision.segmentation.Segmentation
+import com.google.mlkit.vision.segmentation.SegmentationMask
+import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions
+
+/**
+ * Applies a blur effect to the background of a video frame.
+ *
+ * Note that this filter is still in beta and may not work as expected. To tweak it, see constants at bottom of file.
+ *
+ * To do:
+ * - For better performance research the [Android built-in accelerated image operations](https://developer.android.com/guide/topics/renderscript/migrate#image_blur_on_android_12_rendered_into_a_bitmap).
+ * - Determine what is available for which Android version (Toolkit library vs built-in operations).
+ */
+class BlurredBackgroundVideoFilter {
+ private val options =
+ SelfieSegmenterOptions.Builder()
+ .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE)
+ .enableRawSizeMask()
+ .build()
+ private val segmenter = Segmentation.getClient(options)
+
+ private lateinit var segmentationMask: SegmentationMask
+ private val onlyBackgroundBitmap by lazy {
+ Bitmap.createBitmap(
+ segmentationMask.width,
+ segmentationMask.height,
+ Bitmap.Config.ARGB_8888,
+ )
+ }
+
+ fun applyFilter(bitmap: Bitmap) {
+ val mlImage = InputImage.fromBitmap(bitmap, 0)
+ val task = segmenter.process(mlImage)
+ segmentationMask = Tasks.await(task)
+
+ copySegment(
+ segment = Segment.BACKGROUND,
+ source = bitmap,
+ destination = onlyBackgroundBitmap,
+ segmentationMask = segmentationMask,
+ )
+
+ val blurredBackgroundBitmap = Toolkit.blur(onlyBackgroundBitmap, BLUR_RADIUS.toInt())
+ val canvas = Canvas(bitmap)
+ val matrix = newMatrix(bitmap, segmentationMask)
+
+ canvas.drawBitmap(blurredBackgroundBitmap, matrix, null)
+ }
+
+ private fun copySegment(
+ segment: Segment,
+ source: Bitmap,
+ destination: Bitmap,
+ segmentationMask: SegmentationMask,
+ ) {
+ val scaleBetweenSourceAndMask = getScalingFactors(
+ widths = Pair(source.width, segmentationMask.width),
+ heights = Pair(source.height, segmentationMask.height),
+ )
+
+ segmentationMask.buffer.rewind()
+
+ val sourcePixels = IntArray(source.width * source.height)
+ source.getPixels(sourcePixels, 0, source.width, 0, 0, source.width, source.height)
+ val destinationPixels = IntArray(destination.width * destination.height)
+
+ for (y in 0 until segmentationMask.height) {
+ for (x in 0 until segmentationMask.width) {
+ val confidence = segmentationMask.buffer.float
+
+ if (((segment == Segment.BACKGROUND) && confidence.isBackground()) ||
+ ((segment == Segment.FOREGROUND) && !confidence.isBackground())
+ ) {
+ val scaledX = (x * scaleBetweenSourceAndMask.first).toInt()
+ val scaledY = (y * scaleBetweenSourceAndMask.second).toInt()
+ destinationPixels[y * destination.width + x] = sourcePixels[scaledY * source.width + scaledX]
+ }
+ }
+ }
+
+ destination.setPixels(
+ destinationPixels,
+ 0,
+ destination.width,
+ 0,
+ 0,
+ destination.width,
+ destination.height,
+ )
+ }
+
+ private enum class Segment {
+ FOREGROUND, BACKGROUND
+ }
+
+ private fun getScalingFactors(widths: Pair, heights: Pair) =
+ Pair(widths.first.toFloat() / widths.second, heights.first.toFloat() / heights.second)
+
+ private fun newMatrix(bitmap: Bitmap, mask: SegmentationMask): Matrix {
+ val isRawSizeMaskEnabled = mask.width != bitmap.width || mask.height != bitmap.height
+ return if (!isRawSizeMaskEnabled) {
+ Matrix()
+ } else {
+ val scale =
+ getScalingFactors(Pair(bitmap.width, mask.width), Pair(bitmap.height, mask.height))
+ Matrix().apply { preScale(scale.first, scale.second) }
+ }
+ }
+}
+
+private fun Float.isBackground() = this <= BACKGROUND_UPPER_CONFIDENCE
+
+private const val BACKGROUND_UPPER_CONFIDENCE = 0.999 // 1 is max confidence that pixel is in the foreground
+private const val BLUR_RADIUS = 10f // Set the radius of the Blur. Supported range 0 < radius <= 25
diff --git a/demo-app/src/main/res/drawable/ic_blur_off.xml b/demo-app/src/main/res/drawable/ic_blur_off.xml
new file mode 100644
index 0000000000..434201fd57
--- /dev/null
+++ b/demo-app/src/main/res/drawable/ic_blur_off.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
diff --git a/demo-app/src/main/res/drawable/ic_blur_on.xml b/demo-app/src/main/res/drawable/ic_blur_on.xml
new file mode 100644
index 0000000000..5b2260d335
--- /dev/null
+++ b/demo-app/src/main/res/drawable/ic_blur_on.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
diff --git a/demo-app/src/main/res/drawable/ic_launcher_background.xml b/demo-app/src/main/res/drawable/ic_launcher_background.xml
index 1eb8841954..658e46c180 100644
--- a/demo-app/src/main/res/drawable/ic_launcher_background.xml
+++ b/demo-app/src/main/res/drawable/ic_launcher_background.xml
@@ -16,10 +16,13 @@
-->
+
@@ -30,7 +33,7 @@
android:endY="107"
android:type="linear">
-
+
diff --git a/demo-app/src/main/res/drawable/ic_launcher_foreground.xml b/demo-app/src/main/res/drawable/ic_launcher_foreground.xml
index 642ab06c3e..8c98d479e2 100644
--- a/demo-app/src/main/res/drawable/ic_launcher_foreground.xml
+++ b/demo-app/src/main/res/drawable/ic_launcher_foreground.xml
@@ -15,8 +15,8 @@
limitations under the License.
-->
-
-
-
-
-
+
+
+
+
+
-
diff --git a/demo-app/src/main/res/drawable/ic_mic.xml b/demo-app/src/main/res/drawable/ic_mic.xml
new file mode 100644
index 0000000000..004fc14cf3
--- /dev/null
+++ b/demo-app/src/main/res/drawable/ic_mic.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
diff --git a/demo-app/src/main/res/mipmap-hdpi/ic_launcher.png b/demo-app/src/main/res/mipmap-hdpi/ic_launcher.png
index 2d689462dc..f4ae9ce160 100644
Binary files a/demo-app/src/main/res/mipmap-hdpi/ic_launcher.png and b/demo-app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/demo-app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/demo-app/src/main/res/mipmap-hdpi/ic_launcher_round.png
index 65f905d019..e2df88d0a7 100644
Binary files a/demo-app/src/main/res/mipmap-hdpi/ic_launcher_round.png and b/demo-app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/main/res/mipmap-mdpi/ic_launcher.png b/demo-app/src/main/res/mipmap-mdpi/ic_launcher.png
index ea6a9bb59c..402f6f2f3a 100644
Binary files a/demo-app/src/main/res/mipmap-mdpi/ic_launcher.png and b/demo-app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/demo-app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/demo-app/src/main/res/mipmap-mdpi/ic_launcher_round.png
index ef630f7799..8936d0d573 100644
Binary files a/demo-app/src/main/res/mipmap-mdpi/ic_launcher_round.png and b/demo-app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/main/res/mipmap-xhdpi/ic_launcher.png b/demo-app/src/main/res/mipmap-xhdpi/ic_launcher.png
index b2ff841165..f5616810c9 100644
Binary files a/demo-app/src/main/res/mipmap-xhdpi/ic_launcher.png and b/demo-app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/demo-app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/demo-app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
index be16c37159..5ca8d40d3b 100644
Binary files a/demo-app/src/main/res/mipmap-xhdpi/ic_launcher_round.png and b/demo-app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher.png
index 3cc6ce3174..bb399efa25 100644
Binary files a/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher.png and b/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
index 3832f0a23e..e59e59ccb5 100644
Binary files a/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png and b/demo-app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ
diff --git a/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
index bcfa61acfb..dc5614549d 100644
Binary files a/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher.png and b/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ
diff --git a/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
index 5c39d0bfef..ca34a7422e 100644
Binary files a/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png and b/demo-app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ
diff --git a/docusaurus/docs/Android/01-basics/01-introduction.mdx b/docusaurus/docs/Android/01-basics/01-introduction.mdx
index 658ecaa4ba..1ee91c8952 100644
--- a/docusaurus/docs/Android/01-basics/01-introduction.mdx
+++ b/docusaurus/docs/Android/01-basics/01-introduction.mdx
@@ -12,9 +12,9 @@ Moreover, all calls are routed through Stream's global edge network, thereby ens
If you're new to Stream Video SDK, we recommend starting with the following three tutorials:
-* ** [Video & Audio Calling Tutorial](../02-tutorials/01-video-calling.mdx) **
-* ** [Audio Room Tutorial](../02-tutorials/02-audio-room.mdx) **
-* ** [Livestream Tutorial](../02-tutorials/03-livestream.mdx) **
+* ** [Video & Audio Calling Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/) **
+* ** [Audio Room Tutorial](https://getstream.io/video/sdk/android/tutorial/audio-room/) **
+* ** [Livestream Tutorial](https://getstream.io/video/sdk/android/tutorial/livestreaming/) **
After the tutorials the documentation explains how to use the
diff --git a/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx b/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx
deleted file mode 100644
index b2f9009bdc..0000000000
--- a/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx
+++ /dev/null
@@ -1,454 +0,0 @@
----
-title: How to Build an Android Video Calling App
-description: How to build a video call similar to Zoom or facebook messenger
----
-
-import { TokenSnippet } from '../../../shared/_tokenSnippet.jsx';
-
-This tutorial teaches you how to build Zoom/Whatsapp style video calling for your app.
-
-* Calls run on Stream's global edge network for optimal latency & reliability.
-* Permissions give you fine grained control over who can do what.
-* Video quality and codecs are automatically optimized.
-* Powered by Stream's [Video Calling API](https://getstream.io/video/).
-
-### Step 1 - Create a new project in Android Studio
-
-1. Create a new project
-2. Select Phone & Tablet -> **Empty Activity**
-3. Name your project **VideoCall**.
-
-Note that this tutorial was written using Android Studio Giraffe. Setup steps can vary slightly across Android Studio versions.
-We recommend using Android Studio Giraffe or newer.
-
-### Step 2 - Install the SDK & Setup the client
-
-**Add the Video Compose SDK** and [Jetpack Compose](https://developer.android.com/jetpack/compose) dependencies to your app's `build.gradle.kts` file found in `app/build.gradle.kts`.
-If you're new to android, note that there are 2 `build.gradle` files, you want to open the `build.gradle` in the app folder.
-
-
-
-```kotlin
-dependencies {
- // Stream Video Compose SDK
- implementation("io.getstream:stream-video-android-ui-compose:0.4.2")
-
- // Optionally add Jetpack Compose if Android studio didn't automatically include them
- implementation(platform("androidx.compose:compose-bom:2023.08.00"))
- implementation("androidx.activity:activity-compose:1.7.2")
- implementation("androidx.compose.ui:ui")
- implementation("androidx.compose.ui:ui-tooling")
- implementation("androidx.compose.runtime:runtime")
- implementation("androidx.compose.foundation:foundation")
- implementation("androidx.compose.material:material")
-}
-```
-
-There are 2 versions of Stream's SDK.
-
-- **Video Compose SDK**: `io.getstream:stream-video-android-ui-compose` dependency that includes the video core SDK + compose UI components.
-- **Video Core SDK**: `io.getstream:stream-video-android-core` that only includes the core parts of the video SDK.
-
-For this tutorial, we'll use the compose UI components.
-
-### Step 3 - Create & Join a call
-
-To keep this tutorial short and easy to understand we'll place all code in `MainActivity.kt`.
-For a production app you'd want to initialize the client in your Application class or DI module.
-You'd also want to use a viewmodel.
-
-Open up `MainActivity.kt` and replace the **MainActivity** class with:
-
-```kotlin
-class MainActivity : ComponentActivity() {
- override fun onCreate(savedInstanceState: Bundle?) {
- super.onCreate(savedInstanceState)
-
- val userToken = "REPLACE_WITH_TOKEN"
- val userId = "REPLACE_WITH_USER_ID"
- val callId = "REPLACE_WITH_CALL_ID"
-
- // step1 - create a user.
- val user = User(
- id = userId, // any string
- name = "Tutorial" // name and image are used in the UI
- )
-
- // step2 - initialize StreamVideo. For a production app we recommend adding the client to your Application class or di module.
- val client = StreamVideoBuilder(
- context = applicationContext,
- apiKey = "hd8szvscpxvd", // demo API key
- geo = GEO.GlobalEdgeNetwork,
- user = user,
- token = userToken,
- ).build()
-
- // step3 - join a call, which type is `default` and id is `123`.
- val call = client.call("default", callId)
- lifecycleScope.launch {
- val result = call.join(create = true)
- result.onError {
- Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show()
- }
- }
-
- setContent {
- // step4 - apply VideoTheme
- VideoTheme {
- // step5 - define required properties.
- val participants by call.state.participants.collectAsState()
- val connection by call.state.connection.collectAsState()
-
- // step6 - render texts that display connection status.
- Box(
- contentAlignment = Alignment.Center,
- modifier = Modifier.fillMaxSize()
- ) {
- if (connection != RealtimeConnection.Connected) {
- Text("loading...", fontSize = 30.sp)
- } else {
- Text("Call ${call.id} has ${participants.size} participants", fontSize = 30.sp)
- }
- }
- }
- }
- }
-}
-```
-
-To actually run this sample, we need a valid user token. The user token is typically generated by your server side API.
-When a user logs in to your app you return the user token that gives them access to the call.
-To make this tutorial easier to follow we'll generate a user token for you:
-
-Please update **REPLACE_WITH_USER_ID**, **REPLACE_WITH_TOKEN** and **REPLACE_WITH_CALL_ID** with the actual values shown below:
-
-
-
-Now when you run the sample app it will connect successfully.
-The text will say "call ... has 1 participant" (yourself).
-Let's review what we did in the above code.
-
-**Create a user**. First we create a user object.
-You typically sync these users via a server side integration from your own backend.
-Alternatively, you can also use guest or anonymous users.
-
-```kotlin
-val user = User(
- id = userId, // any string
- name = "Tutorial" // name and image are used in the UI
-)
-```
-
-**Initialize the Stream Client**. Next we initialize the client by passing the API Key, user and user token.
-
-```kotlin
- val client = StreamVideoBuilder(
- context = applicationContext,
- apiKey = "hd8szvscpxvd", // demo API key
- geo = GEO.GlobalEdgeNetwork,
- user = user,
- token = userToken,
-).build()
-```
-
-**Create and Join Call**. After the user and client are created, we create a call like this:
-
-```kotlin
-val call = client.call("default", callId)
-lifecycleScope.launch {
- val result = call.join(create = true)
- result.onError {
- Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show()
- }
-}
-```
-
-As soon as you use `call.join` the connection for video & audio is setup.
-
-Lastly, the UI is rendered by observing `call.state` (participants and connection states):
-
-```kotlin
-val participants by call.state.participants.collectAsState()
-val connection by call.state.connection.collectAsState()
-```
-
-You'll find all relevant state for the call in `call.state` and `call.state.participants`.
-The documentation on [Call state and Participant state](../03-guides/03-call-and-participant-state.mdx) explains this in further detail.
-
-### Step 4 - Joining from the web
-
-To make this a little more interactive, let's join the call from your browser.
-
-
-
-On your Android device, you'll see the text update to 2 participants.
-Let's keep the browser tab open as you go through the tutorial.
-
-### Step 5 - Rendering Video
-
-In this next step we're going to:
-
-1. Request Android Runtime permissions (to capture video and audio)
-2. Render your local & remote participant video
-
-#### A. Requesting Android Runtime Permissions
-
-To capture the microphone and camera output we need to request [Android runtime permissions](https://source.android.com/docs/core/permissions/runtime_perms).
-In `MainActivity.kt` just below setContent add the line `LaunchCallPermissions(call = call)`:
-
-```kotlin
-setContent {
- LaunchCallPermissions(call = call)
- ...
-}
-```
-
-The launch call permissions will request permissions when you open the call.
-Review the [permissions docs](../05-ui-cookbook/08-permission-requests.mdx) to learn more about how you can easily request permissions.
-
-#### B. Render the video
-
-In the `MainActivity.kt` file, replace the code inside `setContent` code with the example below:
-
-```kotlin
-setContent {
- LaunchCallPermissions(call = call)
-
- VideoTheme {
- val remoteParticipants by call.state.remoteParticipants.collectAsState()
- val remoteParticipant = remoteParticipants.firstOrNull()
- val me by call.state.me.collectAsState()
- val connection by call.state.connection.collectAsState()
- var parentSize: IntSize by remember { mutableStateOf(IntSize(0, 0)) }
-
- Box(
- contentAlignment = Alignment.Center,
- modifier = Modifier
- .fillMaxSize()
- .background(VideoTheme.colors.appBackground)
- .onSizeChanged { parentSize = it }
- ) {
- if (remoteParticipant != null) {
- val remoteVideo by remoteParticipant.video.collectAsState()
-
- Column(modifier = Modifier.fillMaxSize()) {
- VideoRenderer(
- modifier = Modifier.weight(1f),
- call = call,
- video = remoteVideo
- )
- }
- } else {
- if (connection != RealtimeConnection.Connected) {
- Text(
- text = "loading...",
- fontSize = 30.sp,
- color = VideoTheme.colors.textHighEmphasis
- )
- } else {
- Text(
- modifier = Modifier.padding(30.dp),
- text = "Join call ${call.id} in your browser to see the video here",
- fontSize = 30.sp,
- color = VideoTheme.colors.textHighEmphasis,
- textAlign = TextAlign.Center
- )
- }
- }
-
- // floating video UI for the local video participant
- me?.let { localVideo ->
- FloatingParticipantVideo(
- modifier = Modifier.align(Alignment.TopEnd),
- call = call,
- participant = localVideo,
- parentBounds = parentSize
- )
- }
- }
- }
-}
-```
-
-Now when you run the app, you'll see your local video in a floating video element and the video from your browser.
-The end result should look somewhat like this:
-
-![Video Tutorial](../assets/portrait-video-two.png)
-
-Let's review the changes we made.
-
-**[VideoRenderer](../04-ui-components/02-video-renderer.mdx)** is one of our primary low-level components.
-
-```kotlin
-VideoRenderer(
- modifier = Modifier.weight(1f),
- call = call,
- video = remoteVideo?.value
-)
-```
-
-It only displays the video and doesn't add any other UI elements.
-The video is lazily loaded, and only requested from the video infrastructure if you're actually displaying it.
-So if you have a video call with 200 participants, and you show only 10 of them, you'll only receive video for 10 participants.
-This is how software like Zoom and Google Meet make large calls work.
-
-**[FloatingParticipantVideo](../04-ui-components/05-participants/03-floating-participant-video.mdx)** renders a draggable display of your own video.
-
-```kotlin
-FloatingParticipantVideo(
- modifier = Modifier.align(Alignment.TopEnd),
- call = call,
- participant = me!!,
- parentBounds = parentSize
-)
-```
-
-### Step 6 - A Full Video Calling UI
-
-The above example showed how to use the call state object and compose to build a basic video UI.
-For a production version of calling you'd want a few more UI elements:
-
-* Indicators of when someone is speaking
-* Quality of their network
-* Layout support for >2 participants
-* Labels for the participant names
-* Call header and controls
-
-Stream ships with several Compose components to make this easy.
-You can customize the components with theming, arguments and swapping parts of them.
-This is convenient if you want to quickly build a production ready calling experience for you app.
-(and if you need more flexibility, many customers use the above low level approach to build a UI from scratch)
-
-To render a full calling UI, we'll leverage the [CallContent](../04-ui-components/04-call/01-call-content.mdx) component.
-This includes sensible defaults for a call header, video grid, call controls, picture-in-picture, and everything that you need to build a video call screen.
-
-Open `MainActivity.kt`, and update the code inside of `VideoTheme` to use the `CallContent`.
-The code will be a lot smaller than before since all UI logic is handled in the `CallContent`:
-
-```kotlin
-VideoTheme {
- CallContent(
- modifier = Modifier.fillMaxSize(),
- call = call,
- onBackPressed = { onBackPressed() },
- )
-}
-```
-
-The result will be:
-
-![Compose Content](../assets/compose_call_container.png)
-
-When you now run your app, you'll see a more polished video UI.
-It supports reactions, screensharing, active speaker detection, network quality indicators etc.
-The most commonly used UI components are:
-
-- **[VideoRenderer](../04-ui-components/02-video-renderer.mdx)**: For rendering video and automatically requesting video tracks when needed. Most of the Video components are built on top of this.
-- **[ParticipantVideo](../04-ui-components/05-participants/01-participant-video.mdx)**: The participant's video + some UI elements for network quality, reactions, speaking etc.
-- **[ParticipantsGrid](../04-ui-components/05-participants/02-participants-grid.mdx)**: A grid of participant video elements.
-- **[FloatingParticipantVideo](../04-ui-components/05-participants/03-floating-participant-video.mdx)**: A draggable version of the participant video. Typically used for your own video.
-- **[ControlActions](../05-ui-cookbook/02-control-actions.mdx)**: A set of buttons for controlling your call, such as changing audio and video states.
-- **[RingingCallContent](../04-ui-components/04-call/04-ringing-call.mdx)**: UI for displaying incoming and outgoing calls.
-
-The full list of **[UI components](../04-ui-components/01-overview.mdx)** is available in the docs.
-
-### Step 7 - Customizing the UI
-
-You can customize the UI by:
-
-* Building your own UI components (the most flexibility, build anything).
-* Mixing and matching with Stream's UI Components (speeds up how quickly you can build common video UIs).
-* Theming (basic customization of colors, fonts etc).
-
-The example below shows how to swap out the call controls for your own controls:
-
-```kotlin
-override fun onCreate(savedInstanceState: Bundle?) {
- super.onCreate(savedInstanceState)
-
- lifecycleScope.launch {
- val result = call.join(create = true)
- result.onError {
- Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show()
- }
- }
-
- setContent {
- VideoTheme {
- val isCameraEnabled by call.camera.isEnabled.collectAsState()
- val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState()
-
- CallContent(
- modifier = Modifier.background(color = VideoTheme.colors.appBackground),
- call = call,
- onBackPressed = { onBackPressed() },
- controlsContent = {
- ControlActions(
- call = call,
- actions = listOf(
- {
- ToggleCameraAction(
- modifier = Modifier.size(52.dp),
- isCameraEnabled = isCameraEnabled,
- onCallAction = { call.camera.setEnabled(it.isEnabled) }
- )
- },
- {
- ToggleMicrophoneAction(
- modifier = Modifier.size(52.dp),
- isMicrophoneEnabled = isMicrophoneEnabled,
- onCallAction = { call.microphone.setEnabled(it.isEnabled) }
- )
- },
- {
- FlipCameraAction(
- modifier = Modifier.size(52.dp),
- onCallAction = { call.camera.flip() }
- )
- },
- )
- )
- }
- )
- }
- }
-}
-```
-
-Stream's Video SDK provides fully polished UI components, allowing you to build a video call quickly and customize them. As you've seen before, you can implement a full complete video call screen with `CallContent` composable in Jetpack Compose. The `CallContent` composable consists of three major parts below:
-
-- **appBarContent**: Content is shown that calls information or additional actions.
-- **controlsContent**: Content is shown that allows users to trigger different actions to control a joined call.
-- **videoContent**: Content shown to be rendered when we're connected to a call successfully.
-
-Theming gives you control over the colors and fonts.
-
-```kotlin
-VideoTheme(
- colors = StreamColors.defaultColors().copy(appBackground = Color.Black),
- dimens = StreamDimens.defaultDimens().copy(callAvatarSize = 72.dp),
- typography = StreamTypography.defaultTypography().copy(title1 = TextStyle()),
- shapes = StreamShapes.defaultShapes().copy(avatar = CircleShape)
-) {
- ..
-}
-```
-
-### Recap
-
-Please do let us know if you ran into any issues while building an video calling app with Kotlin.
-Our team is also happy to review your UI designs and offer recommendations on how to achieve it with Stream.
-
-To recap what we've learned about android video calling:
-
-* You setup a call: (val call = client.call("default", "123"))
-* The call type ("default" in the above case) controls which features are enabled and how permissions are setup
-* When you join a call, realtime communication is setup for audio & video calling: (call.join())
-* Stateflow objects in call.state and call.state.participants make it easy to build your own UI
-* VideoRenderer is the low level component that renders video
-
-We've used Stream's [Video Calling API](https://getstream.io/video/), which means calls run on a global edge network of video servers.
-By being closer to your users the latency and reliability of calls are better.
-The kotlin SDK enables you to build in-app video calling, audio rooms and livestreaming in days.
-
-We hope you've enjoyed this tutorial and please do feel free to reach out if you have any suggestions or questions.
diff --git a/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx b/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx
deleted file mode 100644
index d50ecc0a41..0000000000
--- a/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx
+++ /dev/null
@@ -1,535 +0,0 @@
----
-title: How to Build an Android Audio Room with Kotlin
-description: How to build an audio room using Stream's video SDKs
----
-
-import { TokenSnippet } from '../../../shared/_tokenSnippet.jsx';
-
-This tutorial will teach you how to build an audio room experience like Twitter Spaces or Clubhouse.
-The end result will look like the image below and support the following features:
-
-* Backstage mode. You can start the call with your co-hosts and chat a bit before going live.
-* Calls run on Stream's global edge network for optimal latency and scalability.
-* There is no cap to how many listeners you can have in a room.
-* Listeners can raise their hand, and be invited to speak by the host.
-* Audio tracks are sent multiple times for optimal reliability.
-
-![Audio Room](../assets/audio-room.png)
-
-Time to get started building an audio-room for your app.
-
-### Step 1 - Create a new project in Android Studio
-
-Note that this tutorial was written using Android Studio Giraffe. Setup steps can vary slightly across Android Studio versions.
-We recommend using Android Studio Giraffe or newer.
-
-1. Create a new project
-2. Select Phone & Tablet -> **Empty Activity**
-3. Name your project **AudioRoom**.
-
-### Step 2 - Install the SDK & Setup the client
-
-**Add the Video Compose SDK** and [Jetpack Compose](https://developer.android.com/jetpack/compose) dependencies to your app's `build.gradle.kts` file found in `app/build.gradle.kts`.
-If you're new to android, note that there are 2 `build.gradle` files, you want to open the `build.gradle` in the app folder.
-
-```groovy
-dependencies {
- // Stream Video Compose SDK
- implementation("io.getstream:stream-video-android-ui-compose:0.4.2")
-
- // Jetpack Compose (optional/ android studio typically adds them when you create a new project)
- implementation(platform("androidx.compose:compose-bom:2023.08.00"))
- implementation("androidx.activity:activity-compose:1.7.2")
- implementation("androidx.compose.ui:ui")
- implementation("androidx.compose.ui:ui-tooling")
- implementation("androidx.compose.runtime:runtime")
- implementation("androidx.compose.foundation:foundation")
- implementation("androidx.compose.material:material")
-}
-```
-
-There are 2 versions of Stream's SDK.
-
-- **Video Compose SDK**: `io.getstream:stream-video-android-ui-compose` dependency that includes the video core SDK + compose UI components.
-- **Video Core SDK**: `io.getstream:stream-video-android-core` that only includes the core parts of the video SDK.
-
-For this tutorial, we'll use the compose UI components.
-
-### Step 3 - Create & Join a call
-
-Open up `MainActivity.kt` and replace the **MainActivity** class with the following code:
-
-```kotlin
-class MainActivity : ComponentActivity() {
- override fun onCreate(savedInstanceState: Bundle?) {
- super.onCreate(savedInstanceState)
-
- val userToken = "REPLACE_WITH_TOKEN"
- val userId = "REPLACE_WITH_USER_ID"
- val callId = "REPLACE_WITH_CALL_ID"
-
- // step1 - create a user.
- val user = User(
- id = userId, // any string
- name = "Tutorial" // name and image are used in the UI
- )
-
- // step2 - initialize StreamVideo. For a production app we recommend adding the client to your Application class or di module.
- val client = StreamVideoBuilder(
- context = applicationContext,
- apiKey = "hd8szvscpxvd", // demo API key
- geo = GEO.GlobalEdgeNetwork,
- user = user,
- token = userToken,
- ).build()
-
- // step3 - join a call, which type is `audio_room` and id is `123`.
- val call = client.call("audio_room", callId)
- lifecycleScope.launch {
- val result = call.join(create = true, createOptions = CreateCallOptions(
- members = listOf(
- MemberRequest(userId = userId, role="host", custom = emptyMap())
- ), custom = mapOf(
- "title" to "Compose Trends",
- "description" to "Talk about how easy compose makes it to reuse and combine UI"
- )
- ))
- result.onError {
- Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show()
- }
- }
-
- setContent {
- VideoTheme {
- val connection by call.state.connection.collectAsState()
-
- Column(horizontalAlignment = Alignment.CenterHorizontally, modifier = Modifier.padding(16.dp)) {
- if (connection != RealtimeConnection.Connected) {
- Text("loading", fontSize = 30.sp)
- } else {
- Text("ready to render an audio room", fontSize = 30.sp)
- }
- }
- }
- }
- }
-}
-```
-
-To keep the tutorial short and simple to follow we've added the client, state and UI straight into the **MainActivity** class.
-For a real app, you'd typically want to use an [Application class](https://developer.android.com/reference/android/app/Application) for the client and a [ViewModel](https://developer.android.com/topic/libraries/architecture/viewmodel) for managing the state.
-
-Let's review the example above and go over the details.
-
-**Create a user**. First we create a user object.
-You typically sync your users via a server side integration from your own backend.
-Alternatively, you can also use guest or anonymous users.
-
-```kotlin
-val user = User(
- id = userId, // any string
- name = "Tutorial" // name and image are used in the UI
-)
-```
-
-**Initialize the Stream Client**. Next we initialize the client by passing the API Key, user and user token.
-
-```kotlin
- val client = StreamVideoBuilder(
- context = applicationContext,
- apiKey = "hd8szvscpxvd", // demo API key
- geo = GEO.GlobalEdgeNetwork,
- user = user,
- token = userToken,
-).build()
-```
-
-**Create and Join Call**. After the user and client are created, we create a call like this:
-
-```kotlin
-val call = client.call("audio_room", callId)
-lifecycleScope.launch {
- val result = call.join(
- create = true, createOptions = CreateCallOptions(
- members = listOf(
- MemberRequest(userId = userId, role = "host", custom = emptyMap())
- ), custom = mapOf(
- "title" to "Compose Trends",
- "description" to "Talk about how easy compose makes it to reuse and combine UI"
- )
- )
- )
- result.onError {
- Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show()
- }
-}
-```
-
-* This joins and creates a call with the type: "audio_room" and the specified callId.
-* You add yourself as a member with the "host" role. You can create custom roles and grant them permissions to fit your app.
-* The `title` and `description` custom fields are set on the call object.
-* Shows an error toast if you fail to join an audio room.
-
-To actually run this sample, we need a valid user token. The user token is typically generated by your server side API.
-When a user logs in to your app you return the user token that gives them access to the call.
-To make this tutorial easier to follow we'll generate a user token for you:
-
-Please update **REPLACE_WITH_USER_ID**, **REPLACE_WITH_TOKEN** and **REPLACE_WITH_CALL_ID** with the actual values shown below:
-
-
-
-With valid credentials in place, we can join the call.
-When you run the app you'll see the following:
-
-![Audio Room](../assets/audio-room-2.png)
-
-### Step 4 - Audio Room & Description
-
-Now that we've successfully connected to the audio room. Let's setup a basic UI and description.
-Replace the code in `setContent` with the following sample:
-
-```kotlin
-setContent {
- VideoTheme {
- val connection by call.state.connection.collectAsState()
- val activeSpeakers by call.state.activeSpeakers.collectAsState()
- val audioLevel = activeSpeakers.firstOrNull()?.audioLevel?.collectAsState()
-
- val color1 = Color.White.copy(alpha = 0.2f + (audioLevel?.value ?: 0f) * 0.8f)
- val color2 = Color.White.copy(alpha = 0.2f + (audioLevel?.value ?: 0f) * 0.8f)
-
- Column(
- horizontalAlignment = Alignment.CenterHorizontally,
- verticalArrangement = Arrangement.Top,
- modifier = Modifier
- .background(Brush.linearGradient(listOf(color1, color2)))
- .fillMaxSize()
- .fillMaxHeight()
- .padding(16.dp)
- ) {
-
- if (connection != RealtimeConnection.Connected) {
- Text("loading", fontSize = 30.sp)
- } else {
- AudioRoom(call = call)
- }
- }
- }
-}
-```
-
-All state for a call is available in `call.state`. In the example above we're observing the connection state and the active speakers.
-The [ParticipantState docs](../03-guides/03-call-and-participant-state.mdx) explain the available stateflow objects.
-
-You'll see that the **AudioRoom** composable hasn't been implemented yet. In `MainActivity`, add the following `AudioRoom` composable:
-
-```kotlin
-@Composable
-public fun AudioRoom(
- call: Call,
-){
- val custom by call.state.custom.collectAsState()
- val title = custom["title"] as? String
- val description = custom["description"] as? String
- val participants by call.state.participants.collectAsState()
- val activeSpeakers by call.state.activeSpeakers.collectAsState()
- val activeSpeaker = activeSpeakers.firstOrNull()
- val sortedParticipants by call.state.sortedParticipants.collectAsState(emptyList())
-
- val backstage by call.state.backstage.collectAsState()
- val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState()
-
- Description(title, description, participants)
-
- activeSpeaker?.let {
- Text("${it.userNameOrId.value} is speaking")
- }
-
- Column(
- modifier = Modifier
- .fillMaxHeight()
- .padding(0.dp, 32.dp, 0.dp, 0.dp)
- ) {
- Participants(
- modifier = Modifier.weight(4f),
- sortedParticipants = sortedParticipants
- )
- Controls(
- modifier = Modifier
- .weight(1f)
- .fillMaxWidth()
- .padding(16.dp), call = call,
- isMicrophoneEnabled = isMicrophoneEnabled,
- backstage = backstage,
- enableMicrophone = { call.microphone.setEnabled(it) }
- )
- }
-}
-```
-
-The code above observes the participants, active speakers and backstage stateflow objects in `call.state`.
-
-We still need to implement a **Controls**, **Participants**, and **Description** composable.
-Let's add those next.
-
-```kotlin
-@Composable
-public fun Description(
- title: String?,
- description: String?,
-    participants: List<ParticipantState>
-) {
- Text("$title", fontSize = 30.sp)
- Text("$description", fontSize = 20.sp, modifier = Modifier.padding(16.dp))
- Text("${participants.size} participants", fontSize = 20.sp)
-}
-
-@Composable
-public fun Participants(
- modifier: Modifier = Modifier,
-    sortedParticipants: List<ParticipantState>
-) {
- Text("participants todo", fontSize = 30.sp)
-}
-
-@Composable
-public fun Controls(
- modifier: Modifier = Modifier,
- call: Call,
- backstage: Boolean = false,
- isMicrophoneEnabled: Boolean = false,
- enableMicrophone: (Boolean) -> Unit = {}
-) {
- Text("controls todo", fontSize = 30.sp)
-}
-```
-
-That's it for the basics. Now when you run your app, you'll see the following UI:
-
-![Audio Room](../assets/audio-room-3.png)
-
-The approach is the same for all components. We take the states of the call by observing `call.state` properties, such as `call.state.participants` and use it to power our UI.
-The [ParticipantState docs](../03-guides/03-call-and-participant-state.mdx) exposes all the state objects we need for the name, avatar, audio levels, speaking, etc.
-
-### Step 5 - Audio Room Controls & Permission
-
-Any app that records the microphone needs to ask the user for permission. We'll do this now.
-
-To capture the microphone output, we need to request [Android runtime permissions](https://source.android.com/docs/core/permissions/runtime_perms).
-In `MainActivity.kt` just below `setContent` add the line `LaunchMicrophonePermissions(call = call)`:
-
-```kotlin
-setContent {
- LaunchMicrophonePermissions(call = call)
- ..
-}
-```
-
-The launch call permissions will request permissions when you enter the app.
-Review the [permissions docs](../05-ui-cookbook/08-permission-requests.mdx) to learn more about how you can easily request permissions.
-
-Now let's have a look at the `Controls` composable. Replace the `Controls` composable with the following:
-
-```kotlin
-@Composable
-public fun Controls(
- modifier: Modifier = Modifier,
- call: Call,
- backstage: Boolean = false,
- isMicrophoneEnabled: Boolean = false,
- enableMicrophone: (Boolean) -> Unit = {}
-){
- val scope = rememberCoroutineScope()
- Row(
- modifier = modifier,
- horizontalArrangement = Arrangement.SpaceEvenly
- ) {
- ToggleMicrophoneAction(
- modifier = Modifier.size(52.dp),
- isMicrophoneEnabled = isMicrophoneEnabled,
- onCallAction = { enableMicrophone(it.isEnabled) }
- )
-
- Button(
- onClick = {
- scope.launch {
- if (backstage) call.goLive() else call.stopLive()
- }
- }
- ) {
- Text(text = if (backstage) "Go Live" else "End")
- }
- }
-}
-```
-
-Now when you run the app, you'll see a button to disable/enable the microphone and to start or end the broadcast.
-
-To make this a little more interactive, let's join the audio room from your browser.
-
-
-
-At first you won't be allowed to join the room since it's not live yet.
-By default the audio_room call type has backstage mode enabled. This makes it easy to try out your room and talk to your co-hosts before going live.
-You can enable/disable the usage of backstage mode in the dashboard.
-
-Let's go live and join the call:
-
-* Click go live on Android
-* On web join the room
-* You'll see the participant count increase to 2
-
-### Step 6 - Participants UI
-
-Time to build a pretty UI for the participants. Replace the `Participants` composable with the following:
-
-```kotlin
-@Composable
-public fun Participants(
- modifier: Modifier = Modifier,
-    sortedParticipants: List<ParticipantState>
-){
- LazyVerticalGrid(
- modifier = modifier,
- columns = GridCells.Adaptive(minSize = 128.dp)
- ) {
- items(items = sortedParticipants, key = { it.sessionId }) { participant ->
- ParticipantAvatar(participant)
- }
- }
-}
-```
-
-The `Participants` composable is responsible for rendering all participants in the audio room as a grid list.
-Now we'll add a pretty **ParticipantAvatar** composable, which represents a user in the audio room:
-
-```kotlin
-@Composable
-public fun ParticipantAvatar(
- participant: ParticipantState,
- modifier: Modifier = Modifier
-) {
-    val user by participant.user.collectAsState()
-    val nameOrId by participant.userNameOrId.collectAsState()
- val image by participant.image.collectAsState()
- val isSpeaking by participant.speaking.collectAsState()
- val audioEnabled by participant.audioEnabled.collectAsState()
-
- Column(
- modifier = modifier,
- horizontalAlignment = Alignment.CenterHorizontally,
- verticalArrangement = Arrangement.Center
- ) {
-
- Box(modifier = Modifier.size(VideoTheme.dimens.audioAvatarSize)) {
- UserAvatar(
- userName = nameOrId,
- userImage = image,
- modifier = Modifier
- .fillMaxSize()
- .padding(VideoTheme.dimens.audioAvatarPadding)
- )
-
- if (isSpeaking) {
- Box(
- modifier = Modifier
- .fillMaxSize()
- .border(BorderStroke(2.dp, Color.Gray), CircleShape)
- )
- } else if (!audioEnabled) {
- Box(
- modifier = Modifier
- .fillMaxSize()
- .padding(VideoTheme.dimens.audioAvatarPadding)
- ) {
- Box(
- modifier = Modifier
- .clip(CircleShape)
- .background(VideoTheme.colors.appBackground)
- .size(VideoTheme.dimens.audioRoomMicSize)
- ) {
- Icon(
- modifier = Modifier
- .fillMaxSize()
- .padding(VideoTheme.dimens.audioRoomMicPadding),
- painter = painterResource(id = io.getstream.video.android.ui.common.R.drawable.stream_video_ic_mic_off),
- tint = VideoTheme.colors.errorAccent,
- contentDescription = null
- )
- }
- }
- }
- }
-
- Spacer(modifier = Modifier.height(8.dp))
-
- Text(
- modifier = Modifier.fillMaxWidth(),
- text = nameOrId,
- fontSize = 14.sp,
- fontWeight = FontWeight.Bold,
- color = VideoTheme.colors.textHighEmphasis,
- textAlign = TextAlign.Center,
- )
-
- Text(
- modifier = Modifier.fillMaxWidth(),
- text = user.role,
- fontSize = 11.sp,
- color = VideoTheme.colors.textHighEmphasis,
- textAlign = TextAlign.Center,
- )
- }
-}
-```
-
-The `ParticipantAvatar` composable represents each participant in the audio room, displays the initial of the user and the status of the microphone.
-Now when you run the app, you'll see a pretty UI for the participants.
-
-![Audio Room](../assets/audio-room-4.png)
-
-In the above example, we use the following state flow objects:
-
-```kotlin
-val user by participant.user.collectAsState()
-val nameOrId by participant.userNameOrId.collectAsState()
-val isSpeaking by participant.speaking.collectAsState()
-val audioEnabled by participant.audioEnabled.collectAsState()
-```
-
-The [ParticipantState docs](../03-guides/03-call-and-participant-state.mdx) include all the other attributes that are also available.
-For audio rooms, `participant.audioLevel` and `participant.audioLevels` can be convenient to implement an audio visualizer.
-
-### Other built-in features
-
-There are a few more exciting features that you can use to build audio rooms:
-
-- ** Requesting Permissions **: Participants can ask the host for permission to speak, share video etc
-- ** Query Calls **: You can query calls to easily show upcoming calls, calls that recently finished etc
-- ** Call Previews **: Before you join the call you can observe it and show a preview. E.g., John, Sarah and 3 others are on this call.
-- ** Reactions & Custom events **: Reactions and custom events are supported
-- ** Recording & Broadcasting **: You can record your calls, or broadcast them to HLS
-- ** Chat **: Stream's chat SDKs are fully featured and you can integrate them in the call
-- ** Moderation **: Moderation capabilities are built-in to the product
-- ** Transcriptions **: Transcriptions aren't available yet, but are coming soon
-
-### Recap
-
-It was fun to see just how quickly you can build an audio-room for your app.
-Please do let us know if you ran into any issues.
-Our team is also happy to review your UI designs and offer recommendations on how to achieve it with Stream.
-
-To recap what we've learned:
-
-* You setup a call: (val call = client.call("audio_room", "222"))
-* The call type "audio_room" controls which features are enabled and how permissions are setup
-* The audio_room by default enables "backstage" mode, and only allows admins to join before the call goes live
-* When you join a call, realtime communication is setup for audio & video calling: (call.join())
-* Stateflow objects in `call.state` and `call.state.participants` make it easy to build your own UI
-
-Calls run on Stream's global edge network of video servers.
-Being closer to your users improves the latency and reliability of calls.
-For audio rooms we use Opus RED and Opus DTX for optimal audio quality.
-
-The SDKs enable you to build audio rooms, video calling and livestreaming in days.
-
-We hope you've enjoyed this tutorial, and please do feel free to reach out if you have any suggestions or questions.
diff --git a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx b/docusaurus/docs/Android/02-tutorials/03-livestream.mdx
deleted file mode 100644
index 49182f93e0..0000000000
--- a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx
+++ /dev/null
@@ -1,391 +0,0 @@
----
-title: Livestream Tutorial
-description: How to build a livestream experience using Stream's video SDKs
----
-
-import { TokenSnippet } from '../../../shared/_tokenSnippet.jsx';
-
-In this tutorial we'll quickly build a low-latency in-app livestreaming experience.
-The livestream is broadcasted using Stream's edge network of servers around the world.
-We'll cover the following topics:
-
-* Ultra low latency streaming
-* Multiple streams & co-hosts
-* RTMP in and WebRTC input
-* Exporting to HLS
-* Reactions, custom events and chat
-* Recording & Transcriptions
-
-Let's get started, if you have any questions or feedback be sure to let us know via the feedback button.
-
-### Step 1 - Create a new project in Android Studio
-
-Note that this tutorial was written using **Android Studio Giraffe**. Setup steps can vary slightly across Android Studio versions.
-We recommend using [Android Studio Giraffe or newer](https://developer.android.com/studio/releases).
-
-1. Create a new project
-2. Select Phone & Tablet -> **Empty Activity**
-3. Name your project **Livestream**.
-
-### Step 2 - Install the SDK & Setup the client
-
-**Add the Video Compose SDK** and [Jetpack Compose](https://developer.android.com/jetpack/compose) dependencies to your app's `build.gradle.kts` file found in `app/build.gradle.kts`.
-If you're new to android, note that there are 2 `build.gradle` files, you want to open the `build.gradle` in the app folder.
-
-```kotlin
-dependencies {
- // Stream Video Compose SDK
- implementation("io.getstream:stream-video-android-ui-compose:0.4.2")
-
- // Jetpack Compose (optional/ android studio typically adds them when you create a new project)
- implementation(platform("androidx.compose:compose-bom:2023.08.00"))
- implementation("androidx.activity:activity-compose:1.7.2")
- implementation("androidx.compose.ui:ui")
- implementation("androidx.compose.ui:ui-tooling")
- implementation("androidx.compose.runtime:runtime")
- implementation("androidx.compose.foundation:foundation")
- implementation("androidx.compose.material:material")
-}
-```
-
-There are 2 versions of Stream's SDK.
-
-- **Video Compose SDK**: `io.getstream:stream-video-android-ui-compose` dependency that includes the video core SDK + compose UI components.
-- **Video Core SDK**: `io.getstream:stream-video-android-core` that only includes the core parts of the video SDK.
-
-This tutorial demonstrates the Compose Video SDK, but you have the option to use the core library without Compose based on your preference.
-
-### Step 3 - Broadcast a livestream from your phone
-
-The following code shows how to publish from your phone's camera.
-Let's open `MainActivity.kt` and replace the `MainActivity` class with the following code:
-
-```kotlin
-class MainActivity : ComponentActivity() {
- override fun onCreate(savedInstanceState: Bundle?) {
- super.onCreate(savedInstanceState)
-
- val userToken = "REPLACE_WITH_TOKEN"
- val userId = "REPLACE_WITH_USER_ID"
- val callId = "REPLACE_WITH_CALL_ID"
-
- // create a user.
- val user = User(
- id = userId, // any string
- name = "Tutorial" // name and image are used in the UI
- )
-
- // for a production app we recommend adding the client to your Application class or di module.
- val client = StreamVideoBuilder(
- context = applicationContext,
- apiKey = "hd8szvscpxvd", // demo API key
- geo = GEO.GlobalEdgeNetwork,
- user = user,
- token = userToken,
- ).build()
-
- // join a call, which type is `default`
- val call = client.call("livestream", callId)
- lifecycleScope.launch {
- // join the call
- val result = call.join(create = true)
- result.onError {
- Toast.makeText(applicationContext, "uh oh $it", Toast.LENGTH_SHORT).show()
- }
- }
-
- setContent {
- // request the Android runtime permissions for the camera and microphone
- LaunchCallPermissions(call = call)
-
- VideoTheme {
- Text("TODO: render video")
- }
- }
- }
-}
-```
-
-You'll notice that these first 3 lines need their values replaced.
-
-```kotlin
-val userToken = "REPLACE_WITH_TOKEN"
-val userId = "REPLACE_WITH_USER_ID"
-val callId = "REPLACE_WITH_CALL_ID"
-```
-
-Replace them now with the values shown below:
-
-
-
-When you run the app now you'll see a text message saying: "TODO: render video".
-Before we get around to rendering the video let's review the code above.
-
-In the first step we setup the user:
-
-```kotlin
-val user = User(
- id = userId, // any string
- name = "Tutorial" // name and image are used in the UI
-)
-```
-
-If you don't have an authenticated user you can also use a guest or anonymous user.
-For most apps it's convenient to match your own system of users to grant and remove permissions.
-
-Next we create the client:
-
-```kotlin
-val client = StreamVideoBuilder(
- context = applicationContext,
-    apiKey = "hd8szvscpxvd", // demo API key
- geo = GEO.GlobalEdgeNetwork,
- user = user,
- token = userToken,
-).build()
-```
-
-You'll see the `userToken` variable. Your backend typically generates the user token on signup or login.
-
-The most important step to review is how we create the call.
-Stream uses the same call object for livestreaming, audio rooms and video calling.
-Have a look at the code snippet below:
-
-```kotlin
-val call = client.call("livestream", callId)
-lifecycleScope.launch {
- // join the call
- val result = call.join(create = true)
- result.onError {
- Toast.makeText(applicationContext, "uh oh $it", Toast.LENGTH_SHORT).show()
- }
-}
-```
-
-To create the first call object, specify the call type as **livestream** and provide a unique **callId**. The **livestream** call type comes with default settings that are usually suitable for livestreams, but you can customize features, permissions, and settings in the dashboard. Additionally, the dashboard allows you to create new call types as required.
-
-Finally, using `call.join(create = true)` will not only create the call object on our servers but also initiate the real-time transport for audio and video. This allows for seamless and immediate engagement in the livestream.
-
-Note that you can also add members to a call and assign them different roles. For more information, see the [call creation docs](../03-guides/02-joining-creating-calls.mdx)
-
-### Step 4 - Rendering the video
-
-In this step we're going to build a UI for showing your local video with a button to start the livestream.
-This example uses Compose, but you could also use our XML VideoRenderer.
-
-In `MainActivity.kt` replace the `VideoTheme` with the following code:
-
-```kotlin
-VideoTheme {
- val connection by call.state.connection.collectAsState()
- val totalParticipants by call.state.totalParticipants.collectAsState()
- val backstage by call.state.backstage.collectAsState()
- val localParticipant by call.state.localParticipant.collectAsState()
- val video = localParticipant?.video?.collectAsState()?.value
- val duration by call.state.duration.collectAsState()
-
- androidx.compose.material.Scaffold(
- modifier = Modifier
- .fillMaxSize()
- .background(VideoTheme.colors.appBackground)
- .padding(6.dp),
- contentColor = VideoTheme.colors.appBackground,
- backgroundColor = VideoTheme.colors.appBackground,
- topBar = {
- if (connection == RealtimeConnection.Connected) {
- if (!backstage) {
- Box(
- modifier = Modifier
- .fillMaxWidth()
- .padding(6.dp)
- ) {
- Text(
- modifier = Modifier
- .align(Alignment.CenterEnd)
- .background(
- color = VideoTheme.colors.primaryAccent,
- shape = RoundedCornerShape(6.dp)
- )
- .padding(horizontal = 12.dp, vertical = 4.dp),
-                            text = "Live $totalParticipants",
- color = Color.White
- )
-
- Text(
- modifier = Modifier.align(Alignment.Center),
- text = "Live for $duration",
- color = VideoTheme.colors.textHighEmphasis
- )
- }
- }
- }
- },
- bottomBar = {
- androidx.compose.material.Button(
- colors = ButtonDefaults.buttonColors(
- contentColor = VideoTheme.colors.primaryAccent,
- backgroundColor = VideoTheme.colors.primaryAccent
- ),
- onClick = {
- lifecycleScope.launch {
- if (backstage) call.goLive() else call.stopLive()
- }
- }
- ) {
- Text(
- text = if (backstage) "Go Live" else "Stop Broadcast",
- color = Color.White
- )
- }
- }
- ) {
- VideoRenderer(
- modifier = Modifier
- .fillMaxSize()
- .padding(it)
- .clip(RoundedCornerShape(6.dp)),
- call = call,
- video = video,
- videoFallbackContent = {
- Text(text = "Video rendering failed")
- }
- )
- }
-}
-```
-
-Upon running your app, you will be greeted with an interface that looks like this:
-
-![Livestream](../assets/tutorial-livestream.png)
-
-Stream uses a technology called SFU cascading to replicate your livestream over different servers around the world.
-This makes it possible to reach a large audience in realtime.
-
-Now let's press **Go live** in the android app and click the link below to watch the video in your browser.
-
-
-
-#### State & Participants
-
-Let's take a moment to review the Compose code above. `Call.state` exposes all the stateflow objects you need.
-The [participant state docs](../03-guides/03-call-and-participant-state.mdx) show all the available fields.
-
-In this example we use:
-
-* `call.state.connection`: to show if we're connected to the realtime video. you can use this for implementing a loading interface
-* `call.state.backstage`: a boolean that returns if the call is in backstage mode or not
-* `call.state.duration`: how long the call has been running
-* `call.state.totalParticipants`: the number of participants watching the livestream
-* `call.state.participants`: the list of participants
-
-The `call.state.participants` can optionally contain more information about who's watching the stream.
-If you have multiple people broadcasting video this will also contain the video tracks.
-
-* `participant.user`: the user's name, image and custom data
-* `participant.video`: the video for this user
-* `participant.roles`: the roles for the participant. it enables you to have co-hosts etc
-
-There are many possibilities and the [participant state docs](../03-guides/03-call-and-participant-state.mdx) explain this in more detail.
-
-#### Creating a UI to watch a livestream
-
-The livestream layout is built using standard Jetpack Compose. The [VideoRenderer](../04-ui-components/02-video-renderer.mdx) component is provided by Stream.
-**VideoRenderer** renders the video and a fallback. You can use it for rendering the local and remote video.
-
-If you want to learn more about building an advanced UIs for watching a livestream, check out [Cookbook: Watching a livestream](../05-ui-cookbook/16-watching-livestream.mdx).
-
-#### Backstage mode
-
-In the example above you might have noticed the `call.goLive()` method and the `call.state.backstage` stateflow.
-The backstage functionality is enabled by default on the livestream call type.
-It makes it easy to build a flow where you and your co-hosts can setup your camera and equipment before going live.
-Only after you call `call.goLive()` will regular users be allowed to join the livestream.
-
-This is convenient for many livestreaming and audio-room use cases. If you want calls to start immediately when you join them that's also possible.
-Simply go to the Stream dashboard, click the livestream call type and disable the backstage mode.
-
-### Step 4 - (Optional) Publishing RTMP using OBS
-
-The example above showed how to publish your phone's camera to the livestream.
-Almost all livestream software and hardware supports RTMPS.
-[OBS](https://obsproject.com/) is one of the most popular livestreaming software packages and we'll use it to explain how to import RTMPS.
-
-A. Log the URL & Stream Key
-
-```kotlin
-val rtmp = call.state.ingress.rtmp
-Log.i("Tutorial", "RTMP url and streamingKey: $rtmp")
-```
-
-B. Open OBS and go to settings -> stream
-
-- Select "custom" service
-- Server: equal to the server URL from the log
-- Stream key: equal to the stream key from the log
-
-Press start streaming in OBS. The RTMP stream will now show up in your call just like a regular video participant.
-Now that we've learned to publish using WebRTC or RTMP let's talk about watching the livestream.
-
-### Step 5 - Viewing a livestream (WebRTC)
-
-Watching a livestream is even easier than broadcasting.
-
-Compared to the current code in `MainActivity.kt` you:
-
-* Don't need to request permissions or enable the camera
-* Don't render the local video, but instead render the remote video
-* Typically include some small UI elements like viewer count, a button to mute etc
-
-### Step 6 - (Optional) Viewing a livestream with HLS
-
-Another way to watch a livestream is using HLS. HLS tends to have a 10 to 20 seconds delay, while the above WebRTC approach is realtime.
-The benefit that HLS offers is better buffering under poor network conditions.
-So HLS can be a good option when:
-
-* A 10-20 second delay is acceptable
-* Your users want to watch the Stream in poor network conditions
-
-Let's show how to broadcast your call to HLS:
-
-```kotlin
-call.startHLS()
-val hlsUrl = call.state.egress.value?.hls?.playlistUrl
-Log.i("Tutorial", "HLS url = $hlsUrl")
-```
-
-You can play the HLS video feed using any HLS capable video player, such as [ExoPlayer](https://github.com/google/ExoPlayer).
-
-### Step 7 - Advanced Features
-
-This tutorial covered broadcasting and watching a livestream.
-It also went into more details about HLS & RTMP-in.
-
-There are several advanced features that can improve the livestreaming experience:
-
-* ** [Co-hosts](../03-guides/02-joining-creating-calls.mdx) ** You can add members to your livestream with elevated permissions. So you can have co-hosts, moderators etc.
-* ** [Custom events](../03-guides/09-reactions-and-custom-events.mdx) ** You can use custom events on the call to share any additional data. Think about showing the score for a game, or any other realtime use case.
-* ** [Reactions & Chat](../03-guides/09-reactions-and-custom-events.mdx) ** Users can react to the livestream, and you can add chat. This makes for a more engaging experience.
-* ** [Notifications](../06-advanced/01-ringing.mdx) ** You can notify users via push notifications when the livestream starts
-* ** [Recording](../06-advanced/06-recording.mdx) ** The call recording functionality allows you to record the call with various options and layouts
-
-### Recap
-
-It was fun to see just how quickly you can build in-app low latency livestreaming.
-Please do let us know if you ran into any issues.
-Our team is also happy to review your UI designs and offer recommendations on how to achieve it with Stream.
-
-To recap what we've learned:
-
-* WebRTC is optimal for latency, HLS is slower but buffers better for users with poor connections
-* You setup a call: (val call = client.call("livestream", callId))
-* The call type "livestream" controls which features are enabled and how permissions are setup
-* The livestream by default enables "backstage" mode. This allows you and your co-hosts to setup your mic and camera before allowing people in
-* When you join a call, realtime communication is setup for audio & video: (call.join())
-* Stateflow objects in call.state and call.state.participants make it easy to build your own UI
-* For a livestream the most important one is call.state.backstage
-
-Calls run on Stream's global edge network of video servers.
-Being closer to your users improves the latency and reliability of calls.
-The SDKs enable you to build livestreaming, audio rooms and video calling in days.
-
-We hope you've enjoyed this tutorial and please do feel free to reach out if you have any suggestions or questions.
\ No newline at end of file
diff --git a/docusaurus/docs/Android/04-ui-components/04-call/01-call-content.mdx b/docusaurus/docs/Android/04-ui-components/04-call/01-call-content.mdx
index cdcb546ace..290aaea241 100644
--- a/docusaurus/docs/Android/04-ui-components/04-call/01-call-content.mdx
+++ b/docusaurus/docs/Android/04-ui-components/04-call/01-call-content.mdx
@@ -109,4 +109,4 @@ The following parameters are available on the `CallContent`:
You can find out the parameters details in the [CallContent docs](https://getstream.github.io/stream-video-android/stream-video-android-ui-compose/io.getstream.video.android.compose.ui.components.call.activecall/-call-content.html).
:::
-If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](../../02-tutorials/01-video-calling.mdx).
+If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/).
diff --git a/docusaurus/docs/Android/04-ui-components/04-call/03-call-controls.mdx b/docusaurus/docs/Android/04-ui-components/04-call/03-call-controls.mdx
index f3bc9efc26..455d38b055 100644
--- a/docusaurus/docs/Android/04-ui-components/04-call/03-call-controls.mdx
+++ b/docusaurus/docs/Android/04-ui-components/04-call/03-call-controls.mdx
@@ -150,7 +150,7 @@ This is a very simple component so it doesn't have replaceable slots, but it sti
* `modifier`: Allows you to customize the size, position, elevation, background and much more of the component. Using this in pair with `VideoTheme` and our [theming guide](../03-video-theme.mdx), you're able to customize the shape of the call controls as well as colors padding and more.
* `actions`: As previously mentioned, by changing the `actions`, you don't only change the possible behavior, but also the appearance. You can use our own predefined action buttons or add your own Composable and tweak orders.
-In our [Video Android Tutorial](../../02-tutorials/01-video-calling.mdx), we showcased how to build custom `ControlActions` to remove a leave call action button and only feature camera and audio buttons. The result ended up looking something like this:
+In our [Video Android Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/), we showcased how to build custom `ControlActions` to remove a leave call action button and only feature camera and audio buttons. The result ended up looking something like this:
![Compose Control Actions](../../assets/compose_call_controls_custom.png)
diff --git a/docusaurus/docs/Android/04-ui-components/04-call/05-screen-share-content.mdx b/docusaurus/docs/Android/04-ui-components/04-call/05-screen-share-content.mdx
index 176f9bb132..58a3927922 100644
--- a/docusaurus/docs/Android/04-ui-components/04-call/05-screen-share-content.mdx
+++ b/docusaurus/docs/Android/04-ui-components/04-call/05-screen-share-content.mdx
@@ -63,4 +63,4 @@ This is a very simple component so it doesn't have replaceable slots, but it sti
- `style`: Defined properties for styling a single video call track.
- `videoRenderer`: A single video renderer renders each individual participant.
-If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](../../02-tutorials/01-video-calling.mdx).
\ No newline at end of file
+If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/).
\ No newline at end of file
diff --git a/docusaurus/docs/Android/04-ui-components/05-participants/04-participants-spotlight.mdx b/docusaurus/docs/Android/04-ui-components/05-participants/04-participants-spotlight.mdx
index 86a9b7cb9a..cb927787af 100644
--- a/docusaurus/docs/Android/04-ui-components/05-participants/04-participants-spotlight.mdx
+++ b/docusaurus/docs/Android/04-ui-components/05-participants/04-participants-spotlight.mdx
@@ -67,4 +67,4 @@ This is a very simple component so it doesn't have replaceable slots, but it sti
- `style`: Defined properties for styling a single video call track.
- `videoRenderer`: A single video renderer renders each individual participant.
-If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](../../02-tutorials/01-video-calling.mdx).
\ No newline at end of file
+If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/).
\ No newline at end of file
diff --git a/docusaurus/docs/Android/05-ui-cookbook/15-hostling-livestream.mdx b/docusaurus/docs/Android/05-ui-cookbook/15-hostling-livestream.mdx
deleted file mode 100644
index 3473c3d82e..0000000000
--- a/docusaurus/docs/Android/05-ui-cookbook/15-hostling-livestream.mdx
+++ /dev/null
@@ -1,360 +0,0 @@
----
-title: Hosting a livestream
-description: How to host a livestream on Android with Kotlin
----
-
-This cookbook tutorial walks you through how to build an advanced UIs for hosting a livestream on Android.
-
-:::note
-In this cookbook tutorial, we will assume that you already know how to join a livestream call. If you haven't familiarized yourself with the [Livestream Tutorial](../02-tutorials/03-livestream.mdx) yet, we highly recommend doing so before proceeding with this cookbook.
-:::
-
-When you build a UI to host livestreaming, there are a few things to keep in mind:
-
-* Start/Stop the broadcasting
-* Toggling the device options, such as a camera and microphone
-* How to indicate when there are connection problems
-* Number of participants
-* Duration of the call
-
-In this cookbook tutorial, you'll learn how to build the result below at the end:
-
-| On Backstage | On Live |
-| --- | --- |
-| ![LiveStream Backstage](../assets/cookbook/livestream-backstage.png) | ![LiveStream Live](../assets/cookbook/livestream-live.png) |
-
-### Rendering Livestreaming
-
-First and foremost, rendering the livestreaming video is the key feature and the most crucial part of the screen.
-
-To accomplish this, you can easily render your livestreaming video using the following simple sample code:
-
-```kotlin
-val userToken = "REPLACE_WITH_TOKEN"
-val userId = "REPLACE_WITH_USER_ID"
-val callId = "REPLACE_WITH_CALL_ID"
-
-// step1 - create a user.
-val user = User(
- id = userId, // any string
- name = "Tutorial", // name and image are used in the UI
- role = "admin"
-)
-
-// step2 - initialize StreamVideo. For a production app we recommend adding the client to your Application class or di module.
-val client = StreamVideoBuilder(
- context = applicationContext,
- apiKey = "mmhfdzb5evj2", // demo API key
- geo = GEO.GlobalEdgeNetwork,
- user = user,
- token = userToken,
-).build()
-
-// step3 - join a call, which type is `default` and id is `123`.
-val call = client.call("livestream", callId)
-lifecycleScope.launch {
- // join the call
- val result = call.join(create = true)
- result.onError {
- Toast.makeText(applicationContext, "uh oh $it", Toast.LENGTH_SHORT).show()
- }
-}
-
-setContent {
- // request the Android runtime permissions for the camera and microphone
- LaunchCallPermissions(call = call)
-
- // step4 - apply VideoTheme
- VideoTheme {
- val me by call.state.me.collectAsState()
- val video = me?.video?.collectAsState()?.value
-
- VideoRenderer(
- modifier = Modifier
- .fillMaxSize()
- .clip(RoundedCornerShape(6.dp)),
- call = call,
- video = video,
- videoFallbackContent = {
- Text(text = "Video rendering failed")
- }
- )
- }
-}
-```
-
-If you run the above example, you'll see the very basic video streaming screen below:
-
-![Video Streaming](../assets/compose_single_video.png)
-
-### Implement Live Participants Label
-
-Now you need to build labels that display the count of participants in your livestreaming session and indicate the streaming time.
-
-You can easily implement the live label using the following approach:
-
-```kotlin
-@Composable
-fun LiveLabel(
- modifier: Modifier,
- liveCount: Int
-) {
- Row(modifier = modifier.clip(RoundedCornerShape(6.dp))) {
- Text(
- modifier = Modifier
- .background(VideoTheme.colors.primaryAccent)
- .padding(vertical = 3.dp, horizontal = 12.dp),
- text = "Live",
- color = Color.White
- )
-
- Row(
- modifier = Modifier.background(Color(0xFF1C1E22)),
- verticalAlignment = Alignment.CenterVertically
- ) {
- Icon(
- modifier = Modifier
- .padding(horizontal = 6.dp)
- .size(22.dp),
- imageVector = Icons.Default.Person,
- tint = Color.White,
- contentDescription = null
- )
-
- Text(
- modifier = Modifier.padding(end = 12.dp, top = 3.dp, bottom = 3.dp),
- text = liveCount.toString(),
- color = Color.White
- )
- }
- }
-}
-```
-
-Upon building a preview for the `LiveLabel` Composable, you will observe the following result:
-
-![LiveLabel](../assets/cookbook/livestream-live-label.png)
-
-### Implement Live Time Label
-
-Next, you need to implement the live time label, which will display the duration of the livestream once it starts.
-
-You can simply implement the live time label like so:
-
-```kotlin
-@Composable
-fun TimeLabel(
- modifier: Modifier = Modifier,
- sessionTime: Long
-) {
- val time by remember(sessionTime) {
- val date = Date(sessionTime)
- val format = SimpleDateFormat("mm:ss", Locale.US)
- mutableStateOf(format.format(date))
- }
-
- Row(
- modifier = modifier
- .background(Color(0xFF1C1E22), RoundedCornerShape(6.dp)),
- verticalAlignment = Alignment.CenterVertically
- ) {
- Icon(
- modifier = Modifier
- .size(28.dp)
- .padding(start = 12.dp),
- imageVector = Icons.Default.CheckCircle,
- tint = VideoTheme.colors.infoAccent,
- contentDescription = null
- )
-
- Text(
- modifier = Modifier.padding(horizontal = 12.dp),
- text = time,
- color = Color.White
- )
- }
-}
-```
-
-If you build a preview for `LiveLabel` Composable, you'll see the result below:
-
-![TimeLabel](../assets/cookbook/livestream-time-label.png)
-
-### Connect implementations With Call State
-
-Now, let's connect those implementations with the call state and put them all with `Scaffold`, which consists of `TopBar`, `BottomBar`, and `content`.
-
-```kotlin
-VideoTheme {
- val participantCount by call.state.participantCounts.collectAsState()
- val connection by call.state.connection.collectAsState()
- val backstage by call.state.backstage.collectAsState()
- val me by call.state.me.collectAsState()
- val video = me?.video?.collectAsState()?.value
- val sessionTime by call.state.liveDurationInMs.collectAsState()
-
- Scaffold(
- modifier = Modifier
- .fillMaxSize()
- .background(Color(0xFF272A30))
- .padding(6.dp),
- contentColor = Color(0xFF272A30),
- backgroundColor = Color(0xFF272A30),
- topBar = {
- if (connection == RealtimeConnection.Connected) {
- Box(
- modifier = Modifier
- .fillMaxWidth()
- .padding(6.dp)
- ) {
- if (!backstage) {
- LiveLabel(
- modifier = Modifier.align(Alignment.CenterStart),
- liveCount = participantCount?.total ?: 0
- )
- }
-
- TimeLabel(
- modifier = Modifier.align(Alignment.Center),
- sessionTime = sessionTime ?: 0
- )
- }
- }
- }
- ) {
- VideoRenderer(
- modifier = Modifier
- .fillMaxSize()
- .padding(it)
- .clip(RoundedCornerShape(6.dp)),
- call = call,
- video = video,
- videoFallbackContent = {
- Text(text = "Video rendering failed")
- }
- )
- }
-}
-```
-
-As demonstrated in the example above, you can observe several state declarations representing the call state.:
-
-- `participantCount`: A model that contains information about participant counts.
-- `connection`: Indicates the connection state of a call.
-- `backstage`: Whether the call is on the backstage or not.
-- `me`: A video track, which renders a local video stream.
-- `video`: A local video track.
-- `sessionTime`: Indicates the time duration since your call goes to live.
-
-### Implement Live Button
-
-Let's proceed with building a live button that enables you to start/stop broadcasting your call and control your physical device, including the camera and microphone.
-
-You can implement the live button like so:
-
-```kotlin
-@Composable
-fun LiveButton(
- modifier: Modifier,
- call: Call,
- isBackstage: Boolean,
- onClick: () -> Unit
-) {
- Box(modifier = Modifier.fillMaxWidth()) {
- Button(
- modifier = modifier,
- colors = if (isBackstage) {
- ButtonDefaults.buttonColors(
- backgroundColor = VideoTheme.colors.primaryAccent,
- contentColor = VideoTheme.colors.primaryAccent
- )
- } else {
- ButtonDefaults.buttonColors(
- backgroundColor = VideoTheme.colors.errorAccent,
- contentColor = VideoTheme.colors.errorAccent
- )
- },
- onClick = onClick
- ) {
- Icon(
- modifier = Modifier.padding(vertical = 3.dp, horizontal = 6.dp),
- imageVector = if (isBackstage) {
- Icons.Default.PlayArrow
- } else {
- Icons.Default.Close
- },
- tint = Color.White,
- contentDescription = null
- )
-
- Text(
- modifier = Modifier.padding(end = 6.dp),
- text = if (isBackstage) "Go Live" else "Stop Broadcast",
- fontWeight = FontWeight.Bold,
- fontSize = 16.sp,
- color = Color.White
- )
- }
-
- val isCameraEnabled by call.camera.isEnabled.collectAsState()
- val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState()
-
- Row(modifier = Modifier.align(Alignment.CenterEnd)) {
- ToggleCameraAction(
- modifier = Modifier.size(45.dp),
- isCameraEnabled = isCameraEnabled,
- enabledColor = VideoTheme.colors.callActionIconEnabledBackground,
- disabledColor = VideoTheme.colors.callActionIconEnabledBackground,
- disabledIconTint = VideoTheme.colors.errorAccent,
- shape = RoundedCornerShape(8.dp),
- onCallAction = { callAction -> call.camera.setEnabled(callAction.isEnabled) }
- )
-
- ToggleMicrophoneAction(
- modifier = Modifier
- .padding(horizontal = 12.dp)
- .size(45.dp),
- isMicrophoneEnabled = isMicrophoneEnabled,
- enabledColor = VideoTheme.colors.callActionIconEnabledBackground,
- disabledColor = VideoTheme.colors.callActionIconEnabledBackground,
- disabledIconTint = VideoTheme.colors.errorAccent,
- shape = RoundedCornerShape(8.dp),
- onCallAction = { callAction -> call.microphone.setEnabled(callAction.isEnabled) }
- )
- }
- }
-}
-```
-
-Now, let's complete the `Scaffold` with the new `LiveButton` Composable.
-
-### Complete The Live Screen
-
-Now, everything is ready to put together. You can complete the `Scaffold` with the new `LiveButton` Composable like so:
-
-```kotlin
-Scaffold(
- ..,
- bottomBar = {
- LiveButton(
- modifier = Modifier.padding(9.dp),
- call = call,
- isBackstage = backstage
- ) {
- lifecycleScope.launch {
- if (backstage) call.goLive() else call.stopLive()
- }
- }
- }
- ) {
- ..
- }
-```
-
-Once you've completed building your project, you'll witness the final result as depicted below:
-
-![LiveStream Backstage](../assets/cookbook/livestream-backstage.png)
-
-By simply clicking the **Go Live** button, you can begin broadcasting your stream.
-
-In this cookbook tutorial, you have learned how to create an advanced live streaming screen. If you wish to refer to the code, feel free to explore the [GitHub Repository](https://github.com/GetStream/stream-video-android/tree/develop/tutorials/tutorial-livestream).
\ No newline at end of file
diff --git a/docusaurus/docs/Android/05-ui-cookbook/16-watching-livestream.mdx b/docusaurus/docs/Android/05-ui-cookbook/16-watching-livestream.mdx
deleted file mode 100644
index acca6e0cb3..0000000000
--- a/docusaurus/docs/Android/05-ui-cookbook/16-watching-livestream.mdx
+++ /dev/null
@@ -1,151 +0,0 @@
----
-title: Watching a livestream
-description: How to watch a livestream on Android with Kotlin
----
-
-This cookbook tutorial walks you through how to build an advanced UIs for watching a livestream on Android.
-
-:::note
-We will assume that you already know how to join a livestream call. If you haven't familiarized yourself with the [Livestream Tutorial](../02-tutorials/03-livestream.mdx) yet, we highly recommend doing so before proceeding with this cookbook.
-:::
-
-In this cookbook tutorial, you'll learn how to build the result below at the end:
-
-| On Backstage | On Live |
-| --- | --- |
-| ![LiveStream Backstage](../assets/cookbook/livestream-watching-backstage.png) | ![LiveStream Live](../assets/cookbook/livestream-watching-live.png) |
-
-### Watching Livestreaming
-
-The Stream Compose SDK offers a pre-built UI component, `LivestreamPlayer`, designed to simplify the creation of a livestream viewing screen. This component includes a video renderer, displays information such as the number of participants and call duration, and provides controls for pausing and resuming the livestream.
-
-You can use the `LivestreamPlayer` like the sample below:
-
-```kotlin
-val call = client.call("livestream", callId)
-lifecycleScope.launch {
- // join the call
- val result = call.join(create = true)
- result.onError {
- Toast.makeText(applicationContext, "uh oh $it", Toast.LENGTH_SHORT).show()
- }
-}
-
-setContent {
- LivestreamPlayer(call = call)
-}
-```
-
-If you run the above example, you'll see the screen below:
-
-![Watching Livestream](../assets/cookbook/livestream-watching-backstage.png)
-
-As indicated in the message above, it appears that the host hasn't initiated the livestream.
-
-Now, if you run the hosting a livestream sample and start a broadcasting following the [Livestream Tutorial](../02-tutorials/03-livestream.mdx), you'll see the livestreaming screen below:
-
-![LiveStream Live](../assets/cookbook/livestream-watching-live.png)
-
-### LivestreamPlayer
-
-The `LivestreamPlayer` component offers streamlined customization options for each element:
-
-```kotlin
-LivestreamPlayer(
- call = call,
- enablePausing = true,
- onPausedPlayer = { isPaused -> Log.d("livestream", "paused: $isPaused") },
- backstageContent = {
- Text(
- modifier = Modifier.align(Alignment.Center),
- text = "Waiting for live host",
- )
- },
- rendererContent = {
- val livestream by call.state.livestream.collectAsState()
-
- VideoRenderer(
- modifier = Modifier.fillMaxSize(),
- call = call,
- video = livestream,
- )
- },
- overlayContent = {
- val totalParticipants by call.state.totalParticipants.collectAsState()
- val duration by call.state.duration.collectAsState()
-
- Row(
- modifier = Modifier.align(Alignment.Center),
- verticalAlignment = Alignment.CenterVertically,
- ) {
- Text(text = totalParticipants.toString())
-
- Text(text = (duration ?: 0).toString())
- }
- }
-)
-```
-
-As you can observe in the example above, you have the flexibility to extensively customize every element of the `LivestreamPlayer`. Each element serves a distinct purpose:
-
-* `enablePausing`: Enables pausing or resuming the livestream video.
-* `onPausedPlayer`: Listen to pause or resume the livestream video.
-* `backstageContent`: Content shown when the host has not yet started the live stream.
-* `rendererContent`: The rendered stream originating from the host.
-* `overlayContent`: The default content is displayed to indicate participant counts, live stream duration, and device settings controls. You can overlay anything that you want by customizing this Composable parameter.
-
-You can utilize each element depending on your situations and requirements.
-
-`LivestreamPlayer` also provides pausing/resuming the livestream video by clicking on the video renderer.
-
-If you click on the video renderer, it will pause or resume like the image below:
-
-![LiveStream Live Pausing](../assets/cookbook/livestream-watching-live-pause.png)
-
-### Build Your Own LivestreamPlayer
-
-You can also create your own custom livestream player without relying on Stream's pre-built UI component.
-
-The key logic to consider includes:
-
-* UI for when the video isn't loaded yet
-* A message to show when the livestream didn't start yet
-* What to show when the livestream stopped
-* How to indicate when there are connection problems
-* Muting the volume
-* Number of participants
-* Duration of the call
-
-The `call.state` provides the means to monitor whether the livestream is in the background, the count of participants, and more. You can leverage this information to construct your own custom livestream player.
-
-```kotlin
-val backstage: Boolean by call.state.backstage.collectAsState()
-val livestream: ParticipantState.Video? by call.state.livestream.collectAsState()
-val totalParticipants: ParticipantCount? by call.state.totalParticipants.collectAsState()
-val duration: kotlin.time.Duration? by call.state.duration.collectAsState()
-```
-
-Then now you can implement a your livestream player like the example below:
-
-```kotlin
-if (backstage) {
- Text(text = "Waiting for live host")
-} else {
- VideoRenderer(
- modifier = Modifier.fillMaxSize(),
- call = call,
- video = livestream,
- )
-
- Row(
- modifier = Modifier.align(Alignment.Center),
- verticalAlignment = Alignment.CenterVertically,
- ) {
- Text(text = totalParticipants.toString())
-
- Text(text = (duration ?: 0).toString())
- }
-}
-```
-
-In this cookbook tutorial, you have learned how to create an advanced live streaming screen. If you wish to refer to the code, feel free to explore the [GitHub Repository](https://github.com/GetStream/stream-video-android/tree/develop/tutorials/tutorial-livestream).
\ No newline at end of file
diff --git a/docusaurus/sidebars-android.js b/docusaurus/sidebars-android.js
index e64c5532c8..7f3ab345ab 100644
--- a/docusaurus/sidebars-android.js
+++ b/docusaurus/sidebars-android.js
@@ -10,27 +10,6 @@ module.exports = {
},
],
},
- {
- type: "category",
- label: "Tutorials",
- items: [
- {
- type: 'doc',
- id: 'tutorials/video-calling', // document ID
- label: 'Video Call Tutorial', // sidebar label
- },
- {
- type: 'doc',
- id: 'tutorials/audio-room', // document ID
- label: 'Audio Room Tutorial', // sidebar label
- },
- {
- type: 'doc',
- id: 'tutorials/livestream', // document ID
- label: 'Livestream Tutorial', // sidebar label
- },
- ],
- },
{
type: "category",
label: "Core Concepts",
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index f08995dfb9..e7d7db7e47 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -1,10 +1,10 @@
[versions]
-androidGradlePlugin = "8.1.3"
+androidGradlePlugin = "8.2.0"
cameraCamera2 = "1.3.0"
spotless = "6.21.0"
nexusPlugin = "1.3.0"
-kotlin = "1.9.20"
-ksp = "1.9.20-1.0.13"
+kotlin = "1.9.21"
+ksp = "1.9.21-1.0.15"
kotlinSerialization = "1.6.0"
kotlinSerializationConverter = "1.0.0"
kotlinxCoroutines = "1.7.3"
@@ -23,18 +23,18 @@ androidxDataStore = "1.0.0"
googleService = "4.3.14"
androidxComposeBom = "2023.10.01"
-androidxComposeCompiler = "1.5.4"
+androidxComposeCompiler = "1.5.6"
androidxComposeTracing = "1.0.0-alpha03"
androidxHiltNavigation = "1.1.0"
androidxComposeNavigation = "2.7.5"
composeStableMarker = "1.0.2"
coil = "2.5.0"
-landscapist = "2.2.11"
+landscapist = "2.2.12"
accompanist = "0.32.0"
telephoto = "0.3.0"
audioswitch = "1.1.8"
-libyuv = "0.28.0"
+libyuv = "0.30.0"
wire = "4.7.0"
okhttp = "4.12.0"
@@ -48,12 +48,12 @@ streamWebRTC = "1.1.1"
streamResult = "1.1.0"
streamChat = "6.0.8"
streamLog = "1.1.4"
-streamPush = "1.1.6"
+streamPush = "1.1.7"
androidxTest = "1.5.2"
androidxTestCore = "1.5.0"
androidxProfileinstaller = "1.3.1"
-androidxMacroBenchmark = "1.2.0"
+androidxMacroBenchmark = "1.2.2"
androidxUiAutomator = "2.3.0-alpha05"
androidxContraintLayout = "2.1.4"
androidxEspresso = "3.5.1"
@@ -71,12 +71,14 @@ installReferrer = "2.2"
playAuth = "20.7.0"
playAppUpdate = "2.1.0"
-hilt = "2.48.1"
+hilt = "2.49"
desugar = "2.0.4"
leakCanary = "2.12"
binaryCompatabilityValidator = "0.13.2"
playPublisher = "3.8.4"
+googleMlKitSelfieSegmentation = "16.0.0-beta4"
+
[libraries]
androidx-camera-camera2 = { module = "androidx.camera:camera-camera2", version.ref = "cameraCamera2" }
androidx-material = { group = "com.google.android.material", name = "material", version.ref = "androidxMaterial" }
@@ -192,6 +194,8 @@ play-app-update-ktx = { group = "com.google.android.play", name = "app-update-kt
robolectric = { group = "org.robolectric", name = "robolectric", version.ref = "robolectric" }
leakCanary = { group = "com.squareup.leakcanary", name = "leakcanary-android", version.ref = "leakCanary" }
+google-mlkit-selfie-segmentation = { group = "com.google.mlkit", name = "segmentation-selfie", version.ref = "googleMlKitSelfieSegmentation" }
+
# Dependencies of the included build-logic
android-gradlePlugin = { group = "com.android.tools.build", name = "gradle", version.ref = "androidGradlePlugin" }
kotlin-gradlePlugin = { group = "org.jetbrains.kotlin", name = "kotlin-gradle-plugin", version.ref = "kotlin" }
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index 033e24c4cd..7f93135c49 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index d11cdd907d..e6aba2515d 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
diff --git a/gradlew b/gradlew
index fcb6fca147..0adc8e1a53 100755
--- a/gradlew
+++ b/gradlew
@@ -83,7 +83,8 @@ done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
-APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
+APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
diff --git a/stream-video-android-core/api/stream-video-android-core.api b/stream-video-android-core/api/stream-video-android-core.api
index 5f5dc27a8d..c7187b6558 100644
--- a/stream-video-android-core/api/stream-video-android-core.api
+++ b/stream-video-android-core/api/stream-video-android-core.api
@@ -734,6 +734,7 @@ public abstract interface class io/getstream/video/android/core/StreamVideo : io
public static synthetic fun call$default (Lio/getstream/video/android/core/StreamVideo;Ljava/lang/String;Ljava/lang/String;ILjava/lang/Object;)Lio/getstream/video/android/core/Call;
public abstract fun cleanup ()V
public abstract fun connectAsync (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
+ public abstract fun connectIfNotAlreadyConnected (Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public abstract fun createDevice (Lio/getstream/android/push/PushDevice;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public abstract fun deleteDevice (Lio/getstream/video/android/model/Device;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
public abstract fun getContext ()Landroid/content/Context;
@@ -772,7 +773,8 @@ public final class io/getstream/video/android/core/StreamVideoBuilder {
public fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZ)V
public fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;)V
public fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;Z)V
- public synthetic fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZILkotlin/jvm/internal/DefaultConstructorMarker;)V
+ public fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLjava/lang/String;)V
+ public synthetic fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLjava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
public final fun build ()Lio/getstream/video/android/core/StreamVideo;
public final fun getScope ()Lkotlinx/coroutines/CoroutineScope;
}
@@ -4035,6 +4037,7 @@ public class io/getstream/video/android/core/notifications/DefaultNotificationHa
public fun getChannelId ()Ljava/lang/String;
public fun getChannelName ()Ljava/lang/String;
public fun getOngoingCallNotification (Lio/getstream/video/android/model/StreamCallId;)Landroid/app/Notification;
+ public fun getRingingCallNotification (Lio/getstream/video/android/model/StreamCallId;Ljava/lang/String;)Landroid/app/Notification;
public fun onLiveCall (Lio/getstream/video/android/model/StreamCallId;Ljava/lang/String;)V
public fun onNotification (Lio/getstream/video/android/model/StreamCallId;Ljava/lang/String;)V
public fun onPermissionDenied ()V
@@ -4075,8 +4078,10 @@ public abstract interface class io/getstream/video/android/core/notifications/No
public static final field Companion Lio/getstream/video/android/core/notifications/NotificationHandler$Companion;
public static final field INCOMING_CALL_NOTIFICATION_ID I
public static final field INTENT_EXTRA_CALL_CID Ljava/lang/String;
+ public static final field INTENT_EXTRA_CALL_DISPLAY_NAME Ljava/lang/String;
public static final field INTENT_EXTRA_NOTIFICATION_ID Ljava/lang/String;
public abstract fun getOngoingCallNotification (Lio/getstream/video/android/model/StreamCallId;)Landroid/app/Notification;
+ public abstract fun getRingingCallNotification (Lio/getstream/video/android/model/StreamCallId;Ljava/lang/String;)Landroid/app/Notification;
public abstract fun onLiveCall (Lio/getstream/video/android/model/StreamCallId;Ljava/lang/String;)V
public abstract fun onNotification (Lio/getstream/video/android/model/StreamCallId;Ljava/lang/String;)V
public abstract fun onRingingCall (Lio/getstream/video/android/model/StreamCallId;Ljava/lang/String;)V
@@ -4092,9 +4097,15 @@ public final class io/getstream/video/android/core/notifications/NotificationHan
public static final field ACTION_REJECT_CALL Ljava/lang/String;
public static final field INCOMING_CALL_NOTIFICATION_ID I
public static final field INTENT_EXTRA_CALL_CID Ljava/lang/String;
+ public static final field INTENT_EXTRA_CALL_DISPLAY_NAME Ljava/lang/String;
public static final field INTENT_EXTRA_NOTIFICATION_ID Ljava/lang/String;
}
+public final class io/getstream/video/android/core/notifications/internal/receivers/ToggleCameraBroadcastReceiver : android/content/BroadcastReceiver {
+ public fun ()V
+ public fun onReceive (Landroid/content/Context;Landroid/content/Intent;)V
+}
+
public final class io/getstream/video/android/core/permission/PermissionRequest {
public fun (Lio/getstream/video/android/core/Call;Lio/getstream/video/android/model/User;Lorg/threeten/bp/OffsetDateTime;Ljava/util/List;Lorg/threeten/bp/OffsetDateTime;Lorg/threeten/bp/OffsetDateTime;)V
public synthetic fun (Lio/getstream/video/android/core/Call;Lio/getstream/video/android/model/User;Lorg/threeten/bp/OffsetDateTime;Ljava/util/List;Lorg/threeten/bp/OffsetDateTime;Lorg/threeten/bp/OffsetDateTime;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
@@ -5780,6 +5791,7 @@ public final class io/getstream/video/android/model/StreamCallId$Creator : andro
}
public final class io/getstream/video/android/model/StreamCallIdKt {
+ public static final fun streamCallDisplayName (Landroid/content/Intent;Ljava/lang/String;)Ljava/lang/String;
public static final fun streamCallId (Landroid/content/Intent;Ljava/lang/String;)Lio/getstream/video/android/model/StreamCallId;
}
diff --git a/stream-video-android-core/src/main/AndroidManifest.xml b/stream-video-android-core/src/main/AndroidManifest.xml
index 7fc99333d4..ae6934ca29 100644
--- a/stream-video-android-core/src/main/AndroidManifest.xml
+++ b/stream-video-android-core/src/main/AndroidManifest.xml
@@ -76,8 +76,8 @@
-
+
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt
index 1d23b81f6f..ed90759ab1 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt
@@ -376,7 +376,7 @@ public class Call(
return result as Failure
}
val sfuToken = result.value.credentials.token
- val sfuUrl = result.value.credentials.server.url
+ val sfuUrl = clientImpl.testSfuAddress ?: result.value.credentials.server.url
val iceServers = result.value.credentials.iceServers.map { it.toIceServer() }
timer.split("join request completed")
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt
index b863b77df5..516ee8255e 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt
@@ -17,10 +17,8 @@
package io.getstream.video.android.core
import android.content.Context
-import android.content.Intent
import androidx.core.content.ContextCompat
-import io.getstream.video.android.core.notifications.NotificationHandler
-import io.getstream.video.android.core.notifications.internal.service.OngoingCallService
+import io.getstream.video.android.core.notifications.internal.service.CallService
import io.getstream.video.android.model.StreamCallId
import io.getstream.video.android.model.User
import kotlinx.coroutines.flow.MutableStateFlow
@@ -125,10 +123,10 @@ class ClientState(client: StreamVideo) {
private fun maybeStartForegroundService(call: Call) {
if (clientImpl.runForeGroundService) {
val context = clientImpl.context
- val serviceIntent = Intent(context, OngoingCallService::class.java)
- serviceIntent.putExtra(
- NotificationHandler.INTENT_EXTRA_CALL_CID,
+ val serviceIntent = CallService.buildStartIntent(
+ context,
StreamCallId.fromCallCid(call.cid),
+ CallService.TRIGGER_ONGOING_CALL,
)
ContextCompat.startForegroundService(context, serviceIntent)
}
@@ -137,7 +135,7 @@ class ClientState(client: StreamVideo) {
private fun maybeStopForegroundService() {
if (clientImpl.runForeGroundService) {
val context = clientImpl.context
- val serviceIntent = Intent(context, OngoingCallService::class.java)
+ val serviceIntent = CallService.buildStopIntent(context)
context.stopService(serviceIntent)
}
}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideo.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideo.kt
index a2bd07bfaa..7aadc31156 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideo.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideo.kt
@@ -225,6 +225,7 @@ public interface StreamVideo : NotificationHandler {
}
public fun cleanup()
+ suspend fun connectIfNotAlreadyConnected()
}
private const val DEFAULT_QUERY_CALLS_SORT = "cid"
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt
index ad3103a772..ad7afde2dd 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt
@@ -64,7 +64,8 @@ import java.util.UUID
* @property connectionTimeoutInMs Connection timeout in seconds.
* @property ensureSingleInstance Verify that only 1 version of the video client exists, prevents integration mistakes.
* @property videoDomain URL overwrite to allow for testing against a local instance of video.
- * @property runForegroundServiceForCalls if set to true, when there is an active call the SDK will run a foreground service to keep the process alive. (default: true)
+ * @property runForegroundServiceForCalls If set to true, when there is an active call the SDK will run a foreground service to keep the process alive. (default: true)
+ * @property localSfuAddress Local SFU address (IP:port) to be used for testing. Leave null if not needed.
*/
public class StreamVideoBuilder @JvmOverloads constructor(
context: Context,
@@ -82,6 +83,7 @@ public class StreamVideoBuilder @JvmOverloads constructor(
private var ensureSingleInstance: Boolean = true,
private val videoDomain: String = "video.stream-io-api.com",
private val runForegroundServiceForCalls: Boolean = true,
+ private val localSfuAddress: String? = null,
) {
private val context: Context = context.applicationContext
@@ -159,6 +161,7 @@ public class StreamVideoBuilder @JvmOverloads constructor(
connectionModule = connectionModule,
streamNotificationManager = streamNotificationManager,
runForeGroundService = runForegroundServiceForCalls,
+ testSfuAddress = localSfuAddress,
)
if (user.type == UserType.Guest) {
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt
index f0852b8a93..1cc8c52bdb 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt
@@ -134,6 +134,7 @@ internal class StreamVideoImpl internal constructor(
internal val tokenProvider: (suspend (error: Throwable?) -> String)?,
internal val streamNotificationManager: StreamNotificationManager,
internal val runForeGroundService: Boolean = true,
+ internal val testSfuAddress: String? = null,
) : StreamVideo,
NotificationHandler by streamNotificationManager {
@@ -296,6 +297,14 @@ internal class StreamVideoImpl internal constructor(
return sub
}
+ override suspend fun connectIfNotAlreadyConnected() {
+ if (connectionModule.coordinatorSocket.connectionState.value != SocketState.NotConnected &&
+ connectionModule.coordinatorSocket.connectionState.value != SocketState.Connecting
+ ) {
+ connectionModule.coordinatorSocket.connect()
+ }
+ }
+
/**
* Observes the app lifecycle and attempts to reconnect/release the socket connection.
*/
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/video/YuvFrame.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/video/YuvFrame.kt
index ea407235db..66730c6398 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/video/YuvFrame.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/video/YuvFrame.kt
@@ -17,10 +17,10 @@
package io.getstream.video.android.core.call.video
import android.graphics.Bitmap
-import android.graphics.Matrix
import io.getstream.log.taggedLogger
import io.github.crow_misia.libyuv.AbgrBuffer
import io.github.crow_misia.libyuv.I420Buffer
+import io.github.crow_misia.libyuv.RotateMode
import org.webrtc.JniCommon
import org.webrtc.VideoFrame
import org.webrtc.YuvHelper
@@ -56,8 +56,6 @@ object YuvFrame {
val planes = arrayOf(toI420.dataY, toI420.dataU, toI420.dataV)
val strides = intArrayOf(toI420.strideY, toI420.strideU, toI420.strideV)
- toI420.release()
-
val halfWidth = (width + 1).shr(1)
val halfHeight = (height + 1).shr(1)
@@ -92,43 +90,53 @@ object YuvFrame {
}
}
+ toI420.release()
+
return I420Buffer.wrap(byteBuffer, width, height)
}
private fun getBitmap(i420buffer: I420Buffer, width: Int, height: Int, rotationDegree: Int): Bitmap {
- val newBuffer = AbgrBuffer.allocate(width, height)
- i420buffer.convertTo(newBuffer)
+ val abgrBuffer = AbgrBuffer.allocate(width, height)
+ i420buffer.convertTo(abgrBuffer)
i420buffer.close()
- // Construct a Bitmap based on the new pixel data
- val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
- bitmap.copyPixelsFromBuffer(newBuffer.asBuffer())
- newBuffer.close()
-
// If necessary, generate a rotated version of the Bitmap
- return when (rotationDegree) {
+ var swapWidthAndHeight = false
+ val rotatedAbgrBuffer = when (rotationDegree) {
90, -270 -> {
- val m = Matrix()
- m.postRotate(90f)
- Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, m, true)
- }
+ swapWidthAndHeight = true
+ val dstBuffer = AbgrBuffer.allocate(height, width)
+ abgrBuffer.rotate(dstBuffer, RotateMode.ROTATE_90)
+ dstBuffer
+ }
180, -180 -> {
- val m = Matrix()
- m.postRotate(180f)
- Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, m, true)
+ val dstBuffer = AbgrBuffer.allocate(width, height)
+ abgrBuffer.rotate(dstBuffer, RotateMode.ROTATE_180)
+ dstBuffer
}
-
270, -90 -> {
- val m = Matrix()
- m.postRotate(270f)
- Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, m, true)
- }
+ swapWidthAndHeight = true
+ val dstBuffer = AbgrBuffer.allocate(height, width)
+ abgrBuffer.rotate(dstBuffer, RotateMode.ROTATE_270)
+ dstBuffer
+ }
else -> {
- // Don't rotate, just return the Bitmap
- bitmap
+ abgrBuffer
}
}
+
+ // Construct a Bitmap based on the new pixel data
+ val bitmap = Bitmap.createBitmap(
+ if (swapWidthAndHeight) height else width,
+ if (swapWidthAndHeight) width else height,
+ Bitmap.Config.ARGB_8888,
+ )
+ bitmap.copyPixelsFromBuffer(rotatedAbgrBuffer.asBuffer())
+ abgrBuffer.close()
+ rotatedAbgrBuffer.close()
+
+ return bitmap
}
}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/internal/module/ConnectionModule.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/internal/module/ConnectionModule.kt
index 881f7f4b99..661425d0d3 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/internal/module/ConnectionModule.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/internal/module/ConnectionModule.kt
@@ -191,7 +191,11 @@ internal class SfuConnectionModule(
onFastReconnect: suspend () -> Unit,
) {
internal var sfuSocket: SfuSocket
- private val updatedSignalUrl = sfuUrl.removeSuffix(suffix = "/twirp")
+ private val updatedSignalUrl = if (sfuUrl.contains(Regex("https?://"))) {
+ sfuUrl
+ } else {
+ "http://$sfuUrl"
+ }.removeSuffix("/twirp")
private fun buildSfuOkHttpClient(): OkHttpClient {
val connectionTimeoutInMs = 10000L
@@ -227,7 +231,9 @@ internal class SfuConnectionModule(
}
init {
- val socketUrl = "$updatedSignalUrl/ws".replace("https", "wss")
+ val socketUrl = "$updatedSignalUrl/ws"
+ .replace("https", "wss")
+ .replace("http", "ws")
sfuSocket = SfuSocket(
socketUrl,
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/mapper/ReactionMapper.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/mapper/ReactionMapper.kt
index 01da4fbdc9..a0f70d6c71 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/mapper/ReactionMapper.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/mapper/ReactionMapper.kt
@@ -29,12 +29,12 @@ public fun interface ReactionMapper {
return ReactionMapper { emojiCode ->
when (emojiCode) {
":fireworks:", ":tada:" -> "\uD83C\uDF89"
- ":hello:" -> "\uD83D\uDC4B"
":raise-hand:" -> "✋"
":like:" -> "\uD83D\uDC4D"
- ":hate:" -> "\uD83D\uDC4E"
- ":smile:" -> "\uD83D\uDE04"
- ":heart:" -> "❤️"
+ ":dislike:" -> "\uD83D\uDC4E"
+ ":hello:" -> "\uD83D\uDC4B"
+ ":smile:" -> "\uD83D\uDE42"
+ ":heart:" -> "\u2665"
else -> emojiCode
}
}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/DefaultNotificationHandler.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/DefaultNotificationHandler.kt
index 8acb208b7b..7905a2f184 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/DefaultNotificationHandler.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/DefaultNotificationHandler.kt
@@ -29,17 +29,17 @@ import androidx.core.app.NotificationCompat
import androidx.core.app.NotificationCompat.CallStyle
import androidx.core.app.NotificationManagerCompat
import androidx.core.app.Person
+import androidx.core.content.ContextCompat
import io.getstream.android.push.permissions.DefaultNotificationPermissionHandler
import io.getstream.android.push.permissions.NotificationPermissionHandler
import io.getstream.log.TaggedLogger
import io.getstream.log.taggedLogger
import io.getstream.video.android.core.R
-import io.getstream.video.android.core.notifications.NotificationHandler.Companion.ACTION_ACCEPT_CALL
-import io.getstream.video.android.core.notifications.NotificationHandler.Companion.ACTION_INCOMING_CALL
import io.getstream.video.android.core.notifications.NotificationHandler.Companion.ACTION_LIVE_CALL
import io.getstream.video.android.core.notifications.NotificationHandler.Companion.ACTION_NOTIFICATION
import io.getstream.video.android.core.notifications.NotificationHandler.Companion.INCOMING_CALL_NOTIFICATION_ID
import io.getstream.video.android.core.notifications.internal.DefaultStreamIntentResolver
+import io.getstream.video.android.core.notifications.internal.service.CallService
import io.getstream.video.android.model.StreamCallId
public open class DefaultNotificationHandler(
@@ -70,18 +70,63 @@ public open class DefaultNotificationHandler(
}
override fun onRingingCall(callId: StreamCallId, callDisplayName: String) {
- intentResolver.searchIncomingCallPendingIntent(callId)?.let { fullScreenPendingIntent ->
- intentResolver.searchAcceptCallPendingIntent(callId)?.let { acceptCallPendingIntent ->
- intentResolver.searchRejectCallPendingIntent(callId)?.let { rejectCallPendingIntent ->
- showIncomingCallNotification(
- fullScreenPendingIntent,
- acceptCallPendingIntent,
- rejectCallPendingIntent,
- callDisplayName,
- )
- }
- } ?: logger.e { "Couldn't find any activity for $ACTION_ACCEPT_CALL" }
- } ?: logger.e { "Couldn't find any activity for $ACTION_INCOMING_CALL" }
+ val serviceIntent = CallService.buildStartIntent(
+ this.application,
+ callId,
+ CallService.TRIGGER_INCOMING_CALL,
+ callDisplayName,
+ )
+ ContextCompat.startForegroundService(application.applicationContext, serviceIntent)
+ }
+
+ override fun getRingingCallNotification(callId: StreamCallId, callDisplayName: String): Notification? {
+ val fullScreenPendingIntent = intentResolver.searchIncomingCallPendingIntent(callId)
+ val acceptCallPendingIntent = intentResolver.searchAcceptCallPendingIntent(callId)
+ val rejectCallPendingIntent = intentResolver.searchRejectCallPendingIntent(callId)
+ return if (fullScreenPendingIntent != null && acceptCallPendingIntent != null && rejectCallPendingIntent != null) {
+ getIncomingCallNotification(
+ fullScreenPendingIntent,
+ acceptCallPendingIntent,
+ rejectCallPendingIntent,
+ callDisplayName,
+ )
+ } else {
+ logger.e { "Ringing call notification not shown, one of the intents is null." }
+ null
+ }
+ }
+
+ private fun getIncomingCallNotification(
+ fullScreenPendingIntent: PendingIntent,
+ acceptCallPendingIntent: PendingIntent,
+ rejectCallPendingIntent: PendingIntent,
+ callDisplayName: String,
+ ): Notification {
+ val channelId = application.getString(
+ R.string.stream_video_incoming_call_notification_channel_id,
+ )
+ maybeCreateChannel(channelId, application) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ description = application.getString(R.string.stream_video_incoming_call_notification_channel_description)
+ importance = NotificationManager.IMPORTANCE_HIGH
+ this.lockscreenVisibility = Notification.VISIBILITY_PUBLIC
+ this.setShowBadge(true)
+ }
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ this.setAllowBubbles(true)
+ }
+ }
+ return getNotification {
+ priority = NotificationCompat.PRIORITY_HIGH
+ setContentTitle("Incoming call")
+ setContentText(callDisplayName)
+ setChannelId(channelId)
+ setOngoing(false)
+ setContentIntent(fullScreenPendingIntent)
+ setFullScreenIntent(fullScreenPendingIntent, true)
+ setCategory(NotificationCompat.CATEGORY_CALL)
+ addCallActions(acceptCallPendingIntent, rejectCallPendingIntent, callDisplayName)
+ }
}
override fun onNotification(callId: StreamCallId, callDisplayName: String) {
@@ -121,7 +166,12 @@ public open class DefaultNotificationHandler(
val ongoingCallsChannelId = application.getString(
R.string.stream_video_ongoing_call_notification_channel_id,
)
- maybeCreateChannel(ongoingCallsChannelId, application)
+ maybeCreateChannel(ongoingCallsChannelId, application) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ description =
+ application.getString(R.string.stream_video_incoming_call_notification_channel_description)
+ }
+ }
// Build notification
return NotificationCompat.Builder(application, ongoingCallsChannelId)
@@ -152,7 +202,12 @@ public open class DefaultNotificationHandler(
.build()
}
- private fun maybeCreateChannel(channelId: String, context: Context) {
+ private fun maybeCreateChannel(
+ channelId: String,
+ context: Context,
+ configure: NotificationChannel.() -> Unit = {
+ },
+ ) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val channel = NotificationChannel(
channelId,
@@ -160,9 +215,7 @@ public open class DefaultNotificationHandler(
R.string.stream_video_ongoing_call_notification_channel_title,
),
NotificationManager.IMPORTANCE_DEFAULT,
- ).apply {
- description = application.getString(R.string.stream_video_ongoing_call_notification_channel_description)
- }
+ ).apply(configure)
val notificationManager =
context.getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager
@@ -218,13 +271,20 @@ public open class DefaultNotificationHandler(
notificationId: Int,
builder: NotificationCompat.Builder.() -> Unit,
) {
- val notification = NotificationCompat.Builder(application, getChannelId())
+ val notification = getNotification(builder)
+ notificationManager.notify(notificationId, notification)
+ }
+
+ private fun getNotification(
+ builder: NotificationCompat.Builder.() -> Unit,
+ ): Notification {
+ return NotificationCompat.Builder(application, getChannelId())
.setSmallIcon(android.R.drawable.presence_video_online)
.setAutoCancel(true)
.apply(builder)
.build()
- notificationManager.notify(notificationId, notification)
}
+
private fun NotificationCompat.Builder.addCallActions(
acceptCallPendingIntent: PendingIntent,
rejectCallPendingIntent: PendingIntent,
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/NotificationHandler.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/NotificationHandler.kt
index d8a16935b4..cc7c00439a 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/NotificationHandler.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/NotificationHandler.kt
@@ -25,6 +25,7 @@ public interface NotificationHandler : NotificationPermissionHandler {
fun onNotification(callId: StreamCallId, callDisplayName: String)
fun onLiveCall(callId: StreamCallId, callDisplayName: String)
fun getOngoingCallNotification(callId: StreamCallId): Notification?
+ fun getRingingCallNotification(callId: StreamCallId, callDisplayName: String): Notification?
companion object {
const val ACTION_NOTIFICATION = "io.getstream.video.android.action.NOTIFICATION"
@@ -35,6 +36,8 @@ public interface NotificationHandler : NotificationPermissionHandler {
const val ACTION_LEAVE_CALL = "io.getstream.video.android.action.LEAVE_CALL"
const val ACTION_ONGOING_CALL = "io.getstream.video.android.action.ONGOING_CALL"
const val INTENT_EXTRA_CALL_CID: String = "io.getstream.video.android.intent-extra.call_cid"
+ const val INTENT_EXTRA_CALL_DISPLAY_NAME: String = "io.getstream.video.android.intent-extra.call_displayname"
+
const val INTENT_EXTRA_NOTIFICATION_ID: String =
"io.getstream.video.android.intent-extra.notification_id"
const val INCOMING_CALL_NOTIFICATION_ID = 24756
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/NoOpNotificationHandler.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/NoOpNotificationHandler.kt
index 1ad0860482..c8a5e6da77 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/NoOpNotificationHandler.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/NoOpNotificationHandler.kt
@@ -25,6 +25,11 @@ internal object NoOpNotificationHandler : NotificationHandler {
override fun onNotification(callId: StreamCallId, callDisplayName: String) { /* NoOp */ }
override fun onLiveCall(callId: StreamCallId, callDisplayName: String) { /* NoOp */ }
override fun getOngoingCallNotification(callId: StreamCallId): Notification? = null
+ override fun getRingingCallNotification(
+ callId: StreamCallId,
+ callDisplayName: String,
+ ): Notification? = null
+
override fun onPermissionDenied() { /* NoOp */ }
override fun onPermissionGranted() { /* NoOp */ }
override fun onPermissionRationale() { /* NoOp */ }
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/receivers/RejectCallBroadcastReceiver.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/receivers/RejectCallBroadcastReceiver.kt
index 8e36c25e67..9662a54ad3 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/receivers/RejectCallBroadcastReceiver.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/receivers/RejectCallBroadcastReceiver.kt
@@ -22,8 +22,9 @@ import androidx.core.app.NotificationManagerCompat
import io.getstream.log.taggedLogger
import io.getstream.result.Result
import io.getstream.video.android.core.Call
+import io.getstream.video.android.core.notifications.NotificationHandler
import io.getstream.video.android.core.notifications.NotificationHandler.Companion.ACTION_REJECT_CALL
-import io.getstream.video.android.core.notifications.NotificationHandler.Companion.INTENT_EXTRA_NOTIFICATION_ID
+import io.getstream.video.android.core.notifications.internal.service.CallService
/**
* Used to process any pending intents that feature the [ACTION_REJECT_CALL] action. By consuming this
@@ -40,7 +41,12 @@ internal class RejectCallBroadcastReceiver : GenericCallActionBroadcastReceiver(
is Result.Success -> logger.d { "[onReceive] rejectCall, Success: $rejectResult" }
is Result.Failure -> logger.d { "[onReceive] rejectCall, Failure: $rejectResult" }
}
- val notificationId = intent.getIntExtra(INTENT_EXTRA_NOTIFICATION_ID, 0)
- NotificationManagerCompat.from(context).cancel(notificationId)
+ val serviceIntent = CallService.buildStopIntent(context)
+ context.stopService(serviceIntent)
+
+ // As a second precaution cancel also the notification
+ NotificationManagerCompat.from(
+ context,
+ ).cancel(NotificationHandler.INCOMING_CALL_NOTIFICATION_ID)
}
}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/receivers/ToggleCameraBroadcastReceiver.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/receivers/ToggleCameraBroadcastReceiver.kt
new file mode 100644
index 0000000000..ab130db513
--- /dev/null
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/receivers/ToggleCameraBroadcastReceiver.kt
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.core.notifications.internal.receivers
+
+import android.content.BroadcastReceiver
+import android.content.Context
+import android.content.Intent
+import io.getstream.log.taggedLogger
+import io.getstream.video.android.core.StreamVideo
+
+class ToggleCameraBroadcastReceiver : BroadcastReceiver() {
+ private val activeCall = StreamVideo.instanceOrNull()?.state?.activeCall?.value
+ private var shouldEnableCameraAgain = false
+ private val logger by taggedLogger("ToggleCameraBroadcastReceiver")
+
+ override fun onReceive(context: Context, intent: Intent) {
+ when (intent.action) {
+ Intent.ACTION_SCREEN_ON -> {
+ logger.d { "Screen is on and locked." }
+ }
+ Intent.ACTION_USER_PRESENT -> {
+ logger.d { "Screen is on and unlocked." }
+ if (shouldEnableCameraAgain) activeCall?.camera?.enable()
+ }
+ Intent.ACTION_SCREEN_OFF -> {
+ // This broadcast action actually means that the device is non-interactive.
+ // In a video call scenario, the only way to be non-interactive is when locking the phone manually.
+ activeCall?.camera.let { camera ->
+ shouldEnableCameraAgain = camera?.isEnabled?.value ?: false
+ camera?.disable()
+ }
+
+ logger.d { "Screen is off. Should re-enable camera: $shouldEnableCameraAgain." }
+ }
+ }
+ }
+}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallService.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallService.kt
new file mode 100644
index 0000000000..daeab5f4e3
--- /dev/null
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallService.kt
@@ -0,0 +1,328 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.core.notifications.internal.service
+
+import android.app.Notification
+import android.app.Service
+import android.content.Context
+import android.content.Intent
+import android.content.IntentFilter
+import android.content.pm.ServiceInfo
+import android.os.Build
+import android.os.IBinder
+import androidx.core.app.NotificationManagerCompat
+import androidx.core.app.ServiceCompat
+import io.getstream.log.taggedLogger
+import io.getstream.video.android.core.RingingState
+import io.getstream.video.android.core.StreamVideo
+import io.getstream.video.android.core.notifications.NotificationHandler.Companion.INCOMING_CALL_NOTIFICATION_ID
+import io.getstream.video.android.core.notifications.NotificationHandler.Companion.INTENT_EXTRA_CALL_CID
+import io.getstream.video.android.core.notifications.NotificationHandler.Companion.INTENT_EXTRA_CALL_DISPLAY_NAME
+import io.getstream.video.android.core.notifications.internal.receivers.ToggleCameraBroadcastReceiver
+import io.getstream.video.android.model.StreamCallId
+import io.getstream.video.android.model.streamCallDisplayName
+import io.getstream.video.android.model.streamCallId
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.DelicateCoroutinesApi
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.cancel
+import kotlinx.coroutines.launch
+import org.openapitools.client.models.CallEndedEvent
+import org.openapitools.client.models.CallRejectedEvent
+import java.lang.IllegalArgumentException
+
+/**
+ * A foreground service that is running when there is an active call.
+ */
+internal class CallService : Service() {
+ private val logger by taggedLogger("CallService")
+
+ // Data
+ private var callId: StreamCallId? = null
+ private var callDisplayName: String? = null
+
+ // Service scope
+ private val serviceScope: CoroutineScope = CoroutineScope(Dispatchers.IO)
+
+ // Camera handling receiver
+ private val toggleCameraBroadcastReceiver = ToggleCameraBroadcastReceiver()
+
+ internal companion object {
+ const val TRIGGER_KEY =
+ "io.getstream.video.android.core.notifications.internal.service.CallService.call_trigger"
+        const val TRIGGER_INCOMING_CALL = "incoming_call"
+ const val TRIGGER_ONGOING_CALL = "ongoing_call"
+
+ /**
+ * Build start intent.
+ *
+ * @param context the context.
+ * @param callId the call id.
+ * @param trigger one of [TRIGGER_INCOMING_CALL] or [TRIGGER_ONGOING_CALL]
+ * @param callDisplayName the display name.
+ */
+ fun buildStartIntent(
+ context: Context,
+ callId: StreamCallId,
+ trigger: String,
+ callDisplayName: String? = null,
+ ): Intent {
+ val serviceIntent = Intent(context, CallService::class.java)
+ serviceIntent.putExtra(INTENT_EXTRA_CALL_CID, callId)
+ when (trigger) {
+ TRIGGER_INCOMING_CALL -> {
+ serviceIntent.putExtra(TRIGGER_KEY, TRIGGER_INCOMING_CALL)
+ serviceIntent.putExtra(INTENT_EXTRA_CALL_DISPLAY_NAME, callDisplayName)
+ }
+
+ TRIGGER_ONGOING_CALL -> {
+ serviceIntent.putExtra(TRIGGER_KEY, TRIGGER_ONGOING_CALL)
+ }
+
+ else -> {
+ throw IllegalArgumentException(
+ "Unknown $trigger, must be one of $TRIGGER_INCOMING_CALL or $TRIGGER_ONGOING_CALL",
+ )
+ }
+ }
+ return serviceIntent
+ }
+
+ /**
+ * Build stop intent.
+ *
+ * @param context the context.
+ */
+ fun buildStopIntent(context: Context) = Intent(context, CallService::class.java)
+ }
+
+ override fun onTimeout(startId: Int) {
+ super.onTimeout(startId)
+ logger.w { "Timeout received from the system, service will stop." }
+ stopService()
+ }
+
+ override fun onTaskRemoved(rootIntent: Intent?) {
+ super.onTaskRemoved(rootIntent)
+
+ // Leave the call
+ callId?.let {
+ StreamVideo.instanceOrNull()?.call(it.type, it.id)?.leave()
+ logger.i { "Left ongoing call." }
+ }
+
+ // Stop the service
+ stopService()
+ }
+
+ override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
+ logger.i { "Starting CallService. $intent" }
+ callId = intent?.streamCallId(INTENT_EXTRA_CALL_CID)
+ callDisplayName = intent?.streamCallDisplayName(INTENT_EXTRA_CALL_DISPLAY_NAME)
+ val trigger = intent?.getStringExtra(TRIGGER_KEY)
+ val streamVideo = StreamVideo.instanceOrNull()
+ val started = if (callId != null && streamVideo != null && trigger != null) {
+            val notificationData: Pair<Notification?, Int> =
+ when (trigger) {
+ TRIGGER_ONGOING_CALL -> Pair(
+ streamVideo.getOngoingCallNotification(
+ callId!!,
+ ),
+ callId.hashCode(),
+ )
+
+ TRIGGER_INCOMING_CALL -> Pair(
+ streamVideo.getRingingCallNotification(
+ callId!!,
+ callDisplayName!!,
+ ),
+ INCOMING_CALL_NOTIFICATION_ID,
+ )
+
+ else -> Pair(null, callId.hashCode())
+ }
+ val notification = notificationData.first
+ if (notification != null) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE) {
+ val foregroundServiceType =
+ when (trigger) {
+ TRIGGER_ONGOING_CALL -> ServiceInfo.FOREGROUND_SERVICE_TYPE_MICROPHONE
+ TRIGGER_INCOMING_CALL -> ServiceInfo.FOREGROUND_SERVICE_TYPE_SHORT_SERVICE
+ else -> ServiceInfo.FOREGROUND_SERVICE_TYPE_SHORT_SERVICE
+ }
+ ServiceCompat.startForeground(
+ this@CallService,
+ callId.hashCode(),
+ notification,
+ foregroundServiceType,
+ )
+ } else {
+ startForeground(callId.hashCode(), notification)
+ }
+ true
+ } else {
+ // Service not started no notification
+ logger.e { "Could not get notification for ongoing call" }
+ false
+ }
+ } else {
+ // Service not started, no call Id or stream video
+ logger.e { "Call id or streamVideo or trigger are not available." }
+ false
+ }
+
+ if (!started) {
+ logger.w { "Foreground service did not start!" }
+ stopService()
+ } else {
+ if (trigger == TRIGGER_INCOMING_CALL) {
+ updateRingingCall(streamVideo!!, callId!!)
+ initializeCallAndSocket(streamVideo, callId!!)
+ }
+ observeCallState(callId!!, streamVideo!!)
+ registerToggleCameraBroadcastReceiver()
+ }
+ return START_NOT_STICKY
+ }
+
+ @OptIn(DelicateCoroutinesApi::class)
+ private fun updateRingingCall(streamVideo: StreamVideo, callId: StreamCallId) {
+ serviceScope.launch {
+ val call = streamVideo.call(callId.type, callId.id)
+ streamVideo.state.addRingingCall(call)
+ }
+ }
+
+ @OptIn(DelicateCoroutinesApi::class)
+ private fun observeCallState(callId: StreamCallId, streamVideo: StreamVideo) {
+ // Ringing state
+ serviceScope.launch {
+ val call = streamVideo.call(callId.type, callId.id)
+ call.state.ringingState.collect {
+ logger.i { "Ringing state: $it" }
+ when (it) {
+ is RingingState.RejectedByAll -> {
+ stopService()
+ }
+ else -> {
+ // Do nothing
+ }
+ }
+ }
+ }
+
+ // Call state
+ serviceScope.launch {
+ val call = streamVideo.call(callId.type, callId.id)
+ call.subscribe {
+ logger.i { "Received event in service: $it" }
+ when (it) {
+ is CallRejectedEvent -> {
+ // When call is rejected by the caller
+ stopService()
+ }
+
+ is CallEndedEvent -> {
+ // When call ends for any reason
+ stopService()
+ }
+ }
+ }
+ }
+ }
+
+ @OptIn(DelicateCoroutinesApi::class)
+ private fun initializeCallAndSocket(
+ streamVideo: StreamVideo,
+ callId: StreamCallId,
+ ) {
+ // Update call
+ serviceScope.launch {
+ val call = streamVideo.call(callId.type, callId.id)
+ val update = call.get()
+ if (update.isFailure) {
+ update.errorOrNull()?.let {
+ logger.e { it.message }
+ } ?: let {
+ logger.e { "Failed to update call." }
+ }
+ stopService() // Failed to update call
+ return@launch
+ }
+ }
+
+ // Monitor coordinator socket
+ serviceScope.launch {
+ streamVideo.connectIfNotAlreadyConnected()
+ }
+ }
+
+ override fun onDestroy() {
+ stopService()
+ super.onDestroy()
+ }
+
+ // This service does not return a Binder
+ override fun onBind(intent: Intent?): IBinder? = null
+
+ // Internal logic
+ /**
+ * Handle all aspects of stopping the service.
+ */
+ private fun stopService() {
+ // Cancel the notification
+ val notificationManager = NotificationManagerCompat.from(this)
+ callId?.let {
+            val notificationId = it.hashCode()
+ notificationManager.cancel(notificationId)
+ }
+
+ // Optionally cancel any incoming call notification
+ notificationManager.cancel(INCOMING_CALL_NOTIFICATION_ID)
+
+ // Stop
+ unregisterToggleCameraBroadcastReceiver()
+
+ // Stop any jobs
+ serviceScope.cancel()
+
+ // Optionally (no-op if already stopping)
+ stopSelf()
+ }
+ private fun registerToggleCameraBroadcastReceiver() {
+ try {
+ registerReceiver(
+ toggleCameraBroadcastReceiver,
+ IntentFilter().apply {
+ addAction(Intent.ACTION_SCREEN_ON)
+ addAction(Intent.ACTION_SCREEN_OFF)
+ addAction(Intent.ACTION_USER_PRESENT)
+ },
+ )
+ } catch (e: Exception) {
+ logger.e(e) { "Unable to register ToggleCameraBroadcastReceiver." }
+ }
+ }
+
+ private fun unregisterToggleCameraBroadcastReceiver() {
+ try {
+ unregisterReceiver(toggleCameraBroadcastReceiver)
+ } catch (e: Exception) {
+ logger.e(e) { "Unable to unregister ToggleCameraBroadcastReceiver." }
+ }
+ }
+}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/OngoingCallService.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/OngoingCallService.kt
deleted file mode 100644
index ce7420b5bb..0000000000
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/OngoingCallService.kt
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
- *
- * Licensed under the Stream License;
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.getstream.video.android.core.notifications.internal.service
-
-import android.app.Service
-import android.content.Intent
-import android.os.IBinder
-import androidx.core.app.NotificationManagerCompat
-import io.getstream.log.taggedLogger
-import io.getstream.video.android.core.StreamVideo
-import io.getstream.video.android.core.notifications.NotificationHandler.Companion.INTENT_EXTRA_CALL_CID
-import io.getstream.video.android.model.StreamCallId
-import io.getstream.video.android.model.streamCallId
-
-/**
- * A foreground service that is running when there is an active call.
- */
-internal class OngoingCallService : Service() {
- private val logger by taggedLogger("OngoingCallService")
- private var callId: StreamCallId? = null
-
- override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
- callId = intent?.streamCallId(INTENT_EXTRA_CALL_CID)
- val streamVideo = StreamVideo.instanceOrNull()
- val started = if (callId != null && streamVideo != null) {
- val notification = streamVideo.getOngoingCallNotification(callId!!)
- if (notification != null) {
- startForeground(callId.hashCode(), notification)
- true
- } else {
- // Service not started no notification
- logger.e { "Could not get notification for ongoing call" }
- false
- }
- } else {
- // Service not started, no call Id or stream video
- logger.e { "Call id or streamVideo are not available." }
- false
- }
-
- if (!started) {
- logger.w { "Foreground service did not start!" }
- stopSelf()
- }
- return START_NOT_STICKY
- }
-
- override fun onDestroy() {
- callId?.let {
- val notificationId = callId.hashCode()
- NotificationManagerCompat.from(this).cancel(notificationId)
- }
- super.onDestroy()
- }
-
- // This service does not return a Binder
- override fun onBind(intent: Intent?): IBinder? = null
-}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/model/StreamCallId.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/model/StreamCallId.kt
index 099ea54e76..66211ee95b 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/model/StreamCallId.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/model/StreamCallId.kt
@@ -83,3 +83,5 @@ public fun Intent.streamCallId(key: String): StreamCallId? = when {
getParcelableExtra(key)
as? StreamCallId
}
+
+public fun Intent.streamCallDisplayName(key: String): String = this.getStringExtra(key) ?: "."
diff --git a/stream-video-android-ui-compose/.gitignore b/stream-video-android-ui-compose/.gitignore
index 42afabfd2a..43c1a66995 100644
--- a/stream-video-android-ui-compose/.gitignore
+++ b/stream-video-android-ui-compose/.gitignore
@@ -1 +1,2 @@
-/build
\ No newline at end of file
+/build
+!/libs/**
\ No newline at end of file
diff --git a/stream-video-android-ui-compose/build.gradle.kts b/stream-video-android-ui-compose/build.gradle.kts
index 4e72c51887..f68356b319 100644
--- a/stream-video-android-ui-compose/build.gradle.kts
+++ b/stream-video-android-ui-compose/build.gradle.kts
@@ -67,12 +67,14 @@ dependencies {
implementation(libs.landscapist.coil)
implementation(libs.landscapist.animation)
implementation(libs.landscapist.placeholder)
- implementation(libs.landscapist.transformation)
+
+ // render scripts
+ compileOnly(files("libs/renderscript-toolkit.aar"))
// telephoto
implementation(libs.telephoto)
+
+ // preview
compileOnly(project(":stream-video-android-previewdata"))
testImplementation(project(":stream-video-android-previewdata"))
-
- // mock
}
\ No newline at end of file
diff --git a/stream-video-android-ui-compose/libs/renderscript-toolkit.aar b/stream-video-android-ui-compose/libs/renderscript-toolkit.aar
new file mode 100644
index 0000000000..daf90d952d
Binary files /dev/null and b/stream-video-android-ui-compose/libs/renderscript-toolkit.aar differ
diff --git a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/background/CallBackground.kt b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/background/CallBackground.kt
index 50a555748e..29d735a2bf 100644
--- a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/background/CallBackground.kt
+++ b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/background/CallBackground.kt
@@ -37,9 +37,9 @@ import com.skydoves.landscapist.ImageOptions
import com.skydoves.landscapist.animation.crossfade.CrossfadePlugin
import com.skydoves.landscapist.coil.CoilImage
import com.skydoves.landscapist.components.rememberImageComponent
-import com.skydoves.landscapist.transformation.blur.BlurTransformationPlugin
import io.getstream.video.android.compose.theme.VideoTheme
import io.getstream.video.android.compose.ui.components.avatar.AvatarImagePreview
+import io.getstream.video.android.compose.ui.components.plugins.BlurTransformationPlugin
import io.getstream.video.android.core.MemberState
import io.getstream.video.android.core.model.CallUser
import io.getstream.video.android.core.utils.toCallUser
diff --git a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsLayout.kt b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsLayout.kt
index 1cb885fb6f..941af80b34 100644
--- a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsLayout.kt
+++ b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/renderer/ParticipantsLayout.kt
@@ -99,6 +99,7 @@ public fun ParticipantsLayout(
isShowingReactions = style.isShowingReactions,
labelPosition = style.labelPosition,
),
+ videoRenderer = videoRenderer,
)
} else {
ParticipantsRegularGrid(
diff --git a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/ringing/incomingcall/IncomingCallContent.kt b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/ringing/incomingcall/IncomingCallContent.kt
index 0d49c7f5a5..de702b016d 100644
--- a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/ringing/incomingcall/IncomingCallContent.kt
+++ b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/call/ringing/incomingcall/IncomingCallContent.kt
@@ -147,7 +147,6 @@ public fun IncomingCallContent(
} else {
VideoTheme.dimens.avatarAppbarPadding
}
-
detailsContent?.invoke(this, participants, topPadding) ?: IncomingCallDetails(
modifier = Modifier
.align(Alignment.CenterHorizontally)
diff --git a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/BlurTransformationPlugin.kt b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/BlurTransformationPlugin.kt
new file mode 100644
index 0000000000..2f257e3c69
--- /dev/null
+++ b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/BlurTransformationPlugin.kt
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.compose.ui.components.plugins
+
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.Immutable
+import androidx.compose.ui.graphics.ImageBitmap
+import androidx.compose.ui.graphics.painter.Painter
+import com.skydoves.landscapist.plugins.ImagePlugin
+
+/**
+ * Originated from [Landscapist](https://github.com/skydoves/landscapist).
+ *
+ * BlurTransformationPlugin adds blur transformation effect while rendering an image.
+ * An image plugin that extends [ImagePlugin.PainterPlugin] to be executed while rendering painters.
+ *
+ * @property radius The radius of the pixels used to blur, a value from 0 to infinite. Default is 10.
+ */
+@Immutable
+internal data class BlurTransformationPlugin(
+ val radius: Int = 10,
+) : ImagePlugin.PainterPlugin {
+
+ /**
+ * Compose circular reveal painter with an [imageBitmap] to the given [painter].
+ *
+ * @param imageBitmap A target [ImageBitmap] to be drawn on the painter.
+ * @param painter A given painter to be executed circular reveal animation.
+ */
+ @Composable
+ override fun compose(imageBitmap: ImageBitmap, painter: Painter): Painter {
+ return painter.rememberBlurPainter(
+ imageBitmap = imageBitmap,
+ radius = radius,
+ )
+ }
+}
diff --git a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/RememberBlurPainter.kt b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/RememberBlurPainter.kt
new file mode 100644
index 0000000000..91cc79854e
--- /dev/null
+++ b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/RememberBlurPainter.kt
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.compose.ui.components.plugins
+
+import android.graphics.Bitmap
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.remember
+import androidx.compose.ui.graphics.ImageBitmap
+import androidx.compose.ui.graphics.asAndroidBitmap
+import androidx.compose.ui.graphics.asImageBitmap
+import androidx.compose.ui.graphics.painter.Painter
+import com.google.android.renderscript.Toolkit
+
+/**
+ * Originated from [Landscapist](https://github.com/skydoves/landscapist).
+ *
+ * This is an extension of the [Painter] for giving blur transformation effect to the given [imageBitmap].
+ *
+ * @param imageBitmap an image bitmap for loading the content.
+ * @param radius The radius of the pixels used to blur, a value from 0 to infinite. Default is 10.
+ */
+@Composable
+internal fun Painter.rememberBlurPainter(
+ imageBitmap: ImageBitmap,
+ radius: Int,
+): Painter {
+ var androidBitmap = imageBitmap.asAndroidBitmap()
+
+ if (!(
+ androidBitmap.config == Bitmap.Config.ARGB_8888 ||
+ androidBitmap.config == Bitmap.Config.ALPHA_8
+ )
+ ) {
+ androidBitmap = androidBitmap.copy(Bitmap.Config.ARGB_8888, false)
+ }
+
+ val blurredBitmap = remember(imageBitmap, radius) {
+ iterativeBlur(androidBitmap, radius)
+ }
+ return remember(this) {
+ TransformationPainter(
+ imageBitmap = blurredBitmap.asImageBitmap(),
+ painter = this,
+ )
+ }
+}
+
+private fun iterativeBlur(
+ androidBitmap: Bitmap,
+ radius: Int,
+): Bitmap {
+ val iterate = (radius + 1) / 25
+ var bitmap: Bitmap = Toolkit.blur(
+ inputBitmap = androidBitmap,
+ radius = (radius + 1) % 25,
+ )
+
+ for (i in 0 until iterate) {
+ bitmap = Toolkit.blur(
+ inputBitmap = bitmap,
+ radius = 25,
+ )
+ }
+
+ return bitmap
+}
diff --git a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/TransformationPainter.kt b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/TransformationPainter.kt
new file mode 100644
index 0000000000..c4eb3e0df9
--- /dev/null
+++ b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/plugins/TransformationPainter.kt
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2014-2023 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.compose.ui.components.plugins
+
+import android.graphics.Matrix
+import android.graphics.RectF
+import androidx.compose.ui.geometry.Size
+import androidx.compose.ui.geometry.toRect
+import androidx.compose.ui.graphics.ImageBitmap
+import androidx.compose.ui.graphics.ImageShader
+import androidx.compose.ui.graphics.Paint
+import androidx.compose.ui.graphics.ShaderBrush
+import androidx.compose.ui.graphics.TileMode
+import androidx.compose.ui.graphics.asAndroidBitmap
+import androidx.compose.ui.graphics.drawscope.DrawScope
+import androidx.compose.ui.graphics.drawscope.drawIntoCanvas
+import androidx.compose.ui.graphics.painter.Painter
+import androidx.core.util.Pools
+
+/**
+ * Originated from [Landscapist](https://github.com/skydoves/landscapist).
+ *
+ * TransformationPainter is a [Painter] which draws the [imageBitmap] to the given [painter].
+ *
+ * @param imageBitmap an image bitmap for loading for the content.
+ * @param painter an image painter to draw an [ImageBitmap] into the provided canvas.
+ */
+internal class TransformationPainter(
+ private val imageBitmap: ImageBitmap,
+ private val painter: Painter,
+) : Painter() {
+
+ /** return the dimension size of the [painter]'s intrinsic width and height. */
+ override val intrinsicSize: Size get() = painter.intrinsicSize
+
+ override fun DrawScope.onDraw() {
+ drawIntoCanvas { canvas ->
+ var dx = 0f
+ var dy = 0f
+ val scale: Float
+ val shaderMatrix = Matrix()
+ val shader = ImageShader(imageBitmap, TileMode.Clamp)
+ val brush = ShaderBrush(shader)
+ val paint = paintPool.acquire() ?: Paint()
+ paint.asFrameworkPaint().apply {
+ isAntiAlias = true
+ isDither = true
+ isFilterBitmap = true
+ }
+
+ // cache the paint in the internal stack.
+ canvas.saveLayer(size.toRect(), paint)
+
+ val mDrawableRect = RectF(0f, 0f, size.width, size.height)
+ val bitmapWidth: Int = imageBitmap.asAndroidBitmap().width
+ val bitmapHeight: Int = imageBitmap.asAndroidBitmap().height
+
+ if (bitmapWidth * mDrawableRect.height() > mDrawableRect.width() * bitmapHeight) {
+ scale = mDrawableRect.height() / bitmapHeight.toFloat()
+ dx = (mDrawableRect.width() - bitmapWidth * scale) * 0.5f
+ } else {
+ scale = mDrawableRect.width() / bitmapWidth.toFloat()
+ dy = (mDrawableRect.height() - bitmapHeight * scale) * 0.5f
+ }
+
+ // resize the matrix to scale by sx and sy.
+ shaderMatrix.setScale(scale, scale)
+
+ // post translate the matrix with the specified translation.
+ shaderMatrix.postTranslate(
+ (dx + 0.5f) + mDrawableRect.left,
+ (dy + 0.5f) + mDrawableRect.top,
+ )
+ // apply the scaled matrix to the shader.
+ shader.setLocalMatrix(shaderMatrix)
+ // draw an image bitmap as a rect.
+ drawRect(brush = brush)
+ // restore canvas.
+ canvas.restore()
+ // resets the paint and release to the pool.
+ paint.asFrameworkPaint().reset()
+ paintPool.release(paint)
+ }
+ }
+}
+
+/** paint pool which caching and reusing [Paint] instances. */
+private val paintPool = Pools.SimplePool(2)