diff --git a/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt b/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt index 217899510e..e86bd2de59 100644 --- a/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt +++ b/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt @@ -6,11 +6,11 @@ object Configuration { const val minSdk = 24 const val majorVersion = 1 const val minorVersion = 0 - const val patchVersion = 15 + const val patchVersion = 16 const val versionName = "$majorVersion.$minorVersion.$patchVersion" const val versionCode = 39 const val snapshotVersionName = "$majorVersion.$minorVersion.${patchVersion + 1}-SNAPSHOT" const val artifactGroup = "io.getstream" - const val streamVideoCallGooglePlayVersion = "1.1.8" - const val streamWebRtcVersionName = "1.1.1" + const val streamVideoCallGooglePlayVersion = "1.1.9" + const val streamWebRtcVersionName = "1.2.1" } diff --git a/demo-app/build.gradle.kts b/demo-app/build.gradle.kts index ef5f8cf6ae..4f07c0d7d3 100644 --- a/demo-app/build.gradle.kts +++ b/demo-app/build.gradle.kts @@ -208,6 +208,9 @@ dependencies { implementation(project(":stream-video-android-filters-video")) compileOnly(project(":stream-video-android-previewdata")) + // Noise Cancellation + implementation(libs.stream.video.android.noise.cancellation) + // Stream Chat SDK implementation(libs.stream.chat.compose) implementation(libs.stream.chat.offline) diff --git a/demo-app/src/main/AndroidManifest.xml b/demo-app/src/main/AndroidManifest.xml index e883c5eabd..ed175b8b08 100644 --- a/demo-app/src/main/AndroidManifest.xml +++ b/demo-app/src/main/AndroidManifest.xml @@ -29,6 +29,7 @@ android:allowBackup="true" android:icon="@mipmap/ic_launcher" android:label="@string/app_name" + android:networkSecurityConfig="@xml/network_security_config" android:roundIcon="@mipmap/ic_launcher_round" android:supportsRtl="true" android:theme="@style/Dogfooding" diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt index c9520f5c69..106566e118 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/call/CallScreen.kt @@ -94,6 +94,7 @@ import io.getstream.video.android.compose.ui.components.call.renderer.copy import io.getstream.video.android.core.Call import io.getstream.video.android.core.RealtimeConnection import io.getstream.video.android.core.call.state.ChooseLayout +import io.getstream.video.android.core.utils.isEnabled import io.getstream.video.android.filters.video.BlurredBackgroundVideoFilter import io.getstream.video.android.filters.video.VirtualBackgroundVideoFilter import io.getstream.video.android.mock.StreamPreviewDataUtils @@ -458,10 +459,17 @@ fun CallScreen( } if (isShowingSettingMenu) { + var isNoiseCancellationEnabled by remember { + mutableStateOf(call.isAudioProcessingEnabled()) + } + val settings by call.state.settings.collectAsStateWithLifecycle() + val noiseCancellationFeatureEnabled = settings?.audio?.noiseCancellation?.isEnabled == true SettingsMenu( call = call, selectedVideoFilter = selectedVideoFilter, showDebugOptions = showDebugOptions, + noiseCancellationFeatureEnabled = noiseCancellationFeatureEnabled, + noiseCancellationEnabled = isNoiseCancellationEnabled, onDismissed = { isShowingSettingMenu = false }, onSelectVideoFilter = { filterIndex -> selectedVideoFilter = filterIndex @@ -482,6 +490,9 @@ 
fun CallScreen( isShowingSettingMenu = false isShowingFeedbackDialog = true }, + onNoiseCancellation = { + isNoiseCancellationEnabled = call.toggleAudioProcessing() + }, ) { isShowingStats = true isShowingSettingMenu = false diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/CallJoinViewModel.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/CallJoinViewModel.kt index da83874d44..12388cff98 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/CallJoinViewModel.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/join/CallJoinViewModel.kt @@ -20,23 +20,25 @@ import androidx.lifecycle.ViewModel import androidx.lifecycle.viewModelScope import com.google.android.gms.auth.api.signin.GoogleSignInClient import dagger.hilt.android.lifecycle.HiltViewModel +import io.getstream.android.push.PushProvider import io.getstream.chat.android.client.ChatClient import io.getstream.video.android.core.Call import io.getstream.video.android.core.StreamVideo import io.getstream.video.android.datastore.delegate.StreamUserDataStore +import io.getstream.video.android.model.Device import io.getstream.video.android.model.User import io.getstream.video.android.model.mapper.isValidCallCid import io.getstream.video.android.model.mapper.toTypeAndId import io.getstream.video.android.util.NetworkMonitor import io.getstream.video.android.util.StreamVideoInitHelper -import kotlinx.coroutines.delay +import io.getstream.video.android.util.fcmToken import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.MutableSharedFlow +import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.SharedFlow import kotlinx.coroutines.flow.SharingStarted import kotlinx.coroutines.flow.flatMapLatest import kotlinx.coroutines.flow.flowOf -import kotlinx.coroutines.flow.map import kotlinx.coroutines.flow.shareIn import kotlinx.coroutines.launch import java.util.UUID @@ -49,7 +51,7 @@ class CallJoinViewModel @Inject constructor( networkMonitor: NetworkMonitor, ) : ViewModel() { val user: Flow = dataStore.user - val isLoggedOut = dataStore.user.map { it == null } + val isLoggedOut = MutableStateFlow(false) var autoLogInAfterLogOut = true val isNetworkAvailable = networkMonitor.isNetworkAvailable @@ -101,12 +103,26 @@ class CallJoinViewModel @Inject constructor( fun logOut() { viewModelScope.launch { - googleSignInClient.signOut() - dataStore.clear() - StreamVideo.instance().logOut() ChatClient.instance().disconnect(true).enqueue() - delay(200) + dataStore.clear() // Demo App DataStore + googleSignInClient.signOut() + + StreamVideo.instanceOrNull()?.let { streamVideo -> + fcmToken?.let { fcmToken -> + streamVideo.deleteDevice( + Device( + id = fcmToken, + pushProvider = PushProvider.FIREBASE.key, + pushProviderName = "firebase", + ), + ) + } + streamVideo.logOut() + } + StreamVideo.removeClient() + + isLoggedOut.value = true } } } diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/lobby/CallLobbyViewModel.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/lobby/CallLobbyViewModel.kt index cd2b5ecdee..f900c98ddd 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/lobby/CallLobbyViewModel.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/lobby/CallLobbyViewModel.kt @@ -26,6 +26,7 @@ import io.getstream.chat.android.client.ChatClient import io.getstream.video.android.core.Call import io.getstream.video.android.core.DeviceStatus import io.getstream.video.android.core.StreamVideo +import 
io.getstream.video.android.core.utils.isAutoOn import io.getstream.video.android.datastore.delegate.StreamUserDataStore import io.getstream.video.android.model.StreamCallId import io.getstream.video.android.model.User @@ -116,7 +117,7 @@ class CallLobbyViewModel @Inject constructor( // based on it val settings = call.state.settings.first { it != null } - val enabled = when (call.camera.status.first()) { + val isCameraEnabled = when (call.camera.status.first()) { is DeviceStatus.NotSelected -> { settings?.video?.cameraDefaultOn ?: false } @@ -131,7 +132,10 @@ class CallLobbyViewModel @Inject constructor( } // enable/disable camera capture (no preview would be visible otherwise) - call.camera.setEnabled(enabled) + call.camera.setEnabled(isCameraEnabled) + + val isNoiseCancellationEnabled = settings?.audio?.noiseCancellation?.isAutoOn ?: false + call.setAudioProcessingEnabled(isNoiseCancellationEnabled) } } diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/MenuDefinitions.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/MenuDefinitions.kt index 670b04abac..826e9673eb 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/MenuDefinitions.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/MenuDefinitions.kt @@ -30,6 +30,7 @@ import androidx.compose.material.icons.filled.HeadsetMic import androidx.compose.material.icons.filled.PortableWifiOff import androidx.compose.material.icons.filled.RestartAlt import androidx.compose.material.icons.filled.SettingsVoice +import androidx.compose.material.icons.filled.SpatialAudioOff import androidx.compose.material.icons.filled.SpeakerPhone import androidx.compose.material.icons.filled.SwitchLeft import androidx.compose.material.icons.filled.VideoFile @@ -46,6 +47,8 @@ import io.getstream.video.android.ui.menu.base.SubMenuItem */ fun defaultStreamMenu( showDebugOptions: Boolean = false, + noiseCancellationFeatureEnabled: Boolean = false, + noiseCancellationEnabled: Boolean = false, codecList: List, onCodecSelected: (MediaCodecInfo) -> Unit, isScreenShareEnabled: Boolean, @@ -57,6 +60,7 @@ fun defaultStreamMenu( onKillSfuWsClick: () -> Unit, onSwitchSfuClick: () -> Unit, onShowFeedback: () -> Unit, + onNoiseCancellation: () -> Unit, onDeviceSelected: (StreamAudioDevice) -> Unit, availableDevices: List, loadRecordings: suspend () -> List, @@ -108,6 +112,16 @@ fun defaultStreamMenu( action = onToggleScreenShare, ), ) + if (noiseCancellationFeatureEnabled) { + add( + ActionMenuItem( + title = "Noise cancellation", + icon = Icons.Default.SpatialAudioOff, + highlight = noiseCancellationEnabled, + action = onNoiseCancellation, + ), + ) + } if (showDebugOptions) { add( SubMenuItem( diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/SettingsMenu.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/SettingsMenu.kt index 7447eff3ce..9c4d544de9 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/SettingsMenu.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/SettingsMenu.kt @@ -51,7 +51,7 @@ import com.google.accompanist.permissions.PermissionStatus import com.google.accompanist.permissions.rememberPermissionState import io.getstream.video.android.compose.theme.VideoTheme import io.getstream.video.android.core.Call -import io.getstream.video.android.core.call.audio.AudioFilter +import io.getstream.video.android.core.call.audio.InputAudioFilter import io.getstream.video.android.core.mapper.ReactionMapper import 
io.getstream.video.android.tooling.extensions.toPx import io.getstream.video.android.ui.call.ReactionsMenu @@ -68,9 +68,12 @@ internal fun SettingsMenu( call: Call, selectedVideoFilter: Int, showDebugOptions: Boolean, + noiseCancellationFeatureEnabled: Boolean, + noiseCancellationEnabled: Boolean, onDismissed: () -> Unit, onSelectVideoFilter: (Int) -> Unit, onShowFeedback: () -> Unit, + onNoiseCancellation: () -> Unit, onShowCallStats: () -> Unit, ) { val context = LocalContext.current @@ -104,7 +107,7 @@ internal fun SettingsMenu( val onToggleAudioFilterClick: () -> Unit = { if (call.audioFilter == null) { - call.audioFilter = object : AudioFilter { + call.audioFilter = object : InputAudioFilter { override fun applyFilter( audioFormat: Int, channelCount: Int, @@ -206,6 +209,8 @@ internal fun SettingsMenu( }, items = defaultStreamMenu( showDebugOptions = showDebugOptions, + noiseCancellationFeatureEnabled = noiseCancellationFeatureEnabled, + noiseCancellationEnabled = noiseCancellationEnabled, codecList = codecInfos, availableDevices = availableDevices, onDeviceSelected = { @@ -223,6 +228,7 @@ internal fun SettingsMenu( onToggleAudioFilterClick = onToggleAudioFilterClick, onSwitchSfuClick = onSwitchSfuClick, onShowCallStats = onShowCallStats, + onNoiseCancellation = onNoiseCancellation, isScreenShareEnabled = isScreenSharing, loadRecordings = onLoadRecordings, ), @@ -284,6 +290,7 @@ private fun SettingsMenuPreview() { availableDevices = emptyList(), onDeviceSelected = {}, onShowFeedback = {}, + onNoiseCancellation = {}, loadRecordings = { emptyList() }, ), ) diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/base/DynamicMenu.kt b/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/base/DynamicMenu.kt index af3138334a..216cc627b7 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/base/DynamicMenu.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/ui/menu/base/DynamicMenu.kt @@ -222,6 +222,7 @@ private fun DynamicMenuPreview() { availableDevices = emptyList(), onDeviceSelected = {}, onShowFeedback = {}, + onNoiseCancellation = {}, loadRecordings = { emptyList() }, ), ) @@ -248,6 +249,7 @@ private fun DynamicMenuDebugOptionPreview() { availableDevices = emptyList(), onDeviceSelected = {}, onShowFeedback = {}, + onNoiseCancellation = {}, loadRecordings = { emptyList() }, ), ) diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/util/PushNotifications.kt b/demo-app/src/main/kotlin/io/getstream/video/android/util/PushNotifications.kt new file mode 100644 index 0000000000..669a43aa68 --- /dev/null +++ b/demo-app/src/main/kotlin/io/getstream/video/android/util/PushNotifications.kt @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2014-2024 Stream.io Inc. All rights reserved. + * + * Licensed under the Stream License; + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://github.com/GetStream/stream-video-android/blob/main/LICENSE + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.getstream.video.android.util + +import android.util.Log +import com.google.firebase.messaging.FirebaseMessaging +import kotlinx.coroutines.runBlocking +import kotlinx.coroutines.tasks.await + +val fcmToken: String? + get() = runBlocking { + try { + FirebaseMessaging.getInstance().token.await() + } catch (e: Exception) { + Log.e("FCM Token", "Failed to retrieve FCM token", e) + null + } + } diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/util/StreamVideoInitHelper.kt b/demo-app/src/main/kotlin/io/getstream/video/android/util/StreamVideoInitHelper.kt index 7d11d48e0e..b60f8d8378 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/util/StreamVideoInitHelper.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/util/StreamVideoInitHelper.kt @@ -36,6 +36,7 @@ import io.getstream.video.android.data.services.stream.StreamService import io.getstream.video.android.datastore.delegate.StreamUserDataStore import io.getstream.video.android.model.ApiKey import io.getstream.video.android.model.User +import io.getstream.video.android.noise.cancellation.NoiseCancellation import io.getstream.video.android.util.config.AppConfig import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.StateFlow @@ -207,6 +208,7 @@ object StreamVideoInitHelper { authData.token }, appName = "Stream Video Demo App", + audioProcessing = NoiseCancellation(context), ).build() } } diff --git a/demo-app/src/main/res/xml/network_security_config.xml b/demo-app/src/main/res/xml/network_security_config.xml new file mode 100644 index 0000000000..b57fed9239 --- /dev/null +++ b/demo-app/src/main/res/xml/network_security_config.xml @@ -0,0 +1,25 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx b/docusaurus/docs/Android/02-tutorials/03-livestream.mdx index 3799426f33..879f44d8ce 100644 --- a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx +++ b/docusaurus/docs/Android/02-tutorials/03-livestream.mdx @@ -363,10 +363,10 @@ It also went into more details about HLS & RTMP-in. There are several advanced features that can improve the livestreaming experience: * ** [Co-hosts](../03-guides/02-joining-creating-calls.mdx) ** You can add members to your livestream with elevated permissions. So you can have co-hosts, moderators etc. -* ** [Custom events](../03-guides/09-reactions-and-custom-events.mdx) ** You can use custom events on the call to share any additional data. Think about showing the score for a game, or any other realtime use case. -* ** [Reactions & Chat](../03-guides/09-reactions-and-custom-events.mdx) ** Users can react to the livestream, and you can add chat. This makes for a more engaging experience. +* ** [Custom events](../03-guides/10-reactions-and-custom-events.mdx) ** You can use custom events on the call to share any additional data. Think about showing the score for a game, or any other realtime use case. +* ** [Reactions & Chat](../03-guides/10-reactions-and-custom-events.mdx) ** Users can react to the livestream, and you can add chat. This makes for a more engaging experience. 
* ** [Notifications](../06-advanced/01-ringing.mdx) ** You can notify users via push notifications when the livestream starts -* ** [Recording](../06-advanced/06-recording.mdx) ** The call recording functionality allows you to record the call with various options and layouts +* ** [Recording](../06-advanced/09-recording.mdx) ** The call recording functionality allows you to record the call with various options and layouts ### Recap diff --git a/docusaurus/docs/Android/03-guides/05-noise-cancellation.mdx b/docusaurus/docs/Android/03-guides/05-noise-cancellation.mdx new file mode 100644 index 0000000000..60e521c832 --- /dev/null +++ b/docusaurus/docs/Android/03-guides/05-noise-cancellation.mdx @@ -0,0 +1,139 @@ +--- +title: Noise Cancellation +description: How to implement noise cancellation in Stream Video Android SDK +--- + +Noise Cancellation capabilities of our [Android Video SDK](https://github.com/GetStream/stream-video-android) can be enabled by installing our [NoiseCancellation](https://central.sonatype.com/artifact/io.getstream/stream-video-android-noise-cancellation/overview) package. Under the hood, this package uses the technology developed by [krisp.ai](https://krisp.ai/). + +## Installation + +### Add the library to your project + +To add the Stream Video Noise Cancellation library, open your app's `build.gradle.kts` file and add the following dependency: + +```kotlin +dependencies { + implementation("io.getstream:stream-video-android-noise-cancellation:1.0.1") +} +``` + +Make sure to replace `1.0.1` with the latest version of the noise cancellation library. + +## Integration + +Our Android SDK provides a utility component that makes the integration smoother. You'll need to create a `NoiseCancellation` instance and pass it to the `StreamVideoBuilder` when initializing the SDK. +```kotlin +import io.getstream.video.android.core.StreamVideoBuilder +import io.getstream.video.android.noise.cancellation.NoiseCancellation + +// ... + +val noiseCancellation = NoiseCancellation(context) +val streamVideo = StreamVideoBuilder( + context = context, + apiKey = apiKey, + user = user, + token = token, + // ... other configuration options + audioProcessing = noiseCancellation +).build() + +// ... +``` + +## Feature availability + +The availability of noise cancellation is controlled by the call settings. You can check the availability and status of noise cancellation through the `Call` object: + +```kotlin +val call: Call = // ... obtain your call object +val noiseCancellationMode = call.state.settings.value?.audio?.noiseCancellation?.mode +``` + +There are three possible modes for noise cancellation: + +### Available + +```kotlin +if (noiseCancellationMode == NoiseCancellationSettings.Mode.Available) { + // The feature is enabled on the dashboard and available for the call + // You can present noise cancellation toggle UI in your application +} +``` + +The feature has been enabled on the dashboard and it's available for the call. In this case, you are free to present any noise cancellation toggle UI in your application. + +:::info +Even though the feature may be enabled for your call, you should note that NoiseCancellation is a very performance-heavy process. For that reason, it's recommended to only allow the feature on devices with sufficient processing power. 
+ +While there isn't a definitive way to determine if a device can handle noise cancellation efficiently, you can use the following method to check for advanced audio processing capabilities: + +```kotlin +import android.content.pm.PackageManager + +val context: Context = // ... obtain your context +val hasAdvancedAudioProcessing = context.packageManager.hasSystemFeature(PackageManager.FEATURE_AUDIO_PRO) +``` + +This can serve as an indicator of whether the device might be capable of handling noise cancellation efficiently. Devices with this feature are more likely to have the necessary hardware to support performance-intensive audio processing tasks. + +For the most accurate assessment of noise cancellation performance, you may want to consider implementing your own benchmarking or testing mechanism on different device models. +::: + +For more info, you can refer to our UI docs about Noise Cancellation. + +### Disabled + +```kotlin +if (noiseCancellationMode == NoiseCancellationSettings.Mode.Disabled) { + // The feature is not enabled on the dashboard or not available for the call + // You should hide any noise cancellation toggle UI in your application +} +``` + +The feature hasn't been enabled on the dashboard or it isn't available for the call. In this case, you should hide any noise cancellation toggle UI in your application. + +### AutoOn + +```kotlin +if (noiseCancellationMode == NoiseCancellationSettings.Mode.AutoOn) { + // Noise cancellation is automatically enabled +} +``` + +Similar to `Available`, with the difference that, when possible, the StreamVideo SDK enables the filter automatically when the user joins the call. + +:::note +The requirements for `AutoOn` to work properly are: + +1. A `NoiseCancellation` instance provided when you initialize StreamVideo: + ```kotlin + val noiseCancellation = NoiseCancellation(context) + val streamVideo = StreamVideoBuilder( + // ... other parameters + audioProcessing = noiseCancellation + ).build() + ``` +2. Device has sufficient processing power (you can use the `FEATURE_AUDIO_PRO` check as an indicator) +::: + +## Activate/Deactivate the filter + +To toggle noise cancellation during a call, use the `toggleAudioProcessing()` method on the `Call` object: + +```kotlin +val call: Call = // ... obtain your call object + +// Check if audio processing (noise cancellation) is enabled +val isAudioProcessingEnabled = call.isAudioProcessingEnabled() + +// Toggle noise cancellation +val isEnabled = call.toggleAudioProcessing() + +// Or set it explicitly +call.setAudioProcessingEnabled(!isAudioProcessingEnabled) +``` + +Note that, although the toggle is exposed on `Call`, it switches the client-wide audio processing factory, so it affects all ongoing and future calls for the current `StreamVideo` instance. 
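As an illustrative sketch only (it mirrors how the demo app in this change wires things up; the composable name `NoiseCancellationToggle` is made up for the example), a UI toggle can combine the settings-based feature flag with the `Call`-level switch:

```kotlin
import androidx.compose.material.Button
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.lifecycle.compose.collectAsStateWithLifecycle
import io.getstream.video.android.core.Call
import io.getstream.video.android.core.utils.isEnabled

@Composable
fun NoiseCancellationToggle(call: Call) {
    // Feature availability comes from the dashboard-controlled call settings.
    val settings by call.state.settings.collectAsStateWithLifecycle()
    val featureEnabled = settings?.audio?.noiseCancellation?.isEnabled == true

    // Local UI state seeded from the current audio-processing state.
    var processingOn by remember { mutableStateOf(call.isAudioProcessingEnabled()) }

    if (featureEnabled) {
        Button(onClick = { processingOn = call.toggleAudioProcessing() }) {
            Text(if (processingOn) "Disable noise cancellation" else "Enable noise cancellation")
        }
    }
}
```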
+ diff --git a/docusaurus/docs/Android/03-guides/05-call-types.mdx b/docusaurus/docs/Android/03-guides/06-call-types.mdx similarity index 100% rename from docusaurus/docs/Android/03-guides/05-call-types.mdx rename to docusaurus/docs/Android/03-guides/06-call-types.mdx diff --git a/docusaurus/docs/Android/03-guides/06-keeping-the-call-alive.mdx b/docusaurus/docs/Android/03-guides/07-keeping-the-call-alive.mdx similarity index 100% rename from docusaurus/docs/Android/03-guides/06-keeping-the-call-alive.mdx rename to docusaurus/docs/Android/03-guides/07-keeping-the-call-alive.mdx diff --git a/docusaurus/docs/Android/03-guides/07-querying-calls.mdx b/docusaurus/docs/Android/03-guides/08-querying-calls.mdx similarity index 100% rename from docusaurus/docs/Android/03-guides/07-querying-calls.mdx rename to docusaurus/docs/Android/03-guides/08-querying-calls.mdx diff --git a/docusaurus/docs/Android/03-guides/08-permissions-and-moderation.mdx b/docusaurus/docs/Android/03-guides/09-permissions-and-moderation.mdx similarity index 96% rename from docusaurus/docs/Android/03-guides/08-permissions-and-moderation.mdx rename to docusaurus/docs/Android/03-guides/09-permissions-and-moderation.mdx index 5b4fb08077..667ea8f8de 100644 --- a/docusaurus/docs/Android/03-guides/08-permissions-and-moderation.mdx +++ b/docusaurus/docs/Android/03-guides/09-permissions-and-moderation.mdx @@ -72,12 +72,12 @@ The permissions are required and any usage of the `Call` object without them may ::: In order to notify an inconsistency the SDK will log a warning when `Call.join()` is being called without the required permissions. -This is completely ok, if you have a [call type](./05-call-types.mdx) which does not require streaming audio or video from the users device (e.g. `audio_room` or live broadcast where the user is only a guest and listens in to the stream). +This is completely ok, if you have a [call type](./06-call-types.mdx) which does not require streaming audio or video from the users device (e.g. `audio_room` or live broadcast where the user is only a guest and listens in to the stream). The SDK by default will check for runtime permissions based on call capabilities, so if your call requires audio to be sent, the SDK will expect that the `android.Manifest.permission.RECORD_AUDIO` is granted. :::warning -If you are not overriding the `runForegroundServiceForCalls` flag to `false` in the `StreamVideoBuilder` the resulting foreground service that starts for [keeping the call alive](./06-keeping-the-call-alive.mdx) can not run without the permissions and will crash with a detailed message. +If you are not overriding the `runForegroundServiceForCalls` flag to `false` in the `StreamVideoBuilder` the resulting foreground service that starts for [keeping the call alive](./07-keeping-the-call-alive.mdx) can not run without the permissions and will crash with a detailed message. ::: If you wish to override the behavior on which permissions are required for your calls you can provide a new implementation of `StreamPermissionCheck` to the `StreamVideoBuilder`. 
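Purely as an illustration (not part of this diff), and assuming the builder parameters referenced above, a client that swaps in its own permission check or opts out of the keep-alive foreground service could be built roughly like this; `myPermissionCheck` stands in for your own `StreamPermissionCheck` implementation:

```kotlin
import android.content.Context
import io.getstream.video.android.core.StreamVideo
import io.getstream.video.android.core.StreamVideoBuilder
import io.getstream.video.android.core.permission.android.StreamPermissionCheck
import io.getstream.video.android.model.User

fun buildListenOnlyClient(
    context: Context,
    apiKey: String,
    user: User,
    token: String,
    myPermissionCheck: StreamPermissionCheck, // your own implementation
): StreamVideo = StreamVideoBuilder(
    context = context,
    apiKey = apiKey,
    user = user,
    token = token,
    // Decide yourself which runtime permissions a call really needs.
    permissionCheck = myPermissionCheck,
    // Or skip the keep-alive foreground service entirely (e.g. a listen-only audio_room guest).
    runForegroundServiceForCalls = false,
).build()
```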
diff --git a/docusaurus/docs/Android/03-guides/09-reactions-and-custom-events.mdx b/docusaurus/docs/Android/03-guides/10-reactions-and-custom-events.mdx similarity index 100% rename from docusaurus/docs/Android/03-guides/09-reactions-and-custom-events.mdx rename to docusaurus/docs/Android/03-guides/10-reactions-and-custom-events.mdx diff --git a/docusaurus/docs/Android/04-ui-components/04-call/02-audio-call-content.mdx b/docusaurus/docs/Android/04-ui-components/04-call/02-audio-call-content.mdx index d2bfae4af4..443b928721 100644 --- a/docusaurus/docs/Android/04-ui-components/04-call/02-audio-call-content.mdx +++ b/docusaurus/docs/Android/04-ui-components/04-call/02-audio-call-content.mdx @@ -5,7 +5,7 @@ The `AudioCallContent` component is an alternative to the [`CallContent`](./1-ca :::note The `AudioCallContent` by itself does not make the call audio only. In order to have true audio only call you must update the capabilities of the call type and disable video via the Stream dashboard. -The call can still stream video if this is allowed by the call configuration. See [call types](../.././03-guides/05-call-types.mdx) for more details. +The call can still stream video if this is allowed by the call configuration. See [call types](../.././03-guides/06-call-types.mdx) for more details. ::: This component renders a call UI similar to the [ringing calls](./05-ringing-call.mdx) UI and has parameters (without the `videoRenderer` etc..) similar to [`CallContent`](./01-call-content.mdx) component. diff --git a/docusaurus/shared b/docusaurus/shared new file mode 120000 index 0000000000..61347b5d2d --- /dev/null +++ b/docusaurus/shared @@ -0,0 +1 @@ +/Users/kanat/.nvm/versions/node/v20.10.0/bin/../lib/node_modules/stream-chat-docusaurus-cli/shared \ No newline at end of file diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 3420408de4..4616282cf0 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -43,7 +43,8 @@ threetenAbp = "1.4.7" tink = "1.9.0" turbine = "0.13.0" -streamWebRTC = "1.1.2" +streamWebRTC = "1.2.1" +streamNoiseCancellation = "1.0.1" streamResult = "1.2.0" streamChat = "6.0.13" streamLog = "1.1.4" @@ -120,6 +121,7 @@ landscapist-transformation = { group = "com.github.skydoves", name = "landscapis accompanist-permission = { group = "com.google.accompanist", name = "accompanist-permissions", version.ref = "accompanist" } +stream-video-android-noise-cancellation = { module = "io.getstream:stream-video-android-noise-cancellation", version.ref = "streamNoiseCancellation" } telephoto = { group = "me.saket.telephoto", name = "zoomable", version.ref = "telephoto" } audioswitch = { group = "com.twilio", name = "audioswitch", version.ref = "audioswitch"} diff --git a/stream-video-android-core/api/stream-video-android-core.api b/stream-video-android-core/api/stream-video-android-core.api index 44e4b6f76d..50f32b0e09 100644 --- a/stream-video-android-core/api/stream-video-android-core.api +++ b/stream-video-android-core/api/stream-video-android-core.api @@ -9,7 +9,7 @@ public final class io/getstream/video/android/core/Call { public final fun end (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun fireEvent (Lorg/openapitools/client/models/VideoEvent;)V public final fun get (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; - public final fun getAudioFilter ()Lio/getstream/video/android/core/call/audio/AudioFilter; + public final fun getAudioFilter ()Lio/getstream/video/android/core/call/audio/InputAudioFilter; public final fun 
getCamera ()Lio/getstream/video/android/core/CameraManager; public final fun getCid ()Ljava/lang/String; public final fun getId ()Ljava/lang/String; @@ -31,6 +31,7 @@ public final class io/getstream/video/android/core/Call { public final fun hasCapability ([Lorg/openapitools/client/models/OwnCapability;)Z public final fun initRenderer (Lio/getstream/webrtc/android/ui/VideoTextureViewRenderer;Ljava/lang/String;Lstream/video/sfu/models/TrackType;Lkotlin/jvm/functions/Function1;)V public static synthetic fun initRenderer$default (Lio/getstream/video/android/core/Call;Lio/getstream/webrtc/android/ui/VideoTextureViewRenderer;Ljava/lang/String;Lstream/video/sfu/models/TrackType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V + public final fun isAudioProcessingEnabled ()Z public final fun isLocalPin (Ljava/lang/String;)Z public final fun isPinnedParticipant (Ljava/lang/String;)Z public final fun isServerPin (Ljava/lang/String;)Z @@ -60,7 +61,8 @@ public final class io/getstream/video/android/core/Call { public final fun sendCustomEvent (Ljava/util/Map;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun sendReaction (Ljava/lang/String;Ljava/lang/String;Ljava/util/Map;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public static synthetic fun sendReaction$default (Lio/getstream/video/android/core/Call;Ljava/lang/String;Ljava/lang/String;Ljava/util/Map;Lkotlin/coroutines/Continuation;ILjava/lang/Object;)Ljava/lang/Object; - public final fun setAudioFilter (Lio/getstream/video/android/core/call/audio/AudioFilter;)V + public final fun setAudioFilter (Lio/getstream/video/android/core/call/audio/InputAudioFilter;)V + public final fun setAudioProcessingEnabled (Z)V public final fun setVideoFilter (Lio/getstream/video/android/core/call/video/VideoFilter;)V public final fun setVisibility (Ljava/lang/String;Lstream/video/sfu/models/TrackType;Z)V public final fun startHLS (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; @@ -74,6 +76,7 @@ public final class io/getstream/video/android/core/Call { public final fun subscribeFor ([Ljava/lang/Class;Lio/getstream/video/android/core/events/VideoEventListener;)Lio/getstream/video/android/core/EventSubscription; public final fun switchSfu (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun takeScreenshot (Lio/getstream/video/android/core/model/VideoTrack;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; + public final fun toggleAudioProcessing ()Z public final fun unpinForEveryone (Ljava/lang/String;Ljava/lang/String;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun update (Ljava/util/Map;Lorg/openapitools/client/models/CallSettingsRequest;Lorg/threeten/bp/OffsetDateTime;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public static synthetic fun update$default (Lio/getstream/video/android/core/Call;Ljava/util/Map;Lorg/openapitools/client/models/CallSettingsRequest;Lorg/threeten/bp/OffsetDateTime;Lkotlin/coroutines/Continuation;ILjava/lang/Object;)Ljava/lang/Object; @@ -823,7 +826,8 @@ public final class io/getstream/video/android/core/StreamVideoBuilder { public fun 
(Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLio/getstream/video/android/core/notifications/internal/service/CallServiceConfig;Ljava/lang/String;Lio/getstream/video/android/core/sounds/Sounds;ZLio/getstream/video/android/core/permission/android/StreamPermissionCheck;)V public fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLio/getstream/video/android/core/notifications/internal/service/CallServiceConfig;Ljava/lang/String;Lio/getstream/video/android/core/sounds/Sounds;ZLio/getstream/video/android/core/permission/android/StreamPermissionCheck;I)V public fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLio/getstream/video/android/core/notifications/internal/service/CallServiceConfig;Ljava/lang/String;Lio/getstream/video/android/core/sounds/Sounds;ZLio/getstream/video/android/core/permission/android/StreamPermissionCheck;ILjava/lang/String;)V - public synthetic fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLio/getstream/video/android/core/notifications/internal/service/CallServiceConfig;Ljava/lang/String;Lio/getstream/video/android/core/sounds/Sounds;ZLio/getstream/video/android/core/permission/android/StreamPermissionCheck;ILjava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V + public fun (Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLio/getstream/video/android/core/notifications/internal/service/CallServiceConfig;Ljava/lang/String;Lio/getstream/video/android/core/sounds/Sounds;ZLio/getstream/video/android/core/permission/android/StreamPermissionCheck;ILjava/lang/String;Lorg/webrtc/ManagedAudioProcessingFactory;)V + public synthetic fun 
(Landroid/content/Context;Ljava/lang/String;Lio/getstream/video/android/core/GEO;Lio/getstream/video/android/model/User;Ljava/lang/String;Lkotlin/jvm/functions/Function2;Lio/getstream/video/android/core/logging/LoggingLevel;Lio/getstream/video/android/core/notifications/NotificationConfig;Lkotlin/jvm/functions/Function1;JZLjava/lang/String;ZLio/getstream/video/android/core/notifications/internal/service/CallServiceConfig;Ljava/lang/String;Lio/getstream/video/android/core/sounds/Sounds;ZLio/getstream/video/android/core/permission/android/StreamPermissionCheck;ILjava/lang/String;Lorg/webrtc/ManagedAudioProcessingFactory;ILkotlin/jvm/internal/DefaultConstructorMarker;)V public final fun build ()Lio/getstream/video/android/core/StreamVideo; } @@ -987,7 +991,10 @@ public final class io/getstream/video/android/core/call/TrackDimensions { public fun toString ()Ljava/lang/String; } -public abstract interface class io/getstream/video/android/core/call/audio/AudioFilter { +public abstract interface class io/getstream/video/android/core/call/audio/AudioFilter : io/getstream/video/android/core/call/audio/InputAudioFilter { +} + +public abstract interface class io/getstream/video/android/core/call/audio/InputAudioFilter { public abstract fun applyFilter (IIILjava/nio/ByteBuffer;)V } @@ -1026,17 +1033,20 @@ public final class io/getstream/video/android/core/call/connection/StreamPeerCon } public final class io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory { - public fun (Landroid/content/Context;I)V - public synthetic fun (Landroid/content/Context;IILkotlin/jvm/internal/DefaultConstructorMarker;)V + public fun (Landroid/content/Context;ILorg/webrtc/ManagedAudioProcessingFactory;)V + public synthetic fun (Landroid/content/Context;ILorg/webrtc/ManagedAudioProcessingFactory;ILkotlin/jvm/internal/DefaultConstructorMarker;)V public final fun getEglBase ()Lorg/webrtc/EglBase; + public final fun isAudioProcessingEnabled ()Z public final fun makeAudioSource (Lorg/webrtc/MediaConstraints;)Lorg/webrtc/AudioSource; public static synthetic fun makeAudioSource$default (Lio/getstream/video/android/core/call/connection/StreamPeerConnectionFactory;Lorg/webrtc/MediaConstraints;ILjava/lang/Object;)Lorg/webrtc/AudioSource; public final fun makeAudioTrack (Lorg/webrtc/AudioSource;Ljava/lang/String;)Lorg/webrtc/AudioTrack; public final fun makePeerConnection (Lkotlinx/coroutines/CoroutineScope;Lorg/webrtc/PeerConnection$RTCConfiguration;Lio/getstream/video/android/core/model/StreamPeerType;Lorg/webrtc/MediaConstraints;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function2;Lkotlin/jvm/functions/Function2;I)Lio/getstream/video/android/core/call/connection/StreamPeerConnection; public static synthetic fun makePeerConnection$default (Lio/getstream/video/android/core/call/connection/StreamPeerConnectionFactory;Lkotlinx/coroutines/CoroutineScope;Lorg/webrtc/PeerConnection$RTCConfiguration;Lio/getstream/video/android/core/model/StreamPeerType;Lorg/webrtc/MediaConstraints;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function2;Lkotlin/jvm/functions/Function2;IILjava/lang/Object;)Lio/getstream/video/android/core/call/connection/StreamPeerConnection; public final fun makeVideoTrack (Lorg/webrtc/VideoSource;Ljava/lang/String;)Lorg/webrtc/VideoTrack; + public final fun setAudioProcessingEnabled (Z)V public final fun setAudioRecordDataCallback (Lkotlin/jvm/functions/Function4;)V public final fun setAudioSampleCallback (Lkotlin/jvm/functions/Function1;)V + public final fun toggleAudioProcessing 
()Z } public final class io/getstream/video/android/core/call/signal/socket/RTCEventMapper { @@ -4239,6 +4249,7 @@ public final class io/getstream/video/android/core/notifications/internal/servic } public final class io/getstream/video/android/core/notifications/internal/service/CallServiceConfigKt { + public static final fun audioCallServiceConfig ()Lio/getstream/video/android/core/notifications/internal/service/CallServiceConfig; public static final fun callServiceConfig ()Lio/getstream/video/android/core/notifications/internal/service/CallServiceConfig; public static final fun livestreamAudioCallServiceConfig ()Lio/getstream/video/android/core/notifications/internal/service/CallServiceConfig; public static final fun livestreamCallServiceConfig ()Lio/getstream/video/android/core/notifications/internal/service/CallServiceConfig; @@ -4566,6 +4577,13 @@ public final class io/getstream/video/android/core/utils/MinimalSdpParser { public final fun setSdp (Ljava/lang/String;)V } +public final class io/getstream/video/android/core/utils/NoiseCancellationUtilsKt { + public static final fun isAutoOn (Lorg/openapitools/client/models/NoiseCancellationSettings;)Z + public static final fun isAvailable (Lorg/openapitools/client/models/NoiseCancellationSettings;)Z + public static final fun isDisabled (Lorg/openapitools/client/models/NoiseCancellationSettings;)Z + public static final fun isEnabled (Lorg/openapitools/client/models/NoiseCancellationSettings;)Z +} + public final class io/getstream/video/android/core/utils/RtpMapAttribute { public fun (ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)V public final fun component1 ()I diff --git a/stream-video-android-core/src/main/AndroidManifest.xml b/stream-video-android-core/src/main/AndroidManifest.xml index 7360e764c4..b8258835b8 100644 --- a/stream-video-android-core/src/main/AndroidManifest.xml +++ b/stream-video-android-core/src/main/AndroidManifest.xml @@ -26,37 +26,32 @@ - - - - - - + + - + - - - + + + + - + - + - + - - - @@ -112,5 +106,10 @@ android:name=".notifications.internal.service.LivestreamViewerService" android:foregroundServiceType="mediaPlayback" android:exported="false" /> + + \ No newline at end of file diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt index e984318a90..a0b0874866 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt @@ -26,7 +26,7 @@ import io.getstream.result.Result import io.getstream.result.Result.Failure import io.getstream.result.Result.Success import io.getstream.video.android.core.call.RtcSession -import io.getstream.video.android.core.call.audio.AudioFilter +import io.getstream.video.android.core.call.audio.InputAudioFilter import io.getstream.video.android.core.call.utils.SoundInputProcessor import io.getstream.video.android.core.call.video.VideoFilter import io.getstream.video.android.core.call.video.YuvFrame @@ -147,9 +147,9 @@ public class Call( var videoFilter: VideoFilter? = null /** - * Set a custom [AudioFilter] that will be applied to the audio stream recorded on your device. + * Set a custom [InputAudioFilter] that will be applied to the audio stream recorded on your device. */ - var audioFilter: AudioFilter? = null + var audioFilter: InputAudioFilter? 
= null /** * Called by the [CallHealthMonitor] when the ICE restarts failed after @@ -1068,6 +1068,18 @@ public class Call( return state.ownCapabilities.value.containsAll(elements) } + fun isAudioProcessingEnabled(): Boolean { + return clientImpl.isAudioProcessingEnabled() + } + + fun setAudioProcessingEnabled(enabled: Boolean) { + return clientImpl.setAudioProcessingEnabled(enabled) + } + + fun toggleAudioProcessing(): Boolean { + return clientImpl.toggleAudioProcessing() + } + @InternalStreamVideoApi public val debug = Debug(this) diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt index ca111ef206..928c5d4574 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ClientState.kt @@ -23,8 +23,11 @@ import io.getstream.video.android.core.notifications.internal.service.CallServic import io.getstream.video.android.core.utils.safeCall import io.getstream.video.android.model.StreamCallId import io.getstream.video.android.model.User +import io.getstream.video.android.model.UserType +import kotlinx.coroutines.CoroutineName import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.launch import org.openapitools.client.models.CallCreatedEvent import org.openapitools.client.models.CallRingEvent import org.openapitools.client.models.ConnectedEvent @@ -103,6 +106,8 @@ class ClientState(client: StreamVideo) { // mark connected if (event is ConnectedEvent) { _connection.value = ConnectionState.Connected + + registerPushDevice() } else if (event is CallCreatedEvent) { // what's the right thing to do here? // if it's ringing we add it @@ -118,6 +123,14 @@ class ClientState(client: StreamVideo) { } } + private fun registerPushDevice() { + with(clientImpl) { + scope.launch(CoroutineName("ClientState#registerPushDevice")) { + if (user.type == UserType.Authenticated) registerPushDevice() + } + } + } + internal fun handleError(error: Throwable) { if (error is ConnectException) { _connection.value = ConnectionState.Failed(error = Error(error)) diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt index a19e1c9399..01648d393a 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt @@ -42,7 +42,7 @@ import io.getstream.video.android.model.UserToken import io.getstream.video.android.model.UserType import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.launch -import java.lang.RuntimeException +import org.webrtc.ManagedAudioProcessingFactory import java.net.ConnectException /** @@ -74,12 +74,14 @@ import java.net.ConnectException * @property ensureSingleInstance Verify that only 1 version of the video client exists. Prevents integration mistakes. * @property videoDomain URL overwrite to allow for testing against a local instance of video. * @property runForegroundServiceForCalls If set to true, when there is an active call the SDK will run a foreground service to keep the process alive. (default: true) + * @property callServiceConfig Configuration for the call foreground service. 
See [CallServiceConfig]. * @property localSfuAddress Local SFU address (IP:port) to be used for testing. Leave null if not needed. * @property sounds Overwrite the default SDK sounds. See [Sounds]. * @property permissionCheck Used to check for system permission based on call capabilities. See [StreamPermissionCheck]. * @property crashOnMissingPermission Throw an exception or just log an error if [permissionCheck] fails. * @property audioUsage Used to signal to the system how to treat the audio tracks (voip or media). * @property appName Optional name for the application that is using the Stream Video SDK. Used for logging and debugging purposes. + * @property audioProcessing The audio processor used for custom modifications to audio data within WebRTC. * * @see build * @see ClientState.connection @@ -106,6 +108,7 @@ public class StreamVideoBuilder @JvmOverloads constructor( private val permissionCheck: StreamPermissionCheck = DefaultStreamPermissionCheck(), private val audioUsage: Int = defaultAudioUsage, private val appName: String? = null, + private val audioProcessing: ManagedAudioProcessingFactory? = null, ) { private val context: Context = context.applicationContext private val scope = CoroutineScope(DispatcherProvider.IO) @@ -204,6 +207,7 @@ public class StreamVideoBuilder @JvmOverloads constructor( crashOnMissingPermission = crashOnMissingPermission, audioUsage = audioUsage, appName = appName, + audioProcessing = audioProcessing, ) if (user.type == UserType.Guest) { @@ -239,14 +243,6 @@ public class StreamVideoBuilder @JvmOverloads constructor( // Installs Stream Video instance StreamVideo.install(client) - // Needs to be started after the client is initialised because the VideoPushDelegate - // is accessing the StreamVideo instance - scope.launch { - if (user.type == UserType.Authenticated) { - client.registerPushDevice() - } - } - return client } } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt index 6b4b201966..ddbadd449a 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoImpl.kt @@ -127,6 +127,7 @@ import org.openapitools.client.models.UpdateUserPermissionsResponse import org.openapitools.client.models.UserRequest import org.openapitools.client.models.VideoEvent import org.openapitools.client.models.WSCallEvent +import org.webrtc.ManagedAudioProcessingFactory import retrofit2.HttpException import java.net.ConnectException import java.util.* @@ -156,6 +157,7 @@ internal class StreamVideoImpl internal constructor( internal val crashOnMissingPermission: Boolean = false, internal val audioUsage: Int = defaultAudioUsage, internal val appName: String? = null, + internal val audioProcessing: ManagedAudioProcessingFactory? = null, ) : StreamVideo, NotificationHandler by streamNotificationManager { private var locationJob: Deferred>? 
= null @@ -182,7 +184,9 @@ internal class StreamVideoImpl internal constructor( private lateinit var connectContinuation: Continuation> @InternalStreamVideoApi - public var peerConnectionFactory = StreamPeerConnectionFactory(context, audioUsage) + public var peerConnectionFactory = + StreamPeerConnectionFactory(context, audioUsage, audioProcessing) + public override val userId = user.id private val logger by taggedLogger("Call:StreamVideo") @@ -1106,6 +1110,18 @@ internal class StreamVideoImpl internal constructor( connectionModule.api.getCall(type, id, ring = true) } } + + internal fun isAudioProcessingEnabled(): Boolean { + return peerConnectionFactory.isAudioProcessingEnabled() + } + + internal fun setAudioProcessingEnabled(enabled: Boolean) { + return peerConnectionFactory.setAudioProcessingEnabled(enabled) + } + + internal fun toggleAudioProcessing(): Boolean { + return peerConnectionFactory.toggleAudioProcessing() + } } /** Extension function that makes it easy to use on kotlin, but keeps Java usable as well */ diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/audio/AudioFilter.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/audio/InputAudioFilter.kt similarity index 65% rename from stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/audio/AudioFilter.kt rename to stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/audio/InputAudioFilter.kt index 21b6ff4207..fe23870c9b 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/audio/AudioFilter.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/audio/InputAudioFilter.kt @@ -18,7 +18,19 @@ package io.getstream.video.android.core.call.audio import java.nio.ByteBuffer -interface AudioFilter { +@Deprecated( + message = "Use InputAudioFilter instead", + replaceWith = ReplaceWith( + expression = "InputAudioFilter", + imports = ["io.getstream.video.android.core.call.audio.InputAudioFilter"], + ), +) +public fun interface AudioFilter : InputAudioFilter + +/** + * Manipulates the audio data before it's fed into WebRTC. + */ +public fun interface InputAudioFilter { /** * Invoked after an audio sample is recorded. Can be used to manipulate * the ByteBuffer before it's fed into WebRTC. 
Currently the audio in the @@ -26,5 +38,10 @@ interface AudioFilter { * * @param audioFormat format in android.media.AudioFormat */ - fun applyFilter(audioFormat: Int, channelCount: Int, sampleRate: Int, sampleData: ByteBuffer) + public fun applyFilter( + audioFormat: Int, + channelCount: Int, + sampleRate: Int, + sampleData: ByteBuffer, + ) } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt index 27413a0b60..ca3428c6e7 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt @@ -30,6 +30,7 @@ import org.webrtc.AudioTrack import org.webrtc.DefaultVideoDecoderFactory import org.webrtc.EglBase import org.webrtc.Logging +import org.webrtc.ManagedAudioProcessingFactory import org.webrtc.MediaConstraints import org.webrtc.MediaStream import org.webrtc.PeerConnection @@ -45,13 +46,15 @@ import java.nio.ByteBuffer /** * Builds a factory that provides [PeerConnection]s when requested. * - * @param context Used to build the underlying native components for the factory. - * @param audioUsage signal to the system how the audio tracks are used. + * @property context Used to build the underlying native components for the factory. + * @property audioUsage signal to the system how the audio tracks are used. + * @property audioProcessing Factory that provides audio processing capabilities. * Set this to [AudioAttributes.USAGE_MEDIA] if you want the audio track to behave like media, useful for livestreaming scenarios. */ public class StreamPeerConnectionFactory( private val context: Context, private val audioUsage: Int = defaultAudioUsage, + private var audioProcessing: ManagedAudioProcessingFactory? = null, ) { private val webRtcLogger by taggedLogger("Call:WebRTC") @@ -109,9 +112,9 @@ public class StreamPeerConnectionFactory( private val videoEncoderFactory by lazy { SimulcastAlignedVideoEncoderFactory( eglBase.eglBaseContext, - enableIntelVp8Encoder = true, - enableH264HighProfile = true, - resolutionAdjustment = ResolutionAdjustment.MULTIPLE_OF_16, + true, + true, + ResolutionAdjustment.MULTIPLE_OF_16, ) } @@ -151,6 +154,9 @@ public class StreamPeerConnectionFactory( ) PeerConnectionFactory.builder() + .apply { + audioProcessing?.also { setAudioProcessingFactory(it) } + } .setVideoDecoderFactory(videoDecoderFactory) .setVideoEncoderFactory(videoEncoderFactory) .setAudioDeviceModule( @@ -354,4 +360,28 @@ public class StreamPeerConnectionFactory( source: AudioSource, trackId: String, ): AudioTrack = factory.createAudioTrack(trackId, source) + + /** + * True if the audio processing is enabled, false otherwise. + */ + public fun isAudioProcessingEnabled(): Boolean { + return audioProcessing?.isEnabled ?: false + } + + /** + * Sets the audio processing on or off. + */ + public fun setAudioProcessingEnabled(enabled: Boolean) { + audioProcessing?.isEnabled = enabled + } + + /** + * Toggles the audio processing on and off. 
+ */ + public fun toggleAudioProcessing(): Boolean { + return audioProcessing?.let { + it.isEnabled = !it.isEnabled + it.isEnabled + } ?: false + } } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt index 949d51215f..b3a4fad40a 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt @@ -48,7 +48,7 @@ internal class StreamNotificationManager private constructor( private val context: Context, private val scope: CoroutineScope, private val notificationConfig: NotificationConfig, - private val api: ProductvideoApi, + private var api: ProductvideoApi, internal val deviceTokenStorage: DeviceTokenStorage, private val notificationPermissionManager: NotificationPermissionManager?, ) : NotificationHandler by notificationConfig.notificationHandler { @@ -123,6 +123,7 @@ internal class StreamNotificationManager private constructor( pushProvider = this.pushProvider.key, pushProviderName = this.providerName ?: "", ) + private fun PushDevice.toCreateDeviceRequest(): Result = when (pushProvider) { PushProvider.FIREBASE -> Result.Success(CreateDeviceRequest.PushProvider.Firebase) @@ -138,10 +139,12 @@ internal class StreamNotificationManager private constructor( } internal companion object { + private val logger: TaggedLogger by taggedLogger("StreamVideo:Notifications") @SuppressLint("StaticFieldLeak") private lateinit var internalStreamNotificationManager: StreamNotificationManager + internal fun install( context: Context, scope: CoroutineScope, @@ -151,10 +154,7 @@ internal class StreamNotificationManager private constructor( ): StreamNotificationManager { synchronized(this) { if (Companion::internalStreamNotificationManager.isInitialized) { - logger.e { - "The $internalStreamNotificationManager is already installed but you've " + - "tried to install a new one." - } + internalStreamNotificationManager.api = api } else { val application = context.applicationContext as? Application val updatedNotificationConfig = diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/AudioCallService.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/AudioCallService.kt new file mode 100644 index 0000000000..2234044205 --- /dev/null +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/AudioCallService.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2014-2024 Stream.io Inc. All rights reserved. + * + * Licensed under the Stream License; + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://github.com/GetStream/stream-video-android/blob/main/LICENSE + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt
index 949d51215f..b3a4fad40a 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/StreamNotificationManager.kt
@@ -48,7 +48,7 @@ internal class StreamNotificationManager private constructor(
     private val context: Context,
     private val scope: CoroutineScope,
     private val notificationConfig: NotificationConfig,
-    private val api: ProductvideoApi,
+    private var api: ProductvideoApi,
     internal val deviceTokenStorage: DeviceTokenStorage,
     private val notificationPermissionManager: NotificationPermissionManager?,
 ) : NotificationHandler by notificationConfig.notificationHandler {
@@ -123,6 +123,7 @@ internal class StreamNotificationManager private constructor(
             pushProvider = this.pushProvider.key,
             pushProviderName = this.providerName ?: "",
         )
+
     private fun PushDevice.toCreateDeviceRequest(): Result<CreateDeviceRequest.PushProvider> =
         when (pushProvider) {
             PushProvider.FIREBASE -> Result.Success(CreateDeviceRequest.PushProvider.Firebase)
@@ -138,10 +139,12 @@ internal class StreamNotificationManager private constructor(
     }
 
     internal companion object {
+        private val logger: TaggedLogger by taggedLogger("StreamVideo:Notifications")
 
         @SuppressLint("StaticFieldLeak")
         private lateinit var internalStreamNotificationManager: StreamNotificationManager
+
         internal fun install(
             context: Context,
             scope: CoroutineScope,
@@ -151,10 +154,7 @@
         ): StreamNotificationManager {
             synchronized(this) {
                 if (Companion::internalStreamNotificationManager.isInitialized) {
-                    logger.e {
-                        "The $internalStreamNotificationManager is already installed but you've " +
-                            "tried to install a new one."
-                    }
+                    internalStreamNotificationManager.api = api
                 } else {
                     val application = context.applicationContext as? Application
                     val updatedNotificationConfig =
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/AudioCallService.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/AudioCallService.kt
new file mode 100644
index 0000000000..2234044205
--- /dev/null
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/AudioCallService.kt
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2014-2024 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.core.notifications.internal.service
+
+import android.content.pm.ServiceInfo
+import io.getstream.log.TaggedLogger
+import io.getstream.log.taggedLogger
+
+internal class AudioCallService : CallService() {
+    override val logger: TaggedLogger by taggedLogger("AudioCallService")
+    override val serviceType = ServiceInfo.FOREGROUND_SERVICE_TYPE_MICROPHONE
+}
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallService.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallService.kt
index e1385b962d..44f4fd6230 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallService.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallService.kt
@@ -337,9 +337,9 @@ internal open class CallService : Service() {
             if (trigger == TRIGGER_INCOMING_CALL) {
                 updateRingingCall(streamVideo, intentCallId, RingingState.Incoming())
-                if (mediaPlayer == null) mediaPlayer = MediaPlayer()
+                instantiateMediaPlayer()
             } else if (trigger == TRIGGER_OUTGOING_CALL) {
-                if (mediaPlayer == null) mediaPlayer = MediaPlayer()
+                instantiateMediaPlayer()
             }
             observeCall(intentCallId, streamVideo)
             registerToggleCameraBroadcastReceiver()
@@ -424,6 +424,12 @@ internal open class CallService : Service() {
         }
     }
 
+    private fun instantiateMediaPlayer() {
+        synchronized(this) {
+            if (mediaPlayer == null) mediaPlayer = MediaPlayer()
+        }
+    }
+
     private fun observeCall(callId: StreamCallId, streamVideo: StreamVideoImpl) {
         observeRingingState(callId, streamVideo)
         observeCallEvents(callId, streamVideo)
@@ -480,16 +486,18 @@ internal open class CallService : Service() {
 
     private fun playCallSound(soundUri: Uri?) {
         try {
-            requestAudioFocus(
-                context = applicationContext,
-                onGranted = {
-                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
-                        playWithRingtone(soundUri)
-                    } else {
-                        playWithMediaPlayer(soundUri)
-                    }
-                },
-            )
+            synchronized(this) {
+                requestAudioFocus(
+                    context = applicationContext,
+                    onGranted = {
+                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
+                            playWithRingtone(soundUri)
+                        } else {
+                            playWithMediaPlayer(soundUri)
+                        }
+                    },
+                )
+            }
         } catch (e: Exception) {
             logger.d { "[Sounds] Error playing call sound: ${e.message}" }
         }
@@ -564,18 +572,20 @@ internal open class CallService : Service() {
     }
 
     private fun stopCallSound() {
-        try {
-            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
-                logger.d { "[Sounds] Stopping Ringtone sound" }
-                if (ringtone?.isPlaying == true) ringtone?.stop()
-            } else {
-                logger.d { "[Sounds] Stopping MediaPlayer sound" }
-                if (mediaPlayer?.isPlaying == true) mediaPlayer?.stop()
+        synchronized(this) {
+            try {
+                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
+                    logger.d { "[Sounds] Stopping Ringtone sound" }
+                    if (ringtone?.isPlaying == true) ringtone?.stop()
+                } else {
+                    logger.d { "[Sounds] Stopping MediaPlayer sound" }
+                    if (mediaPlayer?.isPlaying == true) mediaPlayer?.stop()
+                }
+            } catch (e: Exception) {
+                logger.d { "[Sounds] Error stopping call sound: ${e.message}" }
+            } finally {
+                abandonAudioFocus()
             }
-        } catch (e: Exception) {
-            logger.d { "[Sounds] Error stopping call sound: ${e.message}" }
-        } finally {
-            abandonAudioFocus()
         }
     }
@@ -675,17 +685,6 @@ internal open class CallService : Service() {
         }
     }
 
-    private fun unregisterToggleCameraBroadcastReceiver() {
-        if (isToggleCameraBroadcastReceiverRegistered) {
-            try {
-                unregisterReceiver(toggleCameraBroadcastReceiver)
-                isToggleCameraBroadcastReceiverRegistered = false
-            } catch (e: Exception) {
-                logger.d { "Unable to unregister ToggleCameraBroadcastReceiver." }
-            }
-        }
-    }
-
     override fun onTimeout(startId: Int) {
         super.onTimeout(startId)
         logger.w { "Timeout received from the system, service will stop." }
@@ -775,17 +774,30 @@ internal open class CallService : Service() {
         stopSelf()
     }
 
+    private fun unregisterToggleCameraBroadcastReceiver() {
+        if (isToggleCameraBroadcastReceiverRegistered) {
+            try {
+                unregisterReceiver(toggleCameraBroadcastReceiver)
+                isToggleCameraBroadcastReceiverRegistered = false
+            } catch (e: Exception) {
+                logger.d { "Unable to unregister ToggleCameraBroadcastReceiver." }
+            }
+        }
+    }
+
     private fun cleanAudioResources() {
-        logger.d { "[Sounds] Cleaning audio resources" }
+        synchronized(this) {
+            logger.d { "[Sounds] Cleaning audio resources" }
 
-        if (ringtone?.isPlaying == true) ringtone?.stop()
-        ringtone = null
+            if (ringtone?.isPlaying == true) ringtone?.stop()
+            ringtone = null
 
-        mediaPlayer?.release()
-        mediaPlayer = null
+            mediaPlayer?.release()
+            mediaPlayer = null
 
-        audioManager = null
-        audioFocusRequest = null
+            audioManager = null
+            audioFocusRequest = null
+        }
     }
 
     // This service does not return a Binder
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallServiceConfig.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallServiceConfig.kt
index 0226507d86..5083006d4f 100644
--- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallServiceConfig.kt
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/CallServiceConfig.kt
@@ -25,9 +25,15 @@ internal const val ANY_MARKER = "ALL_CALL_TYPES"
 // API
 
 /**
- * Configuration class for the call service.
+ * Configuration class for the call foreground service.
  * @param runCallServiceInForeground If the call service should run in the foreground.
  * @param callServicePerType A map of call service per type.
+ *
+ * @see callServiceConfig
+ * @see livestreamCallServiceConfig
+ * @see livestreamAudioCallServiceConfig
+ * @see livestreamGuestCallServiceConfig
+ * @see audioCallServiceConfig
  */
 public data class CallServiceConfig(
     val runCallServiceInForeground: Boolean = true,
@@ -38,7 +44,8 @@ public data class CallServiceConfig(
 )
 
 /**
- * Return a default configuration for the call service configuration.
+ * Returns the default call foreground service configuration.
+ * Uses: `FOREGROUND_SERVICE_TYPE_PHONE_CALL`.
  */
 public fun callServiceConfig(): CallServiceConfig {
     return CallServiceConfig(
@@ -50,7 +57,8 @@ public fun callServiceConfig(): CallServiceConfig {
 }
 
 /**
- * Return a default configuration for the call service configuration.
+ * Returns a foreground service configuration appropriate for livestream hosts.
+ * Uses: `FOREGROUND_SERVICE_TYPE_CAMERA` and `FOREGROUND_SERVICE_TYPE_MICROPHONE`.
  */
 public fun livestreamCallServiceConfig(): CallServiceConfig {
     return CallServiceConfig(
@@ -63,7 +71,8 @@ public fun livestreamCallServiceConfig(): CallServiceConfig {
 }
 
 /**
- * Return a default configuration for the call service configuration for livestream which has no camera
+ * Returns a foreground service configuration appropriate for audio-only livestream hosts.
+ * Uses: `FOREGROUND_SERVICE_TYPE_MICROPHONE`.
  */
 public fun livestreamAudioCallServiceConfig(): CallServiceConfig {
     return CallServiceConfig(
@@ -76,7 +85,8 @@ public fun livestreamAudioCallServiceConfig(): CallServiceConfig {
 }
 
 /**
- * Return a default configuration for the call service configuration.
+ * Returns a foreground service configuration appropriate for livestream viewers.
+ * Uses: `FOREGROUND_SERVICE_TYPE_MEDIA_PLAYBACK`.
  */
 public fun livestreamGuestCallServiceConfig(): CallServiceConfig {
     return CallServiceConfig(
@@ -89,6 +99,20 @@ public fun livestreamGuestCallServiceConfig(): CallServiceConfig {
     )
 }
 
+/**
+ * Returns a foreground service configuration appropriate for audio-only calls.
+ * Uses: `FOREGROUND_SERVICE_TYPE_MICROPHONE`.
+ */
+public fun audioCallServiceConfig(): CallServiceConfig {
+    return CallServiceConfig(
+        runCallServiceInForeground = true,
+        callServicePerType = mapOf(
+            Pair(ANY_MARKER, CallService::class.java),
+            Pair("audio_call", AudioCallService::class.java),
+        ),
+    )
+}
+
 // Internal
 internal fun resolveServiceClass(callId: StreamCallId, config: CallServiceConfig): Class<*> {
     val callType = callId.type
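A quick sketch of what the new helper resolves to. The exact declared type of callServicePerType is not visible in this excerpt, so the plain map access below is an assumption; in an app this value would typically be handed to the SDK builder rather than inspected by hand:

import io.getstream.video.android.core.notifications.internal.service.audioCallServiceConfig

// Sketch: "audio_call" maps to the new microphone-only AudioCallService, while the
// ANY_MARKER entry ("ALL_CALL_TYPES") keeps the regular CallService as the fallback.
fun inspectAudioCallServiceConfig() {
    val config = audioCallServiceConfig()

    val audioCallService = config.callServicePerType["audio_call"]
    val fallbackService = config.callServicePerType["ALL_CALL_TYPES"]

    println("audio_call -> $audioCallService, fallback -> $fallbackService")
}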
diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/utils/NoiseCancellationUtils.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/utils/NoiseCancellationUtils.kt
new file mode 100644
index 0000000000..452a139b4f
--- /dev/null
+++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/utils/NoiseCancellationUtils.kt
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2014-2024 Stream.io Inc. All rights reserved.
+ *
+ * Licensed under the Stream License;
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://github.com/GetStream/stream-video-android/blob/main/LICENSE
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.getstream.video.android.core.utils
+
+import org.openapitools.client.models.NoiseCancellationSettings
+
+/**
+ * Returns true if the noise cancellation mode is "auto-on".
+ */
+val NoiseCancellationSettings.isAutoOn get() = mode == NoiseCancellationSettings.Mode.AutoOn
+
+/**
+ * Returns true if the noise cancellation mode is "available".
+ */
+val NoiseCancellationSettings.isAvailable get() = mode == NoiseCancellationSettings.Mode.Available
+
+/**
+ * Returns true if the noise cancellation mode is "disabled".
+ */
+val NoiseCancellationSettings.isDisabled get() = mode == NoiseCancellationSettings.Mode.Disabled
+
+/**
+ * Returns true if the noise cancellation mode is "auto-on" or "available".
+ */
+val NoiseCancellationSettings.isEnabled get() = when (mode) {
+    NoiseCancellationSettings.Mode.Available,
+    NoiseCancellationSettings.Mode.AutoOn,
+    -> true
+    else -> false
+}
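To close, a small sketch of how the new NoiseCancellationSettings extensions might drive client behaviour; setNoiseCancellation is a hypothetical callback standing in for whatever mechanism the app uses to switch audio processing on or off:

import io.getstream.video.android.core.utils.isAutoOn
import io.getstream.video.android.core.utils.isEnabled
import org.openapitools.client.models.NoiseCancellationSettings

// Sketch: map the backend-provided noise-cancellation mode onto a local on/off decision.
fun applyNoiseCancellationPolicy(
    settings: NoiseCancellationSettings?,
    setNoiseCancellation: (Boolean) -> Unit,
) {
    if (settings == null || !settings.isEnabled) {
        // Feature disabled on the dashboard, or no settings delivered yet.
        setNoiseCancellation(false)
    } else if (settings.isAutoOn) {
        // "auto-on": start with noise cancellation active.
        setNoiseCancellation(true)
    }
    // "available": leave the decision to an in-call toggle.
}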