diff --git a/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt b/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt index e86bd2de59..a449bdc80d 100644 --- a/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt +++ b/buildSrc/src/main/kotlin/io/getstream/video/android/Configuration.kt @@ -6,11 +6,11 @@ object Configuration { const val minSdk = 24 const val majorVersion = 1 const val minorVersion = 0 - const val patchVersion = 16 + const val patchVersion = 17 const val versionName = "$majorVersion.$minorVersion.$patchVersion" - const val versionCode = 39 + const val versionCode = 40 const val snapshotVersionName = "$majorVersion.$minorVersion.${patchVersion + 1}-SNAPSHOT" const val artifactGroup = "io.getstream" - const val streamVideoCallGooglePlayVersion = "1.1.9" + const val streamVideoCallGooglePlayVersion = "1.1.10" const val streamWebRtcVersionName = "1.2.1" } diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/data/services/stream/StreamService.kt b/demo-app/src/main/kotlin/io/getstream/video/android/data/services/stream/StreamService.kt index a50e5b07fb..eb2262c003 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/data/services/stream/StreamService.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/data/services/stream/StreamService.kt @@ -17,6 +17,9 @@ package io.getstream.video.android.data.services.stream import com.jakewharton.retrofit2.converter.kotlinx.serialization.asConverterFactory +import io.getstream.video.android.model.User +import io.getstream.video.android.models.UserCredentials +import io.getstream.video.android.models.builtInCredentials import kotlinx.serialization.json.Json import okhttp3.MediaType.Companion.toMediaType import retrofit2.Retrofit @@ -24,7 +27,7 @@ import retrofit2.create import retrofit2.http.GET import retrofit2.http.Query -interface StreamService { +fun interface StreamService { @GET("api/auth/create-token") suspend fun getAuthData( 
@Query("environment") environment: String, @@ -41,6 +44,15 @@ interface StreamService { .addConverterFactory(json.asConverterFactory("application/json".toMediaType())) .build() - val instance = retrofit.create() + private val serviceInstance = retrofit.create() + + val instance = StreamService { environment, userId -> + User.builtInCredentials[userId]?.toAuthDataResponse() + ?: serviceInstance.getAuthData(environment, userId) + } } } + +private fun UserCredentials.toAuthDataResponse(): GetAuthDataResponse { + return GetAuthDataResponse(userId, apiKey, token) +} diff --git a/demo-app/src/main/kotlin/io/getstream/video/android/models/Users.kt b/demo-app/src/main/kotlin/io/getstream/video/android/models/Users.kt index 2f2a0d15f4..3b2fd2248e 100644 --- a/demo-app/src/main/kotlin/io/getstream/video/android/models/Users.kt +++ b/demo-app/src/main/kotlin/io/getstream/video/android/models/Users.kt @@ -18,6 +18,11 @@ package io.getstream.video.android.models import io.getstream.video.android.model.User +data class UserCredentials(val userId: String, val apiKey: String, val token: String) + +public val User.Companion.builtInCredentials: Map + get() = mapOf() + public fun User.Companion.builtInUsers(): List { return listOf( User( diff --git a/docusaurus/docs/Android/03-guides/01-client-auth.mdx b/docusaurus/docs/Android/03-guides/01-client-auth.mdx index 833a3ac6ff..2b8c92d5a0 100644 --- a/docusaurus/docs/Android/03-guides/01-client-auth.mdx +++ b/docusaurus/docs/Android/03-guides/01-client-auth.mdx @@ -57,6 +57,25 @@ val streamVideo = StreamVideoBuilder( ).build() ``` +Anonymous users don't establish an active web socket connection, therefore they won't receive any events. They are just able to watch a livestream or join a call. + +The token for an anonymous user should contain the `call_cids` field, which is an array of the call `cid`'s that the user is allowed to join. 
+ +Here's an example JWT token payload for an anonymous user: + +```json +{ + "iss": "@stream-io/dashboard", + "iat": 1726406693, + "exp": 1726493093, + "user_id": "!anon", + "role": "viewer", + "call_cids": [ + "livestream:123" + ] +} +``` + ### Client options Here's a more complete example of the client options: diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 4616282cf0..b84d63734e 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -32,7 +32,7 @@ coil = "2.6.0" landscapist = "2.3.6" accompanist = "0.34.0" telephoto = "0.3.0" -audioswitch = "1.1.8" +audioswitch = "1.2.0" libyuv = "0.30.0" wire = "4.7.0" @@ -43,7 +43,7 @@ threetenAbp = "1.4.7" tink = "1.9.0" turbine = "0.13.0" -streamWebRTC = "1.2.1" +streamWebRTC = "1.2.2" streamNoiseCancellation = "1.0.1" streamResult = "1.2.0" streamChat = "6.0.13" diff --git a/stream-video-android-core/api/stream-video-android-core.api b/stream-video-android-core/api/stream-video-android-core.api index 50f32b0e09..caa2c7366d 100644 --- a/stream-video-android-core/api/stream-video-android-core.api +++ b/stream-video-android-core/api/stream-video-android-core.api @@ -35,6 +35,7 @@ public final class io/getstream/video/android/core/Call { public final fun isLocalPin (Ljava/lang/String;)Z public final fun isPinnedParticipant (Ljava/lang/String;)Z public final fun isServerPin (Ljava/lang/String;)Z + public final fun isVideoEnabled ()Z public final fun join (ZLio/getstream/video/android/core/CreateCallOptions;ZZLkotlin/coroutines/Continuation;)Ljava/lang/Object; public static synthetic fun join$default (Lio/getstream/video/android/core/Call;ZLio/getstream/video/android/core/CreateCallOptions;ZZLkotlin/coroutines/Continuation;ILjava/lang/Object;)Ljava/lang/Object; public final fun leave ()V @@ -428,6 +429,10 @@ public final class io/getstream/video/android/core/MediaManagerImpl { public final fun getVideoTrack ()Lorg/webrtc/VideoTrack; } +public final class 
io/getstream/video/android/core/MediaManagerKt { + public static final fun trySetEnabled (Lorg/webrtc/MediaStreamTrack;Z)V +} + public final class io/getstream/video/android/core/MediaStatsInfo { public static final field Companion Lio/getstream/video/android/core/MediaStatsInfo$Companion; public fun (Ljava/lang/String;Ljava/lang/Double;Ljava/lang/Long;Ljava/lang/Long;Ljava/lang/Double;Ljava/lang/Double;)V @@ -544,6 +549,7 @@ public final class io/getstream/video/android/core/ParticipantState { public final fun muteVideo (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun pin (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun setSessionId (Ljava/lang/String;)V + public final fun setVideoTrack (Lio/getstream/video/android/core/model/VideoTrack;)V public fun toString ()Ljava/lang/String; public final fun unpin (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun updateAudioLevel (F)V @@ -3034,6 +3040,17 @@ public final class io/getstream/video/android/core/events/GoAwayEvent : io/getst public fun toString ()Ljava/lang/String; } +public final class io/getstream/video/android/core/events/ICERestartEvent : io/getstream/video/android/core/events/SfuDataEvent { + public fun (Lstream/video/sfu/models/PeerType;)V + public final fun component1 ()Lstream/video/sfu/models/PeerType; + public final fun copy (Lstream/video/sfu/models/PeerType;)Lio/getstream/video/android/core/events/ICERestartEvent; + public static synthetic fun copy$default (Lio/getstream/video/android/core/events/ICERestartEvent;Lstream/video/sfu/models/PeerType;ILjava/lang/Object;)Lio/getstream/video/android/core/events/ICERestartEvent; + public fun equals (Ljava/lang/Object;)Z + public final fun getPeerType ()Lstream/video/sfu/models/PeerType; + public fun hashCode ()I + public fun toString ()Ljava/lang/String; +} + public final class io/getstream/video/android/core/events/ICETrickleEvent : io/getstream/video/android/core/events/SfuDataEvent { public fun 
(Ljava/lang/String;Lstream/video/sfu/models/PeerType;)V public final fun component1 ()Ljava/lang/String; diff --git a/stream-video-android-core/src/main/AndroidManifest.xml b/stream-video-android-core/src/main/AndroidManifest.xml index b8258835b8..4357bb744a 100644 --- a/stream-video-android-core/src/main/AndroidManifest.xml +++ b/stream-video-android-core/src/main/AndroidManifest.xml @@ -109,7 +109,7 @@ \ No newline at end of file diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt index a0b0874866..243e18b8a6 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt @@ -307,7 +307,9 @@ public class Call( ring: Boolean = false, notify: Boolean = false, ): Result { - logger.d { "[join] #ringing; create: $create, ring: $ring, notify: $notify" } + logger.d { + "[join] #ringing; #track; create: $create, ring: $ring, notify: $notify, createOptions: $createOptions" + } val permissionPass = clientImpl.permissionCheck.checkAndroidPermissions(clientImpl.context, this) // Check android permissions and log a warning to make sure developers requested adequate permissions prior to using the call. @@ -378,6 +380,9 @@ public class Call( "Call $cid has already been joined. Please use call.leave before joining it again", ) } + logger.d { + "[joinInternal] #track; create: $create, ring: $ring, notify: $notify, createOptions: $createOptions" + } // step 1. 
call the join endpoint to get a list of SFUs @@ -493,14 +498,19 @@ public class Call( // first check if sfuSocketReconnectionTime isn't already set - if yes // then we are already doing a full reconnect if (state._connection.value == RealtimeConnection.Migrating) { - logger.d { "Skipping disconnected channel event - we are migrating" } + logger.d { + "[handleSignalChannelDisconnect] #track; Skipping disconnected channel event - we are migrating" + } return } if (!isRetry && sfuSocketReconnectionTime != null) { - logger.d { "[handleSignalChannelDisconnect] Already doing a full reconnect cycle - ignoring call" } + logger.d { + "[handleSignalChannelDisconnect] #track; Already doing a full reconnect cycle - ignoring call" + } return } + logger.d { "[handleSignalChannelDisconnect] #track; isRetry: $isRetry" } if (!isRetry) { state._connection.value = RealtimeConnection.Reconnecting @@ -675,11 +685,14 @@ public class Call( } fun setVisibility(sessionId: String, trackType: TrackType, visible: Boolean) { + logger.i { + "[setVisibility] #track; #sfu; sessionId: $sessionId, trackType: $trackType, visible: $visible" + } session?.updateTrackDimensions(sessionId, trackType, visible) } fun handleEvent(event: VideoEvent) { - logger.i { "[call handleEvent] #sfu; event: $event" } + logger.v { "[call handleEvent] #sfu; event.type: ${event.getEventType()}" } when (event) { is GoAwayEvent -> @@ -708,40 +721,52 @@ public class Call( trackType: TrackType, onRendered: (VideoTextureViewRenderer) -> Unit = {}, ) { - logger.d { "[initRenderer] #sfu; sessionId: $sessionId" } + logger.d { "[initRenderer] #sfu; #track; sessionId: $sessionId" } // Note this comes from peerConnectionFactory.eglBase videoRenderer.init( clientImpl.peerConnectionFactory.eglBase.eglBaseContext, object : RendererCommon.RendererEvents { override fun onFirstFrameRendered() { - logger.d { "[initRenderer.onFirstFrameRendered] #sfu; sessionId: $sessionId" } + val width = videoRenderer.measuredWidth + val height = 
videoRenderer.measuredHeight + logger.i { + "[initRenderer.onFirstFrameRendered] #sfu; #track; " + + "trackType: $trackType, dimension: ($width - $height), " + + "sessionId: $sessionId" + } if (trackType != TrackType.TRACK_TYPE_SCREEN_SHARE) { session?.updateTrackDimensions( sessionId, trackType, true, - VideoDimension( - videoRenderer.measuredWidth, - videoRenderer.measuredHeight, - ), + VideoDimension(width, height), ) } onRendered(videoRenderer) } - override fun onFrameResolutionChanged(p0: Int, p1: Int, p2: Int) { - logger.d { "[initRenderer.onFrameResolutionChanged] #sfu; sessionId: $sessionId" } + override fun onFrameResolutionChanged( + videoWidth: Int, + videoHeight: Int, + rotation: Int, + ) { + val width = videoRenderer.measuredWidth + val height = videoRenderer.measuredHeight + logger.v { + "[initRenderer.onFrameResolutionChanged] #sfu; #track; " + + "trackType: $trackType, " + + "dimension1: ($width - $height), " + + "dimension2: ($videoWidth - $videoHeight), " + + "sessionId: $sessionId" + } if (trackType != TrackType.TRACK_TYPE_SCREEN_SHARE) { session?.updateTrackDimensions( sessionId, trackType, true, - VideoDimension( - videoRenderer.measuredWidth, - videoRenderer.measuredHeight, - ), + VideoDimension(videoWidth, videoHeight), ) } } @@ -900,10 +925,20 @@ public class Call( private fun updateMediaManagerFromSettings(callSettings: CallSettingsResponse) { // Speaker - speaker.setEnabled( - enabled = callSettings.audio.defaultDevice == AudioSettingsResponse.DefaultDevice.Speaker || - callSettings.audio.speakerDefaultOn, - ) + if (speaker.status.value is DeviceStatus.NotSelected) { + val enableSpeaker = if (callSettings.video.cameraDefaultOn || camera.status.value is DeviceStatus.Enabled) { + // if camera is enabled then enable speaker. 
Eventually this should + // be a new audio.defaultDevice setting returned from backend + true + } else { + callSettings.audio.defaultDevice == AudioSettingsResponse.DefaultDevice.Speaker || + callSettings.audio.speakerDefaultOn + } + + speaker.setEnabled( + enabled = enableSpeaker, + ) + } // Camera if (camera.status.value is DeviceStatus.NotSelected) { @@ -1068,6 +1103,10 @@ public class Call( return state.ownCapabilities.value.containsAll(elements) } + fun isVideoEnabled(): Boolean { + return state.settings.value?.video?.enabled ?: false + } + fun isAudioProcessingEnabled(): Boolean { return clientImpl.isAudioProcessingEnabled() } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/CallState.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/CallState.kt index c7b3695109..cffc14e3f1 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/CallState.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/CallState.kt @@ -264,8 +264,9 @@ public class CallState( private val livestreamFlow: Flow = channelFlow { fun emitLivestreamVideo() { val participants = participants.value - val filteredVideo = - participants.mapNotNull { it.video.value }.firstOrNull { it.track != null } + val filteredVideo = participants.firstOrNull { + it.video.value?.enabled == true + }?.video?.value scope.launch { if (_backstage.value) { send(null) @@ -277,12 +278,17 @@ public class CallState( scope.launch { _participants.collect { + logger.v { + "[livestreamFlow] #track; participants: ${it.size} =>" + + "${it.map { "${it.value.userId.value} - ${it.value.video.value?.enabled}" }}" + } emitLivestreamVideo() } } // TODO: could optimize performance by subscribing only to relevant events call.subscribe { + logger.v { "[livestreamFlow] #track; event.type: ${it.getEventType()}" } if (it is TrackPublishedEvent) { val participant = getOrCreateParticipant(it.sessionId, it.userId) @@ 
-307,6 +313,7 @@ public class CallState( } // emit livestream Video + logger.d { "[livestreamFlow] #track; no args" } emitLivestreamVideo() awaitClose { } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt index 090911f31f..6627c4cd5a 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/MediaManager.kt @@ -39,6 +39,7 @@ import io.getstream.video.android.core.call.video.FilterVideoProcessor import io.getstream.video.android.core.screenshare.StreamScreenShareService import io.getstream.video.android.core.utils.buildAudioConstraints import io.getstream.video.android.core.utils.mapState +import io.getstream.video.android.core.utils.safeCall import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.StateFlow @@ -49,6 +50,7 @@ import org.webrtc.Camera2Capturer import org.webrtc.Camera2Enumerator import org.webrtc.CameraEnumerationAndroid import org.webrtc.EglBase +import org.webrtc.MediaStreamTrack import org.webrtc.ScreenCapturerAndroid import org.webrtc.SurfaceTextureHelper import stream.video.sfu.models.VideoDimension @@ -137,23 +139,24 @@ class SpeakerManager( * @param defaultFallback when [enable] is false this is used to select the next device after the speaker. * */ fun setSpeakerPhone(enable: Boolean, defaultFallback: StreamAudioDevice? 
= null) { - microphoneManager.setup() - val devices = devices.value - val selectedBeforeSpeaker = selectedDevice.value - if (enable) { - val speaker = devices.filterIsInstance().firstOrNull() - _speakerPhoneEnabled.value = true - microphoneManager.select(speaker) - } else { - _speakerPhoneEnabled.value = false - // swap back to the old one - val defaultFallbackFromType = defaultFallback?.let { - devices.filterIsInstance(defaultFallback::class.java) - }?.firstOrNull() - val fallback = defaultFallbackFromType ?: selectedBeforeSpeaker ?: devices.firstOrNull { - it !is StreamAudioDevice.Speakerphone + microphoneManager.enforceSetup { + val devices = devices.value + if (enable) { + val speaker = devices.filterIsInstance().firstOrNull() + selectedBeforeSpeaker = selectedDevice.value + _speakerPhoneEnabled.value = true + microphoneManager.select(speaker) + } else { + _speakerPhoneEnabled.value = false + // swap back to the old one + val defaultFallbackFromType = defaultFallback?.let { + devices.filterIsInstance(defaultFallback::class.java) + }?.firstOrNull() + val fallback = defaultFallbackFromType ?: selectedBeforeSpeaker ?: devices.firstOrNull { + it !is StreamAudioDevice.Speakerphone + } + microphoneManager.select(fallback) } - microphoneManager.select(fallback) } } @@ -161,12 +164,13 @@ class SpeakerManager( * Set the volume as a percentage, 0-100 */ fun setVolume(volumePercentage: Int) { - microphoneManager.setup() - microphoneManager.audioManager?.let { - val max = it.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL) - val level = max / 100 * volumePercentage - _volume.value = volumePercentage - it.setStreamVolume(AudioManager.STREAM_VOICE_CALL, level, 0) + microphoneManager.enforceSetup { + microphoneManager.audioManager?.let { + val max = it.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL) + val level = max / 100 * volumePercentage + _volume.value = volumePercentage + it.setStreamVolume(AudioManager.STREAM_VOICE_CALL, level, 0) + } } } @@ -364,7 +368,7 @@ class 
MicrophoneManager( if (fromUser) { _status.value = DeviceStatus.Enabled } - mediaManager.audioTrack.setEnabled(true) + mediaManager.audioTrack.trySetEnabled(true) } } @@ -393,7 +397,7 @@ class MicrophoneManager( if (fromUser) { _status.value = DeviceStatus.Disabled } - mediaManager.audioTrack.setEnabled(false) + mediaManager.audioTrack.trySetEnabled(false) } } @@ -404,7 +408,6 @@ class MicrophoneManager( enforceSetup { if (enabled) { enable(fromUser = fromUser) - mediaManager.speaker.setEnabled(enabled = false, fromUser = false) } else { disable(fromUser = fromUser) } @@ -438,35 +441,40 @@ class MicrophoneManager( fun canHandleDeviceSwitch() = audioUsage != AudioAttributes.USAGE_MEDIA // Internal logic - internal fun setup() { + internal fun setup(onAudioDevicesUpdate: (() -> Unit)? = null) { + var capturedOnAudioDevicesUpdate = onAudioDevicesUpdate + if (setupCompleted) { - // Already setup, return + capturedOnAudioDevicesUpdate?.invoke() + capturedOnAudioDevicesUpdate = null + return } + audioManager = mediaManager.context.getSystemService() if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { audioManager?.allowedCapturePolicy = AudioAttributes.ALLOW_CAPTURE_BY_ALL } if (canHandleDeviceSwitch()) { - audioHandler = - AudioSwitchHandler(mediaManager.context) { devices, selected -> - logger.i { "audio devices. selected $selected, available devices are $devices" } - _devices.value = devices.map { it.fromAudio() } - _selectedDevice.value = selected?.fromAudio() - } + audioHandler = AudioSwitchHandler(mediaManager.context) { devices, selected -> + logger.i { "audio devices. 
selected $selected, available devices are $devices" } + + _devices.value = devices.map { it.fromAudio() } + _selectedDevice.value = selected?.fromAudio() + + capturedOnAudioDevicesUpdate?.invoke() + capturedOnAudioDevicesUpdate = null + setupCompleted = true + } audioHandler.start() } else { logger.d { "[MediaManager#setup] usage is MEDIA, cannot handle device switch" } } - setupCompleted = true } - private inline fun enforceSetup(actual: () -> T): T { - setup() - return actual.invoke() - } + internal fun enforceSetup(actual: () -> Unit) = setup(onAudioDevicesUpdate = actual) private fun ifAudioHandlerInitialized(then: (audioHandler: AudioSwitchHandler) -> Unit) { if (this::audioHandler.isInitialized) { @@ -549,7 +557,7 @@ public class CameraManager( if (fromUser) { _status.value = DeviceStatus.Enabled } - mediaManager.videoTrack.setEnabled(true) + mediaManager.videoTrack.trySetEnabled(true) startCapture() } @@ -583,7 +591,7 @@ public class CameraManager( if (fromUser) { _status.value = DeviceStatus.Disabled } - mediaManager.videoTrack.setEnabled(false) + mediaManager.videoTrack.trySetEnabled(false) videoCapturer.stopCapture() isCapturingVideo = false } @@ -871,3 +879,5 @@ class MediaManagerImpl( microphone.cleanup() } } + +fun MediaStreamTrack.trySetEnabled(enabled: Boolean) = safeCall { setEnabled(enabled) } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ParticipantState.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ParticipantState.kt index d002cafd07..a499e40750 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ParticipantState.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/ParticipantState.kt @@ -17,6 +17,7 @@ package io.getstream.video.android.core import androidx.compose.runtime.Stable +import io.getstream.log.taggedLogger import io.getstream.result.Result import 
io.getstream.video.android.core.internal.InternalStreamVideoApi import io.getstream.video.android.core.model.AudioTrack @@ -55,6 +56,8 @@ public data class ParticipantState( var trackLookupPrefix: String = "", ) { + private val logger by taggedLogger("ParticipantState") + val isLocal by lazy { sessionId == call.session?.sessionId } @@ -199,6 +202,11 @@ public data class ParticipantState( internal val _roles = MutableStateFlow>(emptyList()) val roles: StateFlow> = _roles + fun setVideoTrack(track: VideoTrack?) { + logger.i { "[setVideoTrack] #sfu; #track; userId: ${userId.value} track: $track" } + _videoTrack.value = track + } + fun updateFromParticipantInfo(participant: Participant) { sessionId = participant.session_id diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/audio/AudioHandler.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/audio/AudioHandler.kt index eb6a7f14b1..616f8e4f72 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/audio/AudioHandler.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/audio/AudioHandler.kt @@ -51,7 +51,6 @@ public class AudioSwitchHandler( private var audioSwitch: AudioSwitch? = null - // AudioSwitch is not threadsafe, so all calls should be done on the main thread. 
private val handler = Handler(Looper.getMainLooper()) override fun start() { @@ -62,11 +61,10 @@ public class AudioSwitchHandler( val devices = mutableListOf( AudioDevice.WiredHeadset::class.java, AudioDevice.BluetoothHeadset::class.java, + AudioDevice.Earpiece::class.java, + AudioDevice.Speakerphone::class.java, ) - devices.add(AudioDevice.Earpiece::class.java) - devices.add(AudioDevice.Speakerphone::class.java) - handler.post { val switch = AudioSwitch( context = context, diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt index 838aeb4b8c..1cca79a2cc 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/RtcSession.kt @@ -36,6 +36,7 @@ import io.getstream.video.android.core.call.utils.stringify import io.getstream.video.android.core.dispatchers.DispatcherProvider import io.getstream.video.android.core.errors.RtcException import io.getstream.video.android.core.events.ChangePublishQualityEvent +import io.getstream.video.android.core.events.ICERestartEvent import io.getstream.video.android.core.events.ICETrickleEvent import io.getstream.video.android.core.events.JoinCallResponseEvent import io.getstream.video.android.core.events.ParticipantJoinedEvent @@ -249,8 +250,7 @@ public class RtcSession internal constructor( when (type) { TrackType.TRACK_TYPE_VIDEO -> { - call.state.getParticipantBySessionId(sessionId)?._videoTrack?.value = - track.asVideoTrack() + call.state.getParticipantBySessionId(sessionId)?.setVideoTrack(track.asVideoTrack()) } TrackType.TRACK_TYPE_AUDIO -> { @@ -324,6 +324,7 @@ public class RtcSession internal constructor( "SDK hasn't been initialised yet - can't start a RtcSession", ) } + logger.i { " #sfu; #track; no args" } // step 1 setup the peer connections 
subscriber = createSubscriber() @@ -348,6 +349,7 @@ public class RtcSession internal constructor( coroutineScope.launch { // call update participant subscriptions debounced trackDimensionsDebounced.collect { + logger.v { " #sfu; #track; trackDimensions: $it" } setVideoSubscriptions() } } @@ -419,6 +421,7 @@ public class RtcSession internal constructor( } suspend fun connect() { + logger.i { "[connect] #sfu; #track; no args" } sfuConnectionModule.sfuSocket.connect() // ensure that the join event has been handled before starting RTC try { @@ -565,7 +568,7 @@ public class RtcSession internal constructor( trackTypeMap[trackTypeString] ?: TrackType.fromValue(trackTypeString.toInt()) ?: throw IllegalStateException("trackType not recognized: $trackTypeString") - logger.i { "[] #sfu; mediaStream: $mediaStream" } + logger.i { "[addStream] #sfu; mediaStream: $mediaStream" } mediaStream.audioTracks.forEach { track -> logger.v { "[addStream] #sfu; audioTrack: ${track.stringify()}" } track.setEnabled(true) @@ -581,6 +584,7 @@ public class RtcSession internal constructor( } mediaStream.videoTracks.forEach { track -> + logger.w { "[addStream] #sfu; #track; videoTrack: ${track.stringify()}" } track.setEnabled(true) val videoTrack = VideoTrack( streamId = mediaStream.id, @@ -598,6 +602,7 @@ public class RtcSession internal constructor( } private suspend fun connectRtc() { + logger.d { "[connectRtc] #sfu; #track; no args" } val settings = call.state.settings.value // turn of the speaker if needed @@ -719,7 +724,7 @@ public class RtcSession internal constructor( } fun cleanup() { - logger.i { "[cleanup] #sfu; no args" } + logger.i { "[cleanup] #sfu; #track; no args" } supervisorJob.cancel() // disconnect the socket and clean it up @@ -811,7 +816,7 @@ public class RtcSession internal constructor( @VisibleForTesting public fun createSubscriber(): StreamPeerConnection { - logger.i { "[createSubscriber] #sfu" } + logger.i { "[createSubscriber] #sfu; no args" } val peerConnection = 
clientImpl.peerConnectionFactory.makePeerConnection( coroutineScope = coroutineScope, configuration = connectionConfiguration, @@ -858,6 +863,7 @@ public class RtcSession internal constructor( @VisibleForTesting fun createPublisher(): StreamPeerConnection { + logger.i { "[createPublisher] #sfu; no args" } val publisher = clientImpl.peerConnectionFactory.makePeerConnection( coroutineScope = coroutineScope, configuration = connectionConfiguration, @@ -999,6 +1005,7 @@ public class RtcSession internal constructor( * -- we cap at 30 retries to prevent endless loops */ private fun setVideoSubscriptions(useDefaults: Boolean = false) { + logger.d { "[setVideoSubscriptions] #sfu; #track; useDefaults: $useDefaults" } // default is to subscribe to the top 5 sorted participants var tracks = if (useDefaults) { defaultTracks() @@ -1017,6 +1024,7 @@ public class RtcSession internal constructor( it.copy(dimension = it.dimension?.copy(width = 200, height = 200)) } } + logger.v { "[setVideoSubscriptions] #sfu; #track; tracks.size: ${tracks.size}" } val new = tracks.toList() subscriptions.value = new @@ -1035,8 +1043,8 @@ public class RtcSession internal constructor( ) println("request $request") val sessionToDimension = tracks.map { it.session_id to it.dimension } - dynascaleLogger.i { - "[setVideoSubscriptions] $useDefaults #sfu; $sessionId subscribing to : $sessionToDimension" + dynascaleLogger.v { + "[setVideoSubscriptions] $useDefaults #sfu; #track; $sessionId subscribing to : $sessionToDimension" } val result = updateSubscriptions(request) emit(result.getOrThrow()) @@ -1101,6 +1109,22 @@ public class RtcSession internal constructor( removeParticipantTrackDimensions(event.participant) } + is ICETrickleEvent -> { + handleIceTrickle(event) + } + + is ICERestartEvent -> { + val peerType = event.peerType + when (peerType) { + PeerType.PEER_TYPE_PUBLISHER_UNSPECIFIED -> { + publisher?.connection?.restartIce() + } + PeerType.PEER_TYPE_SUBSCRIBER -> { + 
subscriber?.connection?.restartIce() + } + } + } + else -> { logger.d { "[onRtcEvent] skipped event: $event" } } @@ -1121,6 +1145,7 @@ public class RtcSession internal constructor( } private fun removeParticipantTrackDimensions(participant: Participant) { + logger.v { "[removeParticipantTrackDimensions] #sfu; #track; participant: $participant" } val newTrackDimensions = trackDimensions.value.toMutableMap() newTrackDimensions.remove(participant.session_id).also { if (it == null) { @@ -1632,20 +1657,23 @@ public class RtcSession internal constructor( } // call after onNegotiation Needed - private suspend fun setPublisher(request: SetPublisherRequest): Result = - wrapAPICall { + private suspend fun setPublisher(request: SetPublisherRequest): Result { + logger.e { "[setPublisher] #sfu; request $request" } + return wrapAPICall { val result = sfuConnectionModule.signalService.setPublisher(request) result.error?.let { throw RtcException(error = it, message = it.message) } result } + } // share what size and which participants we're looking at private suspend fun updateSubscriptions( request: UpdateSubscriptionsRequest, ): Result = wrapAPICall { + logger.v { "[updateSubscriptions] #sfu; #track; request $request" } val result = sfuConnectionModule.signalService.updateSubscriptions(request) result.error?.let { throw RtcException(error = it, message = it.message) @@ -1680,6 +1708,9 @@ public class RtcSession internal constructor( visible: Boolean, dimensions: VideoDimension = defaultVideoDimension, ) { + logger.v { + "[updateTrackDimensions] #track; #sfu; sessionId: $sessionId, trackType: $trackType, visible: $visible, dimensions: $dimensions" + } // The map contains all track dimensions for all participants dynascaleLogger.d { "updating dimensions $sessionId $visible $dimensions" } @@ -1723,7 +1754,7 @@ public class RtcSession internal constructor( remoteIceServers: List, failedToSwitch: () -> Unit, ) { - logger.i { "[switchSfu] from ${this.sfuUrl} to $sfuUrl" } + logger.i { 
"[switchSfu] #sfu; #track; from ${this.sfuUrl} to $sfuUrl" } // Prepare SDP val getSdp = suspend { diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt index 38f7a40a44..90c948f752 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnection.kt @@ -369,7 +369,7 @@ public class StreamPeerConnection( * @param stream The stream that contains audio or video. */ override fun onAddStream(stream: MediaStream?) { - logger.i { "[onAddStream] #sfu; #$typeTag; stream: $stream" } + logger.w { "[onAddStream] #sfu; #track; #$typeTag; stream: $stream" } if (stream != null) { onStreamAdded?.invoke(stream) } @@ -383,15 +383,21 @@ public class StreamPeerConnection( * @param mediaStreams The streams that were added containing their appropriate tracks. */ override fun onAddTrack(receiver: RtpReceiver?, mediaStreams: Array?) 
{ - logger.i { "[onAddTrack] #sfu; #$typeTag; receiver: $receiver, mediaStreams: $mediaStreams" } + logger.i { + "[onAddTrack] #sfu; #track; #$typeTag; receiver: $receiver, mediaStreams: $mediaStreams" + } mediaStreams?.forEach { mediaStream -> - logger.v { "[onAddTrack] #sfu; #$typeTag; mediaStream: $mediaStream" } + logger.v { "[onAddTrack] #sfu; #track; #$typeTag; mediaStream: $mediaStream" } mediaStream.audioTracks?.forEach { remoteAudioTrack -> - logger.v { "[onAddTrack] #sfu; #$typeTag; remoteAudioTrack: ${remoteAudioTrack.stringify()}" } + logger.v { + "[onAddTrack] #sfu; #track; #$typeTag; remoteAudioTrack: ${remoteAudioTrack.stringify()}" + } remoteAudioTrack.setEnabled(true) } mediaStream.videoTracks?.forEach { remoteVideoTrack -> - logger.v { "[onAddTrack] #sfu; #$typeTag; remoteVideoTrack: ${remoteVideoTrack.stringify()}" } + logger.v { + "[onAddTrack] #sfu; #track; #$typeTag; remoteVideoTrack: ${remoteVideoTrack.stringify()}" + } remoteVideoTrack.setEnabled(true) } onStreamAdded?.invoke(mediaStream) @@ -411,7 +417,9 @@ public class StreamPeerConnection( * * @param stream The stream that was removed from the connection. */ - override fun onRemoveStream(stream: MediaStream?) {} + override fun onRemoveStream(stream: MediaStream?) { + logger.v { "[onRemoveStream] #sfu; #track; #$typeTag; stream: $stream" } + } /** * Triggered when the connection state changes. Used to start and stop the stats observing. @@ -474,7 +482,7 @@ public class StreamPeerConnection( */ override fun onRemoveTrack(receiver: RtpReceiver?) { - logger.i { "[onRemoveTrack] #sfu; #$typeTag; receiver: $receiver" } + logger.i { "[onRemoveTrack] #sfu; #track; #$typeTag; receiver: $receiver" } } override fun onSignalingChange(newState: PeerConnection.SignalingState?) 
{ diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt index ca3428c6e7..65b453f831 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/connection/StreamPeerConnectionFactory.kt @@ -27,7 +27,7 @@ import io.getstream.video.android.core.model.StreamPeerType import kotlinx.coroutines.CoroutineScope import org.webrtc.AudioSource import org.webrtc.AudioTrack -import org.webrtc.DefaultVideoDecoderFactory +import org.webrtc.DefaultBlacklistedVideoDecoderFactory import org.webrtc.EglBase import org.webrtc.Logging import org.webrtc.ManagedAudioProcessingFactory @@ -101,9 +101,7 @@ public class StreamPeerConnectionFactory( * Default video decoder factory used to unpack video from the remote tracks. 
*/ private val videoDecoderFactory by lazy { - DefaultVideoDecoderFactory( - eglBase.eglBaseContext, - ) + DefaultBlacklistedVideoDecoderFactory(eglBase.eglBaseContext) } /** diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/signal/socket/RTCEventMapper.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/signal/socket/RTCEventMapper.kt index 5bc0971171..67e739b228 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/signal/socket/RTCEventMapper.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/call/signal/socket/RTCEventMapper.kt @@ -24,6 +24,7 @@ import io.getstream.video.android.core.events.ConnectionQualityChangeEvent import io.getstream.video.android.core.events.DominantSpeakerChangedEvent import io.getstream.video.android.core.events.ErrorEvent import io.getstream.video.android.core.events.GoAwayEvent +import io.getstream.video.android.core.events.ICERestartEvent import io.getstream.video.android.core.events.ICETrickleEvent import io.getstream.video.android.core.events.JoinCallResponseEvent import io.getstream.video.android.core.events.ParticipantCount @@ -118,6 +119,8 @@ public object RTCEventMapper { ICETrickleEvent(ice_candidate, peer_type) } + event.ice_restart != null -> ICERestartEvent(event.ice_restart.peer_type) + event.publisher_answer != null -> PublisherAnswerEvent(sdp = event.publisher_answer.sdp) event.error != null -> ErrorEvent(event.error.error) diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/events/SfuDataEvent.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/events/SfuDataEvent.kt index 3181dce884..94bce8caf5 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/events/SfuDataEvent.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/events/SfuDataEvent.kt @@ -30,7 
+30,7 @@ import stream.video.sfu.models.TrackType public sealed class SfuDataEvent : VideoEvent() { override fun getEventType(): String { - return "SfuDataEvent" + return this::class.simpleName ?: "UnknownEvent" } } @@ -56,6 +56,10 @@ public data class ICETrickleEvent( val peerType: PeerType, ) : SfuDataEvent() +public data class ICERestartEvent( + val peerType: PeerType, +) : SfuDataEvent() + public data class SubscriberOfferEvent( val sdp: String, ) : SfuDataEvent() diff --git a/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/MicrophoneManagerTest.kt b/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/MicrophoneManagerTest.kt index ca54ef6147..a52aaa32ca 100644 --- a/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/MicrophoneManagerTest.kt +++ b/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/MicrophoneManagerTest.kt @@ -19,8 +19,10 @@ package io.getstream.video.android.core import android.content.Context import android.media.AudioAttributes import android.media.AudioManager +import io.getstream.video.android.core.audio.AudioSwitchHandler import io.mockk.every import io.mockk.mockk +import io.mockk.slot import io.mockk.spyk import io.mockk.verify import io.mockk.verifyOrder @@ -37,10 +39,22 @@ class MicrophoneManagerTest { val mediaManager = mockk(relaxed = true) val actual = MicrophoneManager(mediaManager, audioUsage) val context = mockk(relaxed = true) - val microphoneManager = spyk(actual) every { mediaManager.context } returns context every { context.getSystemService(any()) } returns mockk(relaxed = true) + val microphoneManager = spyk(actual) + val slot = slot<() -> Unit>() + every { microphoneManager.setup(capture(slot)) } answers { slot.captured.invoke() } + every { + microphoneManager["ifAudioHandlerInitialized"]( + any< + ( + AudioSwitchHandler, + ) -> Unit, + >(), + ) + } answers { true } + // When microphoneManager.enable() // 1 
microphoneManager.select(null) // 2 @@ -53,12 +67,7 @@ class MicrophoneManagerTest { // Then verify(exactly = 10) { // Setup will be called exactly 10 times - microphoneManager.setup() - } - verify(exactly = 1) { - // Even thou setup was invoked 10 times, actual initialization happened once - // because context.getSystemService was called once only. - context.getSystemService(any()) + microphoneManager.setup(any()) } } @@ -96,6 +105,9 @@ class MicrophoneManagerTest { every { mediaManager.context } returns context every { context.getSystemService(any()) } returns mockk(relaxed = true) + val slot = slot<() -> Unit>() + every { microphoneManager.setup(capture(slot)) } answers { slot.captured.invoke() } + // When microphoneManager.setup() microphoneManager.cleanup() // Clean and then invoke again @@ -104,13 +116,15 @@ class MicrophoneManagerTest { // Then verify(exactly = 2) { // Setup was called twice - microphoneManager.setup() + microphoneManager.setup(any()) } verifyOrder { - microphoneManager.setup() // Manual call + microphoneManager.setup(any()) // Manual call microphoneManager.cleanup() // Manual call microphoneManager.resume() // Manual call - microphoneManager.setup() // Automatic as part of enforce setup strategy of resume() + microphoneManager.setup( + any(), + ) // Automatic as part of enforce setup strategy of resume() } } @@ -118,21 +132,23 @@ class MicrophoneManagerTest { fun `Resume will call enable only if prior status was DeviceStatus#enabled`() { // Given val mediaManager = mockk(relaxed = true) - val actual = MicrophoneManager(mediaManager, audioUsage) - val context = mockk(relaxed = true) - val microphoneManager = spyk(actual) - every { mediaManager.context } returns context - every { context.getSystemService(any()) } returns mockk(relaxed = true) + val microphoneManager = MicrophoneManager(mediaManager, audioUsage) + val spyMicrophoneManager = spyk(microphoneManager) + val mockContext = mockk(relaxed = true) + every { mediaManager.context } 
returns mockContext + every { mockContext.getSystemService(any()) } returns mockk(relaxed = true) + + val slot = slot<() -> Unit>() + every { spyMicrophoneManager.setup(capture(slot)) } answers { slot.captured.invoke() } // When - microphoneManager.setup() - microphoneManager.priorStatus = DeviceStatus.Enabled - microphoneManager.resume() // Should call setup again + spyMicrophoneManager.priorStatus = DeviceStatus.Enabled + spyMicrophoneManager.resume() // Calls setup internally // Then verify(exactly = 1) { // Setup was called twice - microphoneManager.enable() + spyMicrophoneManager.enable() } } } diff --git a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/video/VideoRenderer.kt b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/video/VideoRenderer.kt index 5b39f960ca..24fb5021a9 100644 --- a/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/video/VideoRenderer.kt +++ b/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/ui/components/video/VideoRenderer.kt @@ -65,67 +65,75 @@ public fun VideoRenderer( videoRendererConfig: VideoRendererConfig = videoRenderConfig(), onRendered: (VideoTextureViewRenderer) -> Unit = {}, ) { - if (LocalInspectionMode.current) { - Image( - modifier = modifier - .fillMaxSize() - .testTag("video_renderer"), - painter = painterResource( - id = io.getstream.video.android.ui.common.R.drawable.stream_video_call_sample, - ), - contentScale = ContentScale.Crop, - contentDescription = null, - ) - return - } + Box( + modifier = modifier + .fillMaxSize() + .testTag("video_renderer_container"), + ) { + if (LocalInspectionMode.current) { + Image( + modifier = Modifier + .fillMaxSize() + .testTag("video_renderer"), + painter = painterResource( + id = io.getstream.video.android.ui.common.R.drawable.stream_video_call_sample, + ), + contentScale = ContentScale.Crop, + 
contentDescription = null, + ) + return + } - // Show avatar always behind the video. - videoRendererConfig.fallbackContent.invoke(call) + // Show avatar always behind the video. + videoRendererConfig.fallbackContent.invoke(call) - if (video?.enabled == true) { - val mediaTrack = video.track - val sessionId = video.sessionId - val trackType = video.type + if (video?.enabled == true) { + val mediaTrack = video.track + val sessionId = video.sessionId + val trackType = video.type - var view: VideoTextureViewRenderer? by remember { mutableStateOf(null) } + var view: VideoTextureViewRenderer? by remember { mutableStateOf(null) } - DisposableEffect(call, video) { - // inform the call that we want to render this video track. (this will trigger a subscription to the track) - call.setVisibility(sessionId, trackType, true) + DisposableEffect(call, video) { + // inform the call that we want to render this video track. (this will trigger a subscription to the track) + call.setVisibility(sessionId, trackType, true) - onDispose { - cleanTrack(view, mediaTrack) - // inform the call that we no longer want to render this video track - call.setVisibility(sessionId, trackType, false) + onDispose { + cleanTrack(view, mediaTrack) + // inform the call that we no longer want to render this video track + call.setVisibility(sessionId, trackType, false) + } } - } - if (mediaTrack != null) { - Box(modifier = modifier.fillMaxSize(), contentAlignment = Alignment.Center) { - AndroidView( - factory = { context -> - StreamVideoTextureViewRenderer(context).apply { - call.initRenderer( - videoRenderer = this, - sessionId = sessionId, - trackType = trackType, - onRendered = onRendered, - ) - setMirror(videoRendererConfig.mirrorStream) - setScalingType( - scalingType = videoRendererConfig.scalingType.toCommonScalingType(), - ) - setupVideo(mediaTrack, this) - - view = this - } - }, - update = { v -> - v.setMirror(videoRendererConfig.mirrorStream) - setupVideo(mediaTrack, v) - }, - modifier = 
modifier.testTag("video_renderer"), - ) + if (mediaTrack != null) { + Box(modifier = Modifier.fillMaxSize(), contentAlignment = Alignment.Center) { + AndroidView( + factory = { context -> + StreamVideoTextureViewRenderer(context).apply { + call.initRenderer( + videoRenderer = this, + sessionId = sessionId, + trackType = trackType, + onRendered = onRendered, + ) + setMirror(videoRendererConfig.mirrorStream) + setScalingType( + videoRendererConfig.scalingType.toCommonScalingType(), + ) + setupVideo(mediaTrack, this) + + view = this + } + }, + update = { v -> + v.setMirror(videoRendererConfig.mirrorStream) + setupVideo(mediaTrack, v) + }, + modifier = Modifier + .fillMaxSize() + .testTag("video_renderer"), + ) + } } } } diff --git a/stream-video-android-ui-core/api/stream-video-android-ui-core.api b/stream-video-android-ui-core/api/stream-video-android-ui-core.api index 37b85ba400..ffa3b07576 100644 --- a/stream-video-android-ui-core/api/stream-video-android-ui-core.api +++ b/stream-video-android-ui-core/api/stream-video-android-ui-core.api @@ -143,7 +143,9 @@ public abstract interface class io/getstream/video/android/ui/common/permission/ public final class io/getstream/video/android/ui/common/renderer/StreamVideoTextureViewRenderer : io/getstream/webrtc/android/ui/VideoTextureViewRenderer { public fun (Landroid/content/Context;)V + public fun onSurfaceTextureAvailable (Landroid/graphics/SurfaceTexture;II)V public fun onSurfaceTextureDestroyed (Landroid/graphics/SurfaceTexture;)Z + public fun onSurfaceTextureSizeChanged (Landroid/graphics/SurfaceTexture;II)V } public final class io/getstream/video/android/ui/common/util/ColorUtilsKt { diff --git a/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/StreamCallActivity.kt b/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/StreamCallActivity.kt index f3c7ecbc22..409fd11684 100644 --- 
a/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/StreamCallActivity.kt +++ b/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/StreamCallActivity.kt @@ -407,8 +407,9 @@ public abstract class StreamCallActivity : ComponentActivity() { // Decision making @StreamCallActivityDelicateApi - public open fun isVideoCall(call: Call): Boolean = - call.hasCapability(OwnCapability.SendVideo) + public open fun isVideoCall(call: Call): Boolean { + return call.hasCapability(OwnCapability.SendVideo) || call.isVideoEnabled() + } // Picture in picture (for Video calls) /** diff --git a/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/renderer/StreamVideoTextureViewRenderer.kt b/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/renderer/StreamVideoTextureViewRenderer.kt index 8d34ed9cbe..69a27494e9 100644 --- a/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/renderer/StreamVideoTextureViewRenderer.kt +++ b/stream-video-android-ui-core/src/main/kotlin/io/getstream/video/android/ui/common/renderer/StreamVideoTextureViewRenderer.kt @@ -25,15 +25,47 @@ public class StreamVideoTextureViewRenderer( context: Context, ) : VideoTextureViewRenderer(context) { - private val logger by taggedLogger() + private val logger by taggedLogger("StreamVideoTextureViewRenderer") + + override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) { + super.onLayout(changed, left, top, right, bottom) + logger.d { + "[onLayout] #track; changed: $changed, left: $left, top: $top, right: $right, " + + "bottom: $bottom" + } + } + + override fun onSurfaceTextureAvailable( + surfaceTexture: SurfaceTexture, + width: Int, + height: Int, + ) { + super.onSurfaceTextureAvailable(surfaceTexture, width, height) + logger.d { + "[onSurfaceTextureAvailable] #track; width: $width, height: $height, " + + "surfaceTexture: $surfaceTexture" + } 
+ } + + override fun onSurfaceTextureSizeChanged( + surfaceTexture: SurfaceTexture, + width: Int, + height: Int, + ) { + super.onSurfaceTextureSizeChanged(surfaceTexture, width, height) + logger.d { + "[onSurfaceTextureSizeChanged] #track; width: $width, height: $height, " + + "surfaceTexture: $surfaceTexture" + } + } override fun onSurfaceTextureDestroyed(surfaceTexture: SurfaceTexture): Boolean { - logger.d { "onSurfaceTextureDestroyed: $surfaceTexture" } + logger.d { "[onSurfaceTextureDestroyed] #track; surfaceTexture: $surfaceTexture" } return super.onSurfaceTextureDestroyed(surfaceTexture) } override fun onDetachedFromWindow() { super.onDetachedFromWindow() - logger.d { "onDetachedFromWindow" } + logger.d { "[onDetachedFromWindow] no args" } } } diff --git a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveGuest.kt b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveGuest.kt index a0584ce8b0..4db9c1e376 100644 --- a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveGuest.kt +++ b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveGuest.kt @@ -17,40 +17,52 @@ package io.getstream.video.android.tutorial.livestream import android.widget.Toast +import androidx.compose.foundation.layout.Box +import androidx.compose.foundation.layout.padding import androidx.compose.runtime.Composable +import androidx.compose.runtime.getValue +import androidx.compose.ui.Alignment +import androidx.compose.ui.Modifier import androidx.compose.ui.platform.LocalContext +import androidx.compose.ui.unit.dp +import androidx.navigation.NavController +import io.getstream.log.Priority import io.getstream.video.android.compose.permission.LaunchCallPermissions +import io.getstream.video.android.compose.ui.components.call.CallAppBar import 
io.getstream.video.android.compose.ui.components.livestream.LivestreamPlayer import io.getstream.video.android.core.GEO import io.getstream.video.android.core.StreamVideo import io.getstream.video.android.core.StreamVideoBuilder +import io.getstream.video.android.core.logging.LoggingLevel import io.getstream.video.android.core.notifications.internal.service.livestreamGuestCallServiceConfig import io.getstream.video.android.model.User -import io.getstream.video.android.model.UserType @Composable -fun LiveAudience() { +fun LiveAudience( + navController: NavController, + callId: String, +) { + val context = LocalContext.current val userId = "Ben_Skywalker" - val callId = "dE8AsD5Qxqrt" + val userToken = StreamVideo.devToken(userId) // step1 - create a user. val user = User( - type = UserType.Authenticated, id = userId, // any string name = "Tutorial", // name and image are used in the UI role = "user", ) // step2 - initialize StreamVideo. For a production app we recommend adding the client to your Application class or di module. - val context = LocalContext.current val client = StreamVideoBuilder( context = context, apiKey = "k436tyde94hj", // demo API key geo = GEO.GlobalEdgeNetwork, user = user, - token = StreamVideo.devToken(userId), + token = userToken, callServiceConfig = livestreamGuestCallServiceConfig(), ensureSingleInstance = false, + loggingLevel = LoggingLevel(priority = Priority.VERBOSE), ).build() // step3 - join a call, which type is `default` and id is `123`. 
@@ -62,5 +74,18 @@ fun LiveAudience() { } } - LivestreamPlayer(call = call) + Box { + LivestreamPlayer(call = call) + CallAppBar( + modifier = Modifier + .align(Alignment.TopCenter) + .padding(end = 16.dp, top = 16.dp), + call = call, + centerContent = { }, + onCallAction = { + call.leave() + navController.popBackStack() + }, + ) + } } diff --git a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveHost.kt b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveHost.kt index 4b6bb124f2..3fb0a3dc4e 100644 --- a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveHost.kt +++ b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveHost.kt @@ -19,9 +19,12 @@ package io.getstream.video.android.tutorial.livestream import android.widget.Toast import androidx.compose.foundation.background import androidx.compose.foundation.layout.Box +import androidx.compose.foundation.layout.Row +import androidx.compose.foundation.layout.Spacer import androidx.compose.foundation.layout.fillMaxSize import androidx.compose.foundation.layout.fillMaxWidth import androidx.compose.foundation.layout.padding +import androidx.compose.foundation.layout.width import androidx.compose.foundation.shape.RoundedCornerShape import androidx.compose.material.Button import androidx.compose.material.ButtonDefaults @@ -37,9 +40,11 @@ import androidx.compose.ui.draw.clip import androidx.compose.ui.graphics.Color import androidx.compose.ui.platform.LocalContext import androidx.compose.ui.unit.dp +import androidx.navigation.NavController import io.getstream.log.Priority import io.getstream.video.android.compose.permission.LaunchCallPermissions import io.getstream.video.android.compose.theme.VideoTheme +import io.getstream.video.android.compose.ui.components.call.controls.actions.LeaveCallAction import 
io.getstream.video.android.compose.ui.components.video.VideoRenderer import io.getstream.video.android.core.Call import io.getstream.video.android.core.GEO @@ -52,11 +57,13 @@ import io.getstream.video.android.model.User import kotlinx.coroutines.launch @Composable -fun LiveHost() { +fun LiveHost( + navController: NavController, + callId: String, +) { val context = LocalContext.current val userId = "Darth_Krayt" val userToken = StreamVideo.devToken(userId) - val callId = "dE8AsD5Qxqrt" // step1 - create a user. val user = User( @@ -86,11 +93,14 @@ fun LiveHost() { Toast.makeText(context, "uh oh $it", Toast.LENGTH_SHORT).show() } } - LiveHostContent(call) + LiveHostContent(navController, call) } @Composable -private fun LiveHostContent(call: Call) { +private fun LiveHostContent( + navController: NavController, + call: Call, +) { LaunchCallPermissions(call = call) val connection by call.state.connection.collectAsState() @@ -148,21 +158,28 @@ private fun LiveHostContent(call: Call) { } }, bottomBar = { - Button( - colors = ButtonDefaults.buttonColors( - contentColor = VideoTheme.colors.brandPrimary, - backgroundColor = VideoTheme.colors.brandPrimary, - ), - onClick = { - scope.launch { - if (backstage) call.goLive() else call.stopLive() - } - }, - ) { - Text( - text = if (backstage) "Start Broadcast" else "Stop Broadcast", - color = Color.White, - ) + Row { + Button( + colors = ButtonDefaults.buttonColors( + contentColor = VideoTheme.colors.brandPrimary, + backgroundColor = VideoTheme.colors.brandPrimary, + ), + onClick = { + scope.launch { + if (backstage) call.goLive() else call.stopLive() + } + }, + ) { + Text( + text = if (backstage) "Start Broadcast" else "Stop Broadcast", + color = Color.White, + ) + } + Spacer(modifier = Modifier.width(16.dp)) + LeaveCallAction { + call.leave() + navController.popBackStack() + } } }, ) { diff --git a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveMain.kt 
b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveMain.kt index 542873dee3..50975b48ef 100644 --- a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveMain.kt +++ b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveMain.kt @@ -28,11 +28,17 @@ import androidx.compose.material.Button import androidx.compose.material.ButtonDefaults import androidx.compose.material.Text import androidx.compose.runtime.Composable +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.remember +import androidx.compose.runtime.setValue import androidx.compose.ui.Alignment import androidx.compose.ui.Modifier +import androidx.compose.ui.text.input.TextFieldValue import androidx.compose.ui.unit.dp import androidx.navigation.NavHostController import io.getstream.video.android.compose.theme.VideoTheme +import io.getstream.video.android.compose.ui.components.base.StreamTextField @Composable fun LiveMain( @@ -48,6 +54,18 @@ fun LiveMain( verticalArrangement = Arrangement.Center, horizontalAlignment = Alignment.CenterHorizontally, ) { + var callId by remember { mutableStateOf(TextFieldValue("dE8AsD5Qxqrt")) } + StreamTextField( + modifier = Modifier.width(300.dp), + value = callId, + placeholder = "Call Id (required)", + onValueChange = { + callId = it + }, + ) + + Spacer(modifier = Modifier.height(44.dp)) + Button( modifier = Modifier .width(300.dp) @@ -57,7 +75,7 @@ fun LiveMain( backgroundColor = VideoTheme.colors.brandPrimary, ), onClick = { - navController.navigate(LiveScreens.Host.destination) + navController.navigate(LiveScreens.Host.destination(callId.text)) }, ) { Text(text = "host", color = VideoTheme.colors.basePrimary) @@ -74,7 +92,7 @@ fun LiveMain( backgroundColor = VideoTheme.colors.brandPrimary, ), onClick = { - navController.navigate(LiveScreens.Guest.destination) + 
navController.navigate(LiveScreens.Guest.destination(callId.text)) }, ) { Text(text = "guest", color = VideoTheme.colors.basePrimary) diff --git a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveNavHost.kt b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveNavHost.kt index 18e6cb733f..611f15a255 100644 --- a/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveNavHost.kt +++ b/tutorials/tutorial-livestream/src/main/kotlin/io/getstream/video/android/tutorial/livestream/LiveNavHost.kt @@ -20,10 +20,13 @@ import androidx.compose.foundation.background import androidx.compose.foundation.layout.fillMaxSize import androidx.compose.runtime.Composable import androidx.compose.ui.Modifier +import androidx.navigation.NavBackStackEntry import androidx.navigation.NavHostController +import androidx.navigation.NavType import androidx.navigation.compose.NavHost import androidx.navigation.compose.composable import androidx.navigation.compose.rememberNavController +import androidx.navigation.navArgument import io.getstream.video.android.compose.theme.VideoTheme @Composable @@ -43,18 +46,36 @@ fun LiveNavHost( LiveMain(navController = navController) } - composable(LiveScreens.Host.destination) { - LiveHost() + composable(LiveScreens.Host.destination, LiveScreens.Host.args) { + LiveHost(navController = navController, callId = LiveScreens.Host.getCallId(it)) } - composable(LiveScreens.Guest.destination) { - LiveAudience() + composable(LiveScreens.Guest.destination, LiveScreens.Guest.args) { + LiveAudience(navController = navController, callId = LiveScreens.Guest.getCallId(it)) } } } -enum class LiveScreens(val destination: String) { - Main("main"), - Host("host"), - Guest("audience"), +sealed class LiveScreens(val destination: String) { + data object Main : LiveScreens(destination = "main") + + sealed class HasCallId(destination: String) : 
LiveScreens(destination) { + private val argCallId: String = "call_id" + val args = listOf(navArgument(argCallId) { type = NavType.StringType }) + + fun getCallId(backStackEntry: NavBackStackEntry): String { + return backStackEntry.arguments?.getString(argCallId) ?: error("Call ID not found") + } + } + + data object Host : HasCallId(destination = "host/{call_id}") { + fun destination(callId: String): String { + return "host/$callId" + } + } + data object Guest : HasCallId(destination = "guest/{call_id}") { + fun destination(callId: String): String { + return "guest/$callId" + } + } }