Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Switch audio source on screen share #103

Open
wants to merge 8 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
package io.antmedia.webrtcandroidframework.api;

import android.media.projection.MediaProjection;

import org.json.JSONArray;
import org.json.JSONObject;
import org.webrtc.DataChannel;
Expand Down Expand Up @@ -331,4 +333,20 @@ void publish(String streamId, String token, boolean videoCallEnabled, boolean au
* Returns true if SDK resources are released and its shutdown, false otherwise.
*/
boolean isShutdown();

/**
 * Switches audio capture to system (device) audio while screen sharing
 * during an active call. The implementation requires a MediaProjection
 * to already be available; otherwise the switch is ignored.
 */
void switchToSystemAudioRecordingOnScreenShareDuringCall();

/**
 * Switches audio capture back to the microphone while screen sharing
 * during an active call.
 */
void switchToMicrophoneAudioRecordingOnScreenShareDuringCall();

/**
 * Sets the MediaProjection the audio device module uses for system audio
 * capture. Passing {@code null} makes the audio device module fall back
 * to microphone audio during screen share.
 */
void setAudioDeviceModuleMediaProjection(MediaProjection mediaProjection);
}
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import java.util.Arrays;

import io.antmedia.webrtcandroidframework.core.WebRTCClient;
import io.antmedia.webrtcandroidframework.core.model.ScreenShareAudioSource;

public class WebRTCClientBuilder {

Expand Down Expand Up @@ -156,4 +157,9 @@ public WebRTCClientBuilder setBluetoothEnabled(boolean bluetoothEnabled) {
webRTCClientConfig.bluetoothEnabled = bluetoothEnabled;
return this;
}

/**
 * Sets the audio source (microphone or system audio) to capture while screen sharing.
 *
 * @param screenShareAudioSource desired screen-share audio source
 * @return this builder for chaining
 */
public WebRTCClientBuilder setScreenShareAudioSource(ScreenShareAudioSource screenShareAudioSource) {
    webRTCClientConfig.screenShareAudioSource = screenShareAudioSource;
    return this;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@

import java.util.ArrayList;

import io.antmedia.webrtcandroidframework.core.model.ScreenShareAudioSource;


public class WebRTCClientConfig {

Expand Down Expand Up @@ -183,4 +185,10 @@ public class WebRTCClientConfig {
* Flag for connecting bluetooth headphones.
*/
public boolean bluetoothEnabled = false;

/**
 * Audio source used during screen share. Possible values are
 * {@link ScreenShareAudioSource#MICROPHONE} (the default) or
 * {@link ScreenShareAudioSource#SYSTEM}.
 */
public ScreenShareAudioSource screenShareAudioSource = ScreenShareAudioSource.MICROPHONE;
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package io.antmedia.webrtcandroidframework.core;

import android.media.projection.MediaProjection;
import android.util.Log;

/**
 * A {@link MediaProjection.Callback} that additionally reports the
 * {@link MediaProjection} instance once screen capture starts, so callers can
 * forward it (e.g. to the audio device module for system-audio capture
 * during screen share).
 *
 * <p>Note: the previous version declared an unused {@code TAG} constant
 * (which also exceeded Android's 23-character log-tag limit) and overrode
 * {@code onStop}, {@code onCapturedContentResize} and
 * {@code onCapturedContentVisibilityChanged} only to call {@code super};
 * those redundant members have been removed. The two captured-content
 * callbacks also exist only on API 34+, so not overriding them here avoids
 * an unnecessary compile-time dependency.
 */
public abstract class CustomMediaProjectionCallback extends MediaProjection.Callback {

    /**
     * Invoked with the MediaProjection created for this capture session.
     *
     * @param mediaProjection the projection created by the screen capturer
     */
    public abstract void onMediaProjection(MediaProjection mediaProjection);
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,14 @@
import android.util.Log;
import android.view.WindowManager;
import android.widget.Toast;

import androidx.annotation.NonNull;

import org.json.JSONArray;
import org.json.JSONObject;
import org.webrtc.AddIceObserver;
import org.webrtc.AudioSource;
import io.antmedia.webrtcandroidframework.core.model.ScreenShareAudioSource;

import org.webrtc.AudioTrack;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
Expand Down Expand Up @@ -836,7 +837,15 @@ public DisplayMetrics getDisplayMetrics() {
}

public @Nullable VideoCapturer createScreenCapturer() {
return new ScreenCapturerAndroid(config.mediaProjectionIntent, new MediaProjection.Callback() {
return new ScreenCapturerAndroid(config.mediaProjectionIntent, new CustomMediaProjectionCallback() {
@Override
public void onMediaProjection(MediaProjection mediaProjection) {
config.mediaProjection = mediaProjection;
if(adm != null && config.screenShareAudioSource == ScreenShareAudioSource.SYSTEM){
adm.setMediaProjection(mediaProjection);
}
}

@Override
public void onStop() {
reportError(getPublishStreamId(), USER_REVOKED_CAPTURE_SCREEN_PERMISSION);
Expand Down Expand Up @@ -974,7 +983,7 @@ private void publishPlayIfRequested() {

/**
 * Publishes the stream with default options: video and audio enabled and no token.
 * Delegates to the full publish(...) overload; the stream id is also passed as the
 * 7th argument — presumably a subscriber/main-track id; verify against the overload
 * signature. The former hard-coded "qdadsas" placeholder is now null.
 */
public void publish(String streamId) {
    publish(streamId, null, true, true,
            null, null, streamId, null);
}


Expand Down Expand Up @@ -1230,7 +1239,7 @@ public void reportError(String streamId, final String description) {

public void changeVideoSource(StreamSource newSource) {
if (!config.videoSource.equals(newSource)) {
if (newSource.equals(StreamSource.SCREEN) && adm != null) {
if (newSource.equals(StreamSource.SCREEN) && adm != null && config.screenShareAudioSource == ScreenShareAudioSource.SYSTEM) {
adm.setMediaProjection(config.mediaProjection);
}

Expand Down Expand Up @@ -2778,4 +2787,66 @@ public boolean isShutdown() {
return released;
}

/**
 * Returns the audio device module in use, or null when none has been created yet.
 */
@androidx.annotation.Nullable
public AudioDeviceModule getAdm() {
    return adm;
}


/**
 * Recreates the underlying audio record on the audio device module.
 * No-op when no audio device module exists.
 */
public void createAudioRecord() {
    if (adm == null) {
        return;
    }
    adm.createAudioRecord();
}

/**
 * Switches the ongoing screen-share publish from microphone audio to system
 * (device) audio. Requires a MediaProjection to already be stored in the
 * config; otherwise the call is logged and ignored. Does nothing when no
 * audio device module is available.
 */
public void switchToSystemAudioRecordingOnScreenShareDuringCall() {
    final MediaProjection projection = config.mediaProjection;
    if (projection == null) {
        Log.i(TAG, "Config media projection is null. Cannot switch system audio on screen share.");
        return;
    }
    if (adm == null) {
        return;
    }
    // Tear down the current recorder, point the ADM at the projection,
    // then rebuild and restart recording so system audio is captured.
    stopAdmRecording();
    adm.setMediaProjection(projection);
    createAudioRecord();
    startRecording();
}

/**
 * Switches the ongoing screen-share publish back to microphone audio.
 * Does nothing when no audio device module is available.
 */
public void switchToMicrophoneAudioRecordingOnScreenShareDuringCall() {
    if (adm != null) {
        stopAdmRecording();
        // Clearing the media projection makes the ADM capture from the microphone.
        adm.setMediaProjection(null);
        createAudioRecord();
        startRecording();
    }
}

/**
 * Starts audio recording on the audio device module, if one exists.
 */
public void startRecording() {
    if (adm == null) {
        return;
    }
    adm.startRecording();
}

/**
 * Stops audio recording on the audio device module, if one exists.
 */
public void stopAdmRecording() {
    if (adm == null) {
        return;
    }
    adm.stopRecording();
}

/**
 * Forwards the given MediaProjection to the audio device module for system
 * audio capture. Passing null reverts the ADM to microphone capture.
 * No-op when no audio device module exists.
 */
public void setAudioDeviceModuleMediaProjection(MediaProjection mediaProjection) {
    if (adm == null) {
        return;
    }
    adm.setMediaProjection(mediaProjection);
}

/**
 * Sets the audio device module used for audio capture control.
 */
public void setAdm(JavaAudioDeviceModule adm){
    this.adm = adm;
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
package io.antmedia.webrtcandroidframework.core.model;

/**
 * Audio source used while screen sharing.
 */
public enum ScreenShareAudioSource {
    /** Capture audio from the device microphone (the configured default). */
    MICROPHONE,
    /** Capture system/device audio via a MediaProjection. */
    SYSTEM
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
import android.os.Looper;
import android.os.Handler;

import io.antmedia.webrtcandroidframework.core.CustomMediaProjectionCallback;

/**
* A copy of ScreenCapturerAndroid to capture the screen content while being aware of device orientation
*/
Expand Down Expand Up @@ -127,6 +129,10 @@ public synchronized void startCapture(
mediaProjection = mediaProjectionManager.getMediaProjection(
Activity.RESULT_OK, mediaProjectionPermissionResultData);

if(mediaProjectionCallback != null){
((CustomMediaProjectionCallback) mediaProjectionCallback).onMediaProjection(mediaProjection);
}

// Let MediaProjection callback use the SurfaceTextureHelper thread.
mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());

Expand Down Expand Up @@ -251,10 +257,6 @@ public MediaProjection getMediaProjection() {
return mediaProjection;
}

public void setMediaProjection(MediaProjection mediaProjection) {
this.mediaProjection = mediaProjection;
}

public MediaProjectionManager getMediaProjectionManager() {
return mediaProjectionManager;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,4 +41,10 @@ public interface AudioDeviceModule {
/** Set media projection for the audio record. */
void setMediaProjection(MediaProjection mediaProjection);

/** Create (or recreate) the underlying audio record used for capture. */
void createAudioRecord();

/** Start capturing audio with the current audio record. */
void startRecording();

/** Stop capturing audio. */
void stopRecording();

}
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,13 @@
import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.media.MediaRecorder;
import android.media.projection.MediaProjection;
import android.os.Build;
import androidx.annotation.RequiresApi;
import java.util.concurrent.ScheduledExecutorService;

import org.webrtc.AudioSource;
import org.webrtc.JniCommon;
import org.webrtc.Logging;
import android.media.AudioRecord;
Expand Down Expand Up @@ -406,6 +409,7 @@ public CustomWebRtcAudioRecord getAudioInput() {
return (CustomWebRtcAudioRecord)audioInput;
}


@Override
public long getNativeAudioDeviceModulePointer() {
synchronized (nativeLock) {
Expand Down Expand Up @@ -460,4 +464,20 @@ public void setMediaProjection(MediaProjection mediaProjection){
audioInput.setMediaProjection(mediaProjection);
}

// Delegates to the audio input.
// NOTE(review): assumes audioInput is non-null here — confirm the module lifecycle.
@Override
public void createAudioRecord() {
    audioInput.createAudioRecord();
}

// Delegates to the audio input.
// NOTE(review): assumes audioInput is non-null here — confirm the module lifecycle.
@Override
public void startRecording() {
    audioInput.startRecording();
}

// Delegates to the audio input.
// NOTE(review): assumes audioInput is non-null here — confirm the module lifecycle.
@Override
public void stopRecording() {
    audioInput.stopRecording();
}


}
Original file line number Diff line number Diff line change
Expand Up @@ -47,4 +47,19 @@ public void setMicrophoneMute(boolean mute) {
public void setMediaProjection(MediaProjection mediaProjection) {

}

@Override
public void createAudioRecord() {
    // Intentionally a no-op: this audio device module does not manage a local AudioRecord.
}

@Override
public void startRecording() {
    // Intentionally a no-op: this audio device module does not manage a local AudioRecord.
}

@Override
public void stopRecording() {
    // Intentionally a no-op: this audio device module does not manage a local AudioRecord.
}
}
Loading
Loading