Skip to content

Commit e727b2e

Browse files
authored
React Native Example: prompt user to enable voice isolation (#207)
1 parent 97108b2 commit e727b2e

File tree

8 files changed

+3465
-1047
lines changed

8 files changed

+3465
-1047
lines changed

evi/evi-react-native/App.tsx

Lines changed: 33 additions & 25 deletions
Original file line number | Diff line number | Diff line change
@@ -8,7 +8,6 @@ import {
88
SafeAreaView,
99
LayoutAnimation,
1010
} from "react-native";
11-
import { useEvent } from 'expo'
1211

1312
// We use Hume's low-level typescript SDK for this example.
1413
// The React SDK (@humeai/voice-react) does not support React Native.
@@ -22,6 +21,7 @@ import { HumeClient, type Hume } from "hume";
2221
// The provided native module is a good starting place, but you should
2322
// modify it to fit the audio recording needs of your specific app.
2423
import NativeAudio, { AudioEventPayload } from "./modules/audio";
24+
import VoiceIsolationModePrompt from "./VoiceIsolationModePrompt";
2525

2626
// Represents a chat message in the chat display.
2727
interface ChatEntry {
@@ -55,6 +55,8 @@ const App = () => {
5555
const [isConnected, setIsConnected] = useState(false);
5656
const [isMuted, setIsMuted] = useState(false);
5757
const [chatEntries, setChatEntries] = useState<ChatEntry[]>([]);
58+
const [showVoiceIsolationPrompt, setShowVoiceIsolationPrompt] = useState(false);
59+
const [currentMicMode, setCurrentMicMode] = useState("Standard");
5860
const humeRef = useRef<HumeClient | null>(null);
5961
const addChatEntry = (entry: ChatEntry) => {
6062
setChatEntries((prev) => [...prev, entry]);
@@ -95,6 +97,14 @@ const App = () => {
9597
return;
9698
}
9799

100+
const micMode = await NativeAudio.getMicrophoneMode();
101+
setCurrentMicMode(micMode);
102+
103+
if (micMode !== "N/A" && micMode !== "Voice Isolation") {
104+
setShowVoiceIsolationPrompt(true);
105+
return
106+
}
107+
98108
const chatSocket = hume.empathicVoice.chat.connect({
99109
configId: process.env.EXPO_PUBLIC_HUME_CONFIG_ID,
100110
});
@@ -142,50 +152,42 @@ const App = () => {
142152
};
143153

144154
const handleDisconnect = async () => {
155+
if (chatSocketRef.current) {
156+
chatSocketRef.current.close();
157+
chatSocketRef.current = null;
158+
}
145159
try {
146160
await NativeAudio.stopRecording();
147-
await NativeAudio.stopPlayback();
148161
} catch (error) {
149162
console.error("Error while stopping recording", error);
150163
}
151-
if (chatSocketRef.current) {
152-
chatSocketRef.current.close();
153-
}
164+
165+
await NativeAudio.stopPlayback();
154166
};
155167

156168
useEffect(() => {
157169
if (isConnected) {
158-
handleConnect().catch((error) => {
159-
console.error("Error while connecting:", error);
160-
});
170+
handleConnect()
161171
} else {
162-
handleDisconnect().catch((error) => {
163-
console.error("Error while disconnecting:", error);
164-
});
172+
handleDisconnect()
165173
}
166174
const onUnmount = () => {
167-
NativeAudio.stopRecording().catch((error: any) => {
168-
console.error("Error while stopping recording", error);
169-
});
170-
if (
171-
chatSocketRef.current &&
172-
chatSocketRef.current.readyState === WebSocket.OPEN
173-
) {
174-
chatSocketRef.current?.close();
175+
if (chatSocketRef.current) {
176+
chatSocketRef.current.close();
177+
chatSocketRef.current = null;
175178
}
179+
180+
NativeAudio.stopRecording();
181+
NativeAudio.stopPlayback();
176182
};
177183
return onUnmount;
178184
}, [isConnected]);
179185

180186
useEffect(() => {
181187
if (isMuted) {
182-
NativeAudio.mute().catch((error) => {
183-
console.error("Error while muting", error);
184-
});
188+
NativeAudio.mute();
185189
} else {
186-
NativeAudio.unmute().catch((error) => {
187-
console.error("Error while unmuting", error);
188-
});
190+
NativeAudio.unmute();
189191
}
190192
}, [isMuted]);
191193

@@ -290,6 +292,12 @@ const App = () => {
290292
/>
291293
</View>
292294
</SafeAreaView>
295+
296+
<VoiceIsolationModePrompt
297+
isVisible={showVoiceIsolationPrompt}
298+
currentMode={currentMicMode}
299+
onDismiss={() => setShowVoiceIsolationPrompt(false)}
300+
/>
293301
</View>
294302
);
295303
};
Lines changed: 73 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,73 @@
1+
import React from 'react';
2+
import {
3+
View,
4+
Text,
5+
Button,
6+
Linking,
7+
Platform,
8+
Modal,
9+
} from 'react-native';
10+
import NativeAudio from './modules/audio';
11+
12+
interface VoiceIsolationModePromptProps {
13+
isVisible: boolean;
14+
currentMode: string;
15+
onDismiss: () => void;
16+
}
17+
18+
const VoiceIsolationModePrompt: React.FC<VoiceIsolationModePromptProps> = ({
19+
isVisible,
20+
currentMode,
21+
onDismiss,
22+
}) => {
23+
const handleOpenSettings = async () => {
24+
if (Platform.OS === 'ios') {
25+
try {
26+
await NativeAudio.showMicrophoneModes();
27+
} catch (error) {
28+
// Fallback to general settings if the API is not available
29+
Linking.openSettings();
30+
}
31+
} else {
32+
Linking.openSettings();
33+
}
34+
onDismiss();
35+
};
36+
37+
const handleShowMeHow = () => {
38+
const supportUrl = 'https://support.apple.com/en-us/101993';
39+
Linking.openURL(supportUrl);
40+
};
41+
42+
return (
43+
<Modal
44+
visible={isVisible}
45+
transparent={true}
46+
animationType="slide"
47+
onRequestClose={onDismiss}
48+
>
49+
<View style={{
50+
flex: 1,
51+
justifyContent: 'center',
52+
alignItems: 'center',
53+
backgroundColor: 'rgba(0, 0, 0, 0.5)'
54+
}}>
55+
<View style={{ backgroundColor: 'white', padding: 20, borderRadius: 10, width: '90%' }}>
56+
<Text>Enable voice isolation for the best experience</Text>
57+
58+
<Text>
59+
Your device is currently using a {currentMode} microphone mode.
60+
Enabling voice isolation will provide the best audio experience
61+
in a noisy setting.
62+
</Text>
63+
64+
<Button title="Open settings" onPress={handleOpenSettings} />
65+
<Button title="Show me how" onPress={handleShowMeHow} />
66+
<Button title="I'll do this later" onPress={onDismiss} />
67+
</View>
68+
</View>
69+
</Modal>
70+
);
71+
};
72+
73+
export default VoiceIsolationModePrompt;

evi/evi-react-native/modules/audio/ios/AudioModule.swift

Lines changed: 41 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -99,6 +99,47 @@ public class AudioModule: Module {
9999
AsyncFunction("stopPlayback") {
100100
await _soundPlayer?.clearQueue()
101101
}
102+
103+
AsyncFunction("showMicrophoneModes") {
104+
if #available(iOS 15.0, *) {
105+
let wasRecording = await self.audioHub.isRecording
106+
107+
if !wasRecording {
108+
try await self.prepare()
109+
try await self.audioHub.startMicrophone(handler: { _, _ in })
110+
}
111+
112+
AVCaptureDevice.showSystemUserInterface(.microphoneModes)
113+
114+
if !wasRecording {
115+
await self.audioHub.stopMicrophone()
116+
}
117+
} else {
118+
throw NSError(
119+
domain: "AudioModule", code: 3,
120+
userInfo: [NSLocalizedDescriptionKey: "Microphone modes are only available on iOS 15+"])
121+
}
122+
}
123+
124+
AsyncFunction("getMicrophoneMode") { () -> String in
125+
if #available(iOS 15.0, *) {
126+
let mode = AVCaptureDevice.preferredMicrophoneMode
127+
switch mode {
128+
case .standard:
129+
return "Standard"
130+
case .voiceIsolation:
131+
return "Voice Isolation"
132+
case .wideSpectrum:
133+
return "Wide Spectrum"
134+
default:
135+
throw NSError(
136+
domain: "AudioModule", code: 4,
137+
userInfo: [NSLocalizedDescriptionKey: "Unknown microphone mode encountered"])
138+
}
139+
} else {
140+
return "N/A"
141+
}
142+
}
102143
}
103144

104145
private func getPermissions() async throws -> Bool {

evi/evi-react-native/modules/audio/src/AudioModule.ts

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
import { NativeModule, requireNativeModule } from 'expo';
22

3-
import { AudioModuleEvents } from './AudioModule.types';
3+
import { AudioModuleEvents, MicrophoneMode } from './AudioModule.types';
44

55
declare class AudioModule extends NativeModule<AudioModuleEvents> {
66
getPermissions(): Promise<boolean>;
@@ -9,6 +9,8 @@ declare class AudioModule extends NativeModule<AudioModuleEvents> {
99
stopPlayback(): Promise<void>;
1010
mute(): Promise<void>;
1111
unmute(): Promise<void>;
12+
showMicrophoneModes(): Promise<void>;
13+
getMicrophoneMode(): Promise<MicrophoneMode>;
1214
}
1315

1416
// This call loads the native module object from the JSI.

evi/evi-react-native/modules/audio/src/AudioModule.types.ts

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,8 @@
1+
export type MicrophoneMode = "N/A" | "Standard" | "Voice Isolation" | "Wide Spectrum";
2+
13
export type AudioModuleEvents = {
24
onAudioInput: (params: AudioEventPayload) => void;
3-
onError: (params: { error: string }) => void;
5+
onError: (params: { message: string }) => void;
46
};
57

68
export type AudioEventPayload = {

evi/evi-react-native/modules/audio/src/AudioModule.web.ts

Lines changed: 10 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
import { EventEmitter } from 'expo-modules-core';
22
import { convertBlobToBase64, getAudioStream, ensureSingleValidAudioTrack, getBrowserSupportedMimeType, MimeType } from 'hume';
33
import { EVIWebAudioPlayer } from "hume";
4-
import { AudioModuleEvents } from './AudioModule.types';
4+
import { AudioModuleEvents, MicrophoneMode } from './AudioModule.types';
55

66
const emitter = new EventEmitter<AudioModuleEvents>();
77

@@ -84,5 +84,14 @@ export default {
8484
async addListener(eventName: keyof AudioModuleEvents, f: AudioModuleEvents[typeof eventName]): Promise<void> {
8585
emitter.addListener(eventName, f);
8686
return
87+
},
88+
89+
async showMicrophoneModes(): Promise<void> {
90+
console.log('Microphone modes are only available on iOS');
91+
return;
92+
},
93+
94+
async getMicrophoneMode(): Promise<MicrophoneMode> {
95+
return 'N/A';
8796
}
8897
};

0 commit comments

Comments (0)