diff --git a/apps/web/src/app/questions/[questionId]/daily/_components/recording-section.tsx b/apps/web/src/app/questions/[questionId]/daily/_components/recording-section.tsx
new file mode 100644
index 0000000..e42385e
--- /dev/null
+++ b/apps/web/src/app/questions/[questionId]/daily/_components/recording-section.tsx
@@ -0,0 +1,60 @@
+"use client";
+
+import * as React from "react";
+import Waveform from "@/components/waveform/waveform";
+import { Button } from "@/components/button/button";
+import { CheckCircle2, Mic, RotateCcw, Square } from "lucide-react";
+
+function RecordingSection() {
+  const [isRecording, setIsRecording] = React.useState(false);
+  const [audioStreamingSessionId, setAudioStreamingSessionId] =
+    React.useState<string>();
+
+  return (
+    <div className="flex w-full flex-col gap-4">
+      <Waveform isRecording={isRecording} />
+
+      <div className="flex items-center justify-center gap-2">
+        {isRecording && (
+          <Button
+            onClick={() => {
+              setIsRecording(false);
+              // TODO: replace with the session id returned by the
+              // streaming backend; a random id stands in for it here.
+              setAudioStreamingSessionId(crypto.randomUUID());
+            }}
+          >
+            <Square />
+          </Button>
+        )}
+        {!audioStreamingSessionId && !isRecording && (
+          <Button onClick={() => setIsRecording(true)}>
+            <Mic />
+          </Button>
+        )}
+        {audioStreamingSessionId && !isRecording && (
+          <div className="flex gap-2">
+            <Button
+              onClick={() => {
+                setAudioStreamingSessionId(undefined);
+                setIsRecording(true);
+              }}
+            >
+              <RotateCcw />
+            </Button>
+            <Button>
+              <CheckCircle2 />
+            </Button>
+          </div>
+        )}
+      </div>
+    </div>
+  );
+}
+
+export default RecordingSection;
diff --git a/apps/web/src/app/questions/[questionId]/daily/page.tsx b/apps/web/src/app/questions/[questionId]/daily/page.tsx
new file mode 100644
index 0000000..cf0e7c5
--- /dev/null
+++ b/apps/web/src/app/questions/[questionId]/daily/page.tsx
@@ -0,0 +1,30 @@
+import RecordingSection from "./_components/recording-section";
+
+function DailyQuestionPage() {
+  return (
+    <main className="mx-auto flex w-full max-w-2xl flex-col gap-8 px-4 py-8">
+      <section className="flex flex-col gap-4">
+        <div>
+          <span className="rounded-full border px-3 py-1 text-sm font-medium">
+            React
+          </span>
+        </div>
+        <h1 className="text-xl font-semibold">
+          React의 Virtual DOM에 대해 설명하고, 이것이 어떻게 성능을
+          향상시키는지 설명해주세요.
+        </h1>
+        <p className="text-sm text-zinc-600">
+          재조정(Reconciliation)과 Diffing 알고리즘에 초점을 맞춰주세요.
+          실제 DOM 조작이 왜 비용이 많이 드는지, 그리고 React가 어떻게
+          가벼운 객체 트리를 사용하여 업데이트를 효율적으로 처리하는지
+          설명해주세요.
+        </p>
+      </section>
+
+      <RecordingSection />
+    </main>
+  );
+}
+
+export default DailyQuestionPage;
diff --git a/apps/web/src/components/waveform/waveform.stories.tsx b/apps/web/src/components/waveform/waveform.stories.tsx
new file mode 100644
index 0000000..4fd8011
--- /dev/null
+++ b/apps/web/src/components/waveform/waveform.stories.tsx
@@ -0,0 +1,31 @@
+import type { Meta, StoryObj } from "@storybook/nextjs-vite";
+
+import Waveform from "./waveform";
+
+const meta = {
+  title: "Components/Waveform",
+  component: Waveform,
+  parameters: {
+    layout: "padded",
+  },
+  tags: ["autodocs"],
+  argTypes: {
+    isRecording: {
+      control: "boolean",
+      description: "Controls the recording state",
+    },
+    className: {
+      control: "text",
+      description: "Additional CSS classes",
+    },
+  },
+} satisfies Meta<typeof Waveform>;
+
+export default meta;
+type Story = StoryObj<typeof meta>;
+
+export const Default: Story = {
+  args: {
+    isRecording: false,
+  },
+};
diff --git a/apps/web/src/components/waveform/waveform.tsx b/apps/web/src/components/waveform/waveform.tsx
new file mode 100644
index 0000000..f418f6b
--- /dev/null
+++ b/apps/web/src/components/waveform/waveform.tsx
@@ -0,0 +1,225 @@
+"use client";
+
+import * as React from "react";
+import { cn } from "@/lib/cn";
+import useAnimationFrame from "@/hooks/use-animation-frame";
+import createAudioStreamer, { AudioStreamerHandle } from "@/lib/audio-streamer";
+
+interface WaveformProps {
+  isRecording: boolean;
+  barWidthPx?: number;
+  barGapPx?: number;
+  barColor?: string;
+  updateIntervalMs?: number;
+  numberOfPaddingBars?: number;
+  className?: string;
+}
+
+function Waveform({
+  isRecording,
+  barWidthPx = 4,
+  barGapPx = 2,
+  barColor = "#18181b",
+  updateIntervalMs = 30,
+  numberOfPaddingBars = 8,
+  className,
+}: WaveformProps) {
+  const canvasRef = React.useRef<HTMLCanvasElement>(null);
+  const canvasWidthRef = React.useRef(0);
+  const canvasHeightRef = React.useRef(0);
+  const amplitudeHistoryRef = React.useRef<number[]>([]);
+  const lastUpdateTimeRef = React.useRef(0);
+  const streamerRef = React.useRef<AudioStreamerHandle | null>(null);
+
+  React.useEffect(() => {
+    const streamer = streamerRef.current;
+    if (isRecording) {
+      streamer?.start();
+    }
+
+    return () => {
+      streamer?.stop();
+    };
+  }, [isRecording]);
+
+  React.useEffect(() => {
+    streamerRef.current = createAudioStreamer({
+      sampleRate: 16000,
+      channels: 1,
+      bitsPerSample: 16,
+      onAudioChunk: ({ wave }) => {
+        const now = Date.now();
+
+        // Skip if the update interval has not elapsed yet
+        if (now - lastUpdateTimeRef.current < updateIntervalMs) {
+          return;
+        }
+
+        lastUpdateTimeRef.current = now;
+
+        // wave is a Float32Array, so compute its RMS (Root Mean Square)
+        let sum = 0;
+        for (let i = 0; i < wave.length; i++) {
+          sum += wave[i] * wave[i];
+        }
+        const rms = Math.sqrt(sum / wave.length);
+
+        // Scale the RMS value to the canvas height
+        const maxAmplitude = canvasHeightRef.current * 0.6; // cap bars at 60% of the canvas height
+        const amplitude = rms * maxAmplitude * 6; // boost the RMS value 6x so quiet input stays visible
+
+        // Enforce a minimum height (bars would otherwise be invisible)
+        const finalAmplitude = Math.max(Math.min(amplitude, maxAmplitude), 4);
+
+        amplitudeHistoryRef.current.push(finalAmplitude);
+
+        // Cap the history length so it does not grow unbounded
+        const maxBars =
+          Math.ceil(canvasWidthRef.current / (barWidthPx + barGapPx)) + 10;
+        if (amplitudeHistoryRef.current.length > maxBars) {
+          amplitudeHistoryRef.current.shift();
+        }
+      },
+    });
+
+    // Stop the previous streamer when the config changes or on unmount
+    return () => {
+      streamerRef.current?.stop();
+      streamerRef.current = null;
+    };
+  }, [barWidthPx, barGapPx, updateIntervalMs]);
+
+  React.useEffect(() => {
+    const canvas = canvasRef.current;
+    if (!canvas) {
+      return;
+    }
+
+    const ctx = canvas.getContext("2d");
+    if (!ctx) {
+      return;
+    }
+
+    const resizeCanvas = () => {
+      const devicePixelRatio = window.devicePixelRatio || 1;
+      canvasWidthRef.current = canvas.clientWidth;
+      canvasHeightRef.current = canvas.clientHeight;
+
+      const nextCanvasWidth = Math.max(
+        1,
+        Math.round(canvasWidthRef.current * devicePixelRatio)
+      );
+      const nextCanvasHeight = Math.max(
+        1,
+        Math.round(canvasHeightRef.current * devicePixelRatio)
+      );
+
+      if (canvas.width !== nextCanvasWidth) {
+        canvas.width = nextCanvasWidth;
+      }
+
+      if (canvas.height !== nextCanvasHeight) {
+        canvas.height = nextCanvasHeight;
+      }
+
+      ctx.setTransform(devicePixelRatio, 0, 0, devicePixelRatio, 0, 0);
+    };
+
+    resizeCanvas();
+
+    const resizeObserver = new ResizeObserver(() => {
+      resizeCanvas();
+    });
+
+    resizeObserver.observe(canvas);
+
+    return () => {
+      resizeObserver.disconnect();
+    };
+  }, []);
+
+  useAnimationFrame(() => {
+    const canvas = canvasRef.current;
+    if (!canvas) {
+      return;
+    }
+
+    const ctx = canvas.getContext("2d");
+    if (!ctx) {
+      return;
+    }
+
+    // Clear the canvas
+    ctx.clearRect(0, 0, canvasWidthRef.current, canvasHeightRef.current);
+
+    const history = amplitudeHistoryRef.current;
+
+    // Draw idle bars in the padding area (right edge)
+    if (history.length) {
+      for (let i = 0; i < numberOfPaddingBars; i++) {
+        const barX =
+          canvasWidthRef.current - i * (barWidthPx + barGapPx) - barWidthPx;
+        const idleAmplitude = 4; // small fixed height for idle bars
+        const barY = canvasHeightRef.current / 2 - idleAmplitude / 2;
+
+        ctx.fillStyle = barColor;
+        ctx.beginPath();
+        const radius = barWidthPx / 2;
+        ctx.roundRect(barX, barY, barWidthPx, idleAmplitude, radius);
+        ctx.fill();
+      }
+    }
+
+    // Draw each amplitude in the history (after the padding area)
+    for (let i = 0; i < history.length; i++) {
+      const indexFromRight = history.length - 1 - i;
+      const amplitude = history[indexFromRight];
+
+      // Position the bar, offset by the padding area
+      const barX =
+        canvasWidthRef.current -
+        (numberOfPaddingBars + i) * (barWidthPx + barGapPx) -
+        barWidthPx;
+      const barY = canvasHeightRef.current / 2 - amplitude / 2;
+
+      // Stop drawing once a bar falls outside the canvas
+      if (barX + barWidthPx < 0) {
+        break;
+      }
+
+      ctx.fillStyle = barColor;
+      ctx.beginPath();
+      const radius = barWidthPx / 2;
+      ctx.roundRect(barX, barY, barWidthPx, amplitude, radius);
+      ctx.fill();
+    }
+  });
+
+  return (
+    <div
+      className={cn(
+        "relative h-24 w-full overflow-hidden rounded-lg border bg-white",
+        className
+      )}
+    >
+      {/* Background Grid Pattern */}
+      <div className="pointer-events-none absolute inset-0 bg-[linear-gradient(to_right,#f4f4f5_1px,transparent_1px),linear-gradient(to_bottom,#f4f4f5_1px,transparent_1px)] bg-[size:16px_16px]" />
+
+      <canvas ref={canvasRef} className="absolute inset-0 h-full w-full" />
+
+      {!isRecording ? (
+        <div className="absolute inset-0 flex items-center justify-center text-xs font-medium tracking-widest text-zinc-400">
+          READY TO RECORD
+        </div>
+      ) : (
+        <div className="absolute left-2 top-2 flex items-center gap-1.5">
+          <span className="h-2 w-2 animate-pulse rounded-full bg-red-500" />
+          <span className="text-xs font-medium text-red-500">
+            Recording
+          </span>
+        </div>
+      )}
+    </div>
+  );
+}
+
+export default Waveform;
diff --git a/apps/web/src/hooks/use-animation-frame.ts b/apps/web/src/hooks/use-animation-frame.ts
new file mode 100644
index 0000000..6d89926
--- /dev/null
+++ b/apps/web/src/hooks/use-animation-frame.ts
@@ -0,0 +1,35 @@
+import * as React from "react";
+
+type AnimationCallback = (deltaTime: number) => void;
+
+function useAnimationFrame(callback: AnimationCallback) {
+  const callbackRef = React.useRef(callback);
+  const rafIdRef = React.useRef<number | null>(null);
+  const lastTimeRef = React.useRef<number | null>(null);
+
+  React.useEffect(() => {
+    callbackRef.current = callback;
+  }, [callback]);
+
+  React.useEffect(() => {
+    const loop = (time: number) => {
+      const last = lastTimeRef.current;
+      const deltaTime = last === null ? 0 : time - last;
+      lastTimeRef.current = time;
+      callbackRef.current(deltaTime);
+      rafIdRef.current = requestAnimationFrame(loop);
+    };
+
+    rafIdRef.current = requestAnimationFrame(loop);
+
+    return () => {
+      if (rafIdRef.current !== null) {
+        cancelAnimationFrame(rafIdRef.current);
+      }
+      rafIdRef.current = null;
+      lastTimeRef.current = null;
+    };
+  }, []);
+}
+
+export default useAnimationFrame;
diff --git a/apps/web/src/lib/audio-streamer.ts b/apps/web/src/lib/audio-streamer.ts
new file mode 100644
index 0000000..2303aab
--- /dev/null
+++ b/apps/web/src/lib/audio-streamer.ts
@@ -0,0 +1,214 @@
+interface AudioStreamerConfig {
+  sampleRate: number; // 16000
+  channels: number; // 1
+  bitsPerSample: number; // 16
+  /**
+   * Callback invoked with each PCM16 (little-endian) chunk
+   * - buffer: Int16Array.buffer (transferable)
+   * - sampleRate: actual output sample rate (usually config.sampleRate)
+   * - wave: waveform samples for visualization
+   */
+  onAudioChunk: (payload: {
+    buffer: ArrayBufferLike;
+    sampleRate: number;
+    wave: Float32Array;
+  }) => void;
+  onStart?: () => void;
+  onStop?: () => void;
+  onError?: (err: unknown) => void;
+  constraints?: MediaStreamConstraints;
+}
+
+interface AudioStreamerHandle {
+  start: () => Promise<void>;
+  stop: () => Promise<void>;
+  getState: () => { isRecording: boolean };
+}
+
+function createAudioStreamer(config: AudioStreamerConfig): AudioStreamerHandle {
+  let audioContext: AudioContext | null = null;
+  let audioWorkletNode: AudioWorkletNode | null = null;
+  let mediaStream: MediaStream | null = null;
+  let isRecording = false;
+
+  const defaultConstraints: MediaStreamConstraints = {
+    audio: {
+      channelCount: 1,
+      sampleRate: 48000,
+      echoCancellation: true,
+      noiseSuppression: true,
+      autoGainControl: true,
+    },
+  };
+
+  const floatToPCM16 = (float32Array: Float32Array): Int16Array => {
+    const pcm16 = new Int16Array(float32Array.length);
+    for (let i = 0; i < float32Array.length; i++) {
+      const s = Math.max(-1, Math.min(1, float32Array[i]));
+      pcm16[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
+    }
+    return pcm16;
+  };
+
+  const processAudioData = (audioData: Float32Array) => {
+    if (!audioContext) return;
+
+    const currentSampleRate = audioContext.sampleRate || 48000;
+
+    // 48k/44.1k -> target (e.g. 16k)
+    const resampled =
+      currentSampleRate === config.sampleRate
+        ? audioData
+        : resampleLinear(audioData, currentSampleRate, config.sampleRate);
+
+    const wave = downsampleForWave(resampled, 256);
+
+    const pcmData = floatToPCM16(resampled);
+
+    config.onAudioChunk({
+      buffer: pcmData.buffer,
+      sampleRate: config.sampleRate,
+      wave,
+    });
+  };
+
+  const cleanup = async () => {
+    // AudioWorkletNode
+    if (audioWorkletNode) {
+      audioWorkletNode.port.onmessage = null;
+      audioWorkletNode.disconnect();
+      audioWorkletNode = null;
+    }
+
+    // AudioContext
+    if (audioContext) {
+      try {
+        await audioContext.close();
+      } finally {
+        audioContext = null;
+      }
+    }
+
+    // MediaStream
+    if (mediaStream) {
+      mediaStream.getTracks().forEach((t) => t.stop());
+      mediaStream = null;
+    }
+  };
+
+  const start = async () => {
+    if (isRecording) return;
+
+    try {
+      mediaStream = await navigator.mediaDevices.getUserMedia(
+        config.constraints ?? defaultConstraints
+      );
+
+      audioContext = new AudioContext();
+      const source = audioContext.createMediaStreamSource(mediaStream);
+
+      // Load the AudioWorklet module and create the node
+      const workletURL = makeWorkletURL();
+      await audioContext.audioWorklet.addModule(workletURL);
+      URL.revokeObjectURL(workletURL); // module is loaded; release the blob URL
+
+      audioWorkletNode = new AudioWorkletNode(audioContext, "mic-tap");
+      audioWorkletNode.port.onmessage = (event) => {
+        // Assumes the worklet posts a Float32Array
+        processAudioData(event.data as Float32Array);
+      };
+
+      source.connect(audioWorkletNode);
+
+      // Caution: connecting to the destination can route the mic to the
+      // speakers (monitoring/echo). If monitoring should be opt-in,
+      // extract this into an option.
+      audioWorkletNode.connect(audioContext.destination);
+
+      isRecording = true;
+      config.onStart?.();
+    } catch (err) {
+      config.onError?.(err);
+      isRecording = false;
+      await cleanup();
+      throw err;
+    }
+  };
+
+  const stop = async () => {
+    if (!isRecording) {
+      await cleanup();
+      return;
+    }
+
+    isRecording = false;
+    await cleanup();
+    config.onStop?.();
+  };
+
+  const getState = () => ({ isRecording });
+
+  return { start, stop, getState };
+}
+
+function makeWorkletURL() {
+  const code = `
+    class MicTap extends AudioWorkletProcessor {
+      process(inputs) {
+        const input = inputs[0];
+        if (input && input[0] && input[0].length) {
+          // mono: channel 0
+          this.port.postMessage(input[0]);
+        }
+        return true;
+      }
+    }
+    registerProcessor("mic-tap", MicTap);
+  `;
+  return URL.createObjectURL(
+    new Blob([code], { type: "application/javascript" })
+  );
+}
+
+// Minimal resampler (linear interpolation): 48k (or 44.1k) -> 16k
+function resampleLinear(input: Float32Array, inRate: number, outRate: number) {
+  if (inRate === outRate) return input;
+
+  const ratio = inRate / outRate;
+  const outLen = Math.floor(input.length / ratio);
+  const out = new Float32Array(outLen);
+
+  let pos = 0;
+  for (let i = 0; i < outLen; i++) {
+    const idx = Math.floor(pos);
+    const frac = pos - idx;
+    const s0 = input[idx] ?? 0;
+    const s1 = input[idx + 1] ?? s0;
+    out[i] = s0 + (s1 - s0) * frac;
+    pos += ratio;
+  }
+  return out;
+}
+
+const downsampleForWave = (samples: Float32Array, targetPoints = 256) => {
+  if (samples.length <= targetPoints) return samples;
+
+  const out = new Float32Array(targetPoints);
+  const step = samples.length / targetPoints;
+
+  for (let i = 0; i < targetPoints; i++) {
+    const start = Math.floor(i * step);
+    const end = Math.floor((i + 1) * step);
+
+    let peak = 0;
+    for (let j = start; j < end; j++) {
+      const v = Math.abs(samples[j]);
+      if (v > peak) peak = v;
+    }
+
+    out[i] = peak;
+  }
+
+  return out;
+};
+
+export default createAudioStreamer;
+export type { AudioStreamerHandle };