Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,5 +37,6 @@
},
"engines": {
"pnpm": "10.4.1"
}
},
"packageManager": "pnpm@10.4.1+sha512.c753b6c3ad7afa13af388fa6d808035a008e30ea9993f58c6663e2bc5ff21679aa834db094987129aa4d488b86df57f7b634981b2f827cdcacc698cc0cfb88af"
}
13 changes: 13 additions & 0 deletions src/app/(interview)/interview-live/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import CameraView from '@/features/interview/camera-view';
import VoiceInputButton from '@/features/interview/voice-input-button';

/**
 * Live interview screen: shows the webcam preview alongside the
 * voice-answer recording control.
 */
const InterviewPage = () => (
  <div>
    <CameraView />
    <VoiceInputButton />
  </div>
);

export default InterviewPage;
5 changes: 5 additions & 0 deletions src/app/(interview)/interview-start/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
/** Placeholder entry screen shown before an interview session begins. */
const InterviewStartPage = () => <div>InterviewStartPage</div>;

export default InterviewStartPage;
15 changes: 15 additions & 0 deletions src/features/interview/camera-view.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
'use client';

import { useWebcamStream } from '@/features/interview/hooks/use-webcam-stream';

const CameraView = () => {
const videoRef = useWebcamStream();

return (
<div className='w-[300px]'>
<video ref={videoRef} autoPlay />
</div>
);
};

export default CameraView;
56 changes: 56 additions & 0 deletions src/features/interview/hooks/use-audio-recorder.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { useRef, useState } from 'react';

/**
 * Records microphone audio via the MediaRecorder API.
 *
 * Returns:
 * - isRecording: whether a recording is currently in progress
 * - audioBlob: the finished recording as an audio/webm Blob (null until a
 *   recording completes)
 * - startRecording / stopRecording: controls for the recording session
 */
export const useAudioRecorder = () => {
  // MediaRecorder instance, used to stop the active recording.
  const audioRecorderRef = useRef<MediaRecorder | null>(null);
  // The underlying MediaStream. Kept so its tracks can be stopped when
  // recording ends — MediaRecorder.stop() alone does NOT release the mic,
  // and the browser's recording indicator would stay on forever.
  const audioStreamRef = useRef<MediaStream | null>(null);
  // Whether a recording is currently in progress.
  const [isRecording, setIsRecording] = useState(false);
  // The completed recording, available for playback or download.
  const [audioBlob, setAudioBlob] = useState<Blob | null>(null);
  // Audio data arrives in chunks while recording; collected here and
  // joined into a single Blob on stop.
  const audioChunksRef = useRef<Blob[]>([]);

  // Start a new recording session.
  const startRecording = async () => {
    try {
      // Ask the user for microphone access; resolves with a MediaStream.
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      audioStreamRef.current = stream;

      // Configure the audio container/codec (webm + opus).
      const mediaRecorder = new MediaRecorder(stream, {
        mimeType: 'audio/webm;codecs=opus',
      });
      audioRecorderRef.current = mediaRecorder;
      audioChunksRef.current = [];
      // Clear any previous recording so stale audio is not shown while a
      // new one is in progress.
      setAudioBlob(null);

      // MediaRecorder delivers data incrementally; accumulate each chunk.
      mediaRecorder.ondataavailable = (e) => {
        audioChunksRef.current.push(e.data);
      };

      // When recording stops, merge all chunks into one playable Blob.
      mediaRecorder.onstop = () => {
        const blob = new Blob(audioChunksRef.current, { type: 'audio/webm' });
        setAudioBlob(blob);
      };

      mediaRecorder.start();
      setIsRecording(true);
    } catch (error) {
      console.error('마이크 μ ‘κ·Ό 였λ₯˜:', error);
    }
  };

  // Stop the current recording and release the microphone.
  const stopRecording = () => {
    audioRecorderRef.current?.stop();
    // Stop every track so the mic is actually freed; without this the
    // microphone stays live after recording ends.
    audioStreamRef.current?.getTracks().forEach((track) => track.stop());
    audioStreamRef.current = null;
    setIsRecording(false);
  };

  return { isRecording, audioBlob, startRecording, stopRecording };
};
28 changes: 28 additions & 0 deletions src/features/interview/hooks/use-webcam-stream.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import { useEffect, useRef } from 'react';

type StreamCallback = (stream: MediaStream) => void;

/**
 * Acquires a video-only webcam stream (ideal 1280x720) and attaches it to
 * the returned ref's <video> element. The stream is released (all tracks
 * stopped) when the component unmounts, so the camera indicator turns off.
 *
 * Returns a ref to assign to a <video> element.
 */
export const useWebcamStream = () => {
  const videoRef = useRef<HTMLVideoElement | null>(null);

  useEffect(() => {
    // Guards the async getUserMedia call: if the component unmounts
    // before the permission prompt resolves, the stream must still be
    // stopped — otherwise the camera stays on with no owner.
    let cancelled = false;
    let activeStream: MediaStream | null = null;

    const getWebcam = async (onStreamReady: StreamCallback) => {
      try {
        const constraints = { video: { width: { ideal: 1280 }, height: { ideal: 720 } }, audio: false };
        const stream = await navigator.mediaDevices.getUserMedia(constraints);

        if (cancelled) {
          // Unmounted while waiting for permission: release immediately.
          stream.getTracks().forEach((track) => track.stop());
          return;
        }
        activeStream = stream;
        onStreamReady(stream);
      } catch (error) {
        console.error(error);
      }
    };

    getWebcam((stream) => {
      if (videoRef.current) {
        videoRef.current.srcObject = stream;
      }
    });

    // Cleanup: stop all tracks so the camera is released on unmount.
    return () => {
      cancelled = true;
      activeStream?.getTracks().forEach((track) => track.stop());
    };
  }, []);

  return videoRef;
};
18 changes: 18 additions & 0 deletions src/features/interview/voice-input-button.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
'use client';

import { useEffect, useState } from 'react';

import { useAudioRecorder } from '@/features/interview/hooks/use-audio-recorder';

/**
 * Toggle button that starts/stops an audio recording and, once a recording
 * exists, renders an <audio> player for it.
 */
const VoiceInputButton = () => {
  const { isRecording, audioBlob, startRecording, stopRecording } = useAudioRecorder();
  const [audioUrl, setAudioUrl] = useState<string | null>(null);

  // Create exactly one object URL per blob and revoke it when the blob
  // changes or the component unmounts. The previous inline
  // URL.createObjectURL(audioBlob) leaked a fresh, never-revoked URL on
  // every render.
  useEffect(() => {
    if (!audioBlob) {
      setAudioUrl(null);
      return;
    }
    const url = URL.createObjectURL(audioBlob);
    setAudioUrl(url);
    return () => {
      URL.revokeObjectURL(url);
    };
  }, [audioBlob]);

  return (
    <>
      <button onClick={isRecording ? stopRecording : startRecording}>
        {isRecording ? 'λ‹΅λ³€ μ™„λ£Œν•˜κΈ°' : 'λ‹΅λ³€ν•˜κΈ°'}
      </button>
      {audioUrl && <audio controls src={audioUrl} />}
    </>
  );
};

export default VoiceInputButton;