diff --git a/package.json b/package.json
index d607499f9..f80505816 100644
--- a/package.json
+++ b/package.json
@@ -37,5 +37,6 @@
},
"engines": {
"pnpm": "10.4.1"
- }
+ },
+ "packageManager": "pnpm@10.4.1+sha512.c753b6c3ad7afa13af388fa6d808035a008e30ea9993f58c6663e2bc5ff21679aa834db094987129aa4d488b86df57f7b634981b2f827cdcacc698cc0cfb88af"
}
diff --git a/src/app/(interview)/interview-live/page.tsx b/src/app/(interview)/interview-live/page.tsx
new file mode 100644
index 000000000..9ab9518b1
--- /dev/null
+++ b/src/app/(interview)/interview-live/page.tsx
@@ -0,0 +1,13 @@
+import CameraView from '@/features/interview/camera-view';
+import VoiceInputButton from '@/features/interview/voice-input-button';
+
+const InterviewPage = () => {
+ return (
+    <div>
+      <CameraView />
+      <VoiceInputButton />
+    </div>
+ );
+};
+
+export default InterviewPage;
diff --git a/src/app/(interview)/interview-start/page.tsx b/src/app/(interview)/interview-start/page.tsx
new file mode 100644
index 000000000..d61ac77bd
--- /dev/null
+++ b/src/app/(interview)/interview-start/page.tsx
@@ -0,0 +1,5 @@
+const InterviewStartPage = () => {
+  return <div>InterviewStartPage</div>;
+};
+
+export default InterviewStartPage;
diff --git a/src/features/interview/camera-view.tsx b/src/features/interview/camera-view.tsx
new file mode 100644
index 000000000..6b6fe412c
--- /dev/null
+++ b/src/features/interview/camera-view.tsx
@@ -0,0 +1,15 @@
+'use client';
+
+import { useWebcamStream } from '@/features/interview/hooks/use-webcam-stream';
+
+const CameraView = () => {
+ const videoRef = useWebcamStream();
+
+ return (
+    <div>
+      <video ref={videoRef} autoPlay playsInline muted />
+    </div>
+ );
+};
+
+export default CameraView;
diff --git a/src/features/interview/hooks/use-audio-recorder.ts b/src/features/interview/hooks/use-audio-recorder.ts
new file mode 100644
index 000000000..291392dd4
--- /dev/null
+++ b/src/features/interview/hooks/use-audio-recorder.ts
@@ -0,0 +1,56 @@
+import { useRef, useState } from 'react';
+
+export const useAudioRecorder = () => {
+ // 창연님을 위한 주석이니 PR 하시는 분들은 자세히 안 읽으셔도 됩니다.
+
+ // MediaRecorder 인스턴스를 저장할 곳, 녹음 시작/중지 때 사용됨
+ const audioRecorderRef = useRef<MediaRecorder | null>(null);
+ // 녹음 중인지 아닌지의 상태
+ const [isRecording, setIsRecording] = useState(false);
+ // 녹음이 끝난 뒤, 재생하거나 다운로드할 수 있도록 오디오 Blob을 저장함
+ const [audioBlob, setAudioBlob] = useState<Blob | null>(null);
+ // MediaRecorder가 전달하는 오디오 데이터를 작은 조각(blob) 단위로 모아두는 곳
+ const audioChunksRef = useRef<Blob[]>([]);
+
+ //녹음 시작
+ const startRecording = async () => {
+ try {
+ // 사용자한테 마이크 접근 권한 요청, 승인되면 MediaStream을 받아옴
+ const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+
+ // 오디오 포맷 설정 (MIME 타입 수정)
+ const mediaRecorder = new MediaRecorder(stream, {
+ mimeType: 'audio/webm;codecs=opus',
+ });
+ audioRecorderRef.current = mediaRecorder;
+ audioChunksRef.current = [];
+
+ // 녹음 중 MediaRecorder가 데이터를 제공할 때마다 조각(chunk)을 audioChunksRef에 추가함
+ // 녹음 중 데이터가 쪼개져서 순차적으로 들어오는 구조임
+ mediaRecorder.ondataavailable = (e) => {
+ audioChunksRef.current.push(e.data);
+ };
+
+ // 녹음이 종료되면 지금까지 모은 오디오 조각들을 하나로 합쳐서 Blob으로 만듦
+ // Blob을 브라우저가 이해할 수 있는 가상 URL로 변환
+ mediaRecorder.onstop = () => {
+ const blob = new Blob(audioChunksRef.current, { type: 'audio/webm' });
+ setAudioBlob(blob);
+ };
+
+ // 실제 녹음을 시작함
+ mediaRecorder.start();
+ setIsRecording(true);
+ } catch (error) {
+ console.error('마이크 접근 오류:', error);
+ }
+ };
+
+ // 녹음 중단
+ const stopRecording = () => {
+ audioRecorderRef.current?.stop();
+ setIsRecording(false);
+ };
+
+ return { isRecording, audioBlob, startRecording, stopRecording };
+};
diff --git a/src/features/interview/hooks/use-webcam-stream.ts b/src/features/interview/hooks/use-webcam-stream.ts
new file mode 100644
index 000000000..2361df810
--- /dev/null
+++ b/src/features/interview/hooks/use-webcam-stream.ts
@@ -0,0 +1,28 @@
+import { useEffect, useRef } from 'react';
+
+type StreamCallback = (stream: MediaStream) => void;
+
+export const useWebcamStream = () => {
+ const videoRef = useRef<HTMLVideoElement | null>(null);
+
+ const getWebcam = async (onStreamReady: StreamCallback) => {
+ try {
+ const constraints = { video: { width: { ideal: 1280 }, height: { ideal: 720 } }, audio: false };
+ const stream = await navigator.mediaDevices.getUserMedia(constraints);
+
+ onStreamReady(stream);
+ } catch (error) {
+ console.error(error);
+ }
+ };
+
+ useEffect(() => {
+ getWebcam((stream) => {
+ if (videoRef.current) {
+ videoRef.current.srcObject = stream;
+ }
+ });
+ }, []);
+
+ return videoRef;
+};
diff --git a/src/features/interview/voice-input-button.tsx b/src/features/interview/voice-input-button.tsx
new file mode 100644
index 000000000..01ff8316d
--- /dev/null
+++ b/src/features/interview/voice-input-button.tsx
@@ -0,0 +1,18 @@
+'use client';
+
+import { useAudioRecorder } from '@/features/interview/hooks/use-audio-recorder';
+
+const VoiceInputButton = () => {
+ const { isRecording, audioBlob, startRecording, stopRecording } = useAudioRecorder();
+
+ return (
+    <>
+      <button onClick={isRecording ? stopRecording : startRecording}>
+        {isRecording ? '녹음 중지' : '녹음 시작'}
+      </button>
+      {audioBlob && <audio controls src={URL.createObjectURL(audioBlob)} />}
+    </>
+ );
+};
+
+export default VoiceInputButton;