Skip to content

Commit 48c7b40

Browse files
committed
feat: Add AvatarPicker to HomeView and voice command support
- Integrated OSA Gallery AvatarPicker component into HomeView
  - Added toggle button to show/hide avatar picker
  - Browse 4260+ VRM avatars from Open Source Avatars Gallery
  - Select avatars with visual feedback and license info
  - Notes that Bitcoin ordinal models take priority when equipped
- Added voice command support for WebXR mode
  - Web Speech API integration for hands-free control
  - Voice commands: hello/hi (wave), sit/down (hide), play/fetch (ball), good/happy (thumbs up), look/watch (point), jump/bounce (excitement)
  - isVoiceSupported() check for browser compatibility
  - Continuous listening with automatic restart
  - Proper error handling for unsupported browsers

Voice Commands Available:
- 'hello', 'hi', 'hey' → Pet waves back
- 'sit', 'down' → Pet crouches/hides
- 'play', 'fetch' → Throw ball for pet
- 'good', 'happy', 'yes' → Pet shows thumbs up reaction
- 'look', 'watch' → Pet points/looks
- 'jump', 'bounce' → Pet gets excited

Users can now:
1. Browse and select avatars from OSA Gallery in HomeView
2. Use voice commands to interact with pet in WebXR mode
3. Hands-free control while wearing VR headsets
1 parent 8e33810 commit 48c7b40

2 files changed

Lines changed: 129 additions & 0 deletions

File tree

src/components/HomeView.tsx

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@ import { generateSceneJSON } from '../rp1/SceneJSONGenerator';
1313
import { forceSyncScene } from '../rp1/SceneSync';
1414
import { fetchInscriptionContent, applyImageTextureToMesh, categorizeContentType, load3DModelFromContent } from '../avatar/OrdinalRenderer';
1515
import { QRCodeGenerator } from './QRCodeGenerator';
16+
import { AvatarPicker } from './AvatarPicker';
17+
import type { OSAAvatar } from '../types';
1618

1719
type HomeTheme = {
1820
id: string;
@@ -31,6 +33,7 @@ export function HomeView() {
3133
const [newThemeUnlocked, setNewThemeUnlocked] = useState<HomeTheme | null>(null);
3234
const [quickSyncing, setQuickSyncing] = useState(false);
3335
const [quickSyncResult, setQuickSyncResult] = useState<'success' | 'error' | null>(null);
36+
const [showAvatarPicker, setShowAvatarPicker] = useState(false);
3437

3538
if (!pet) return null;
3639

@@ -104,6 +107,14 @@ export function HomeView() {
104107
}, 3000);
105108
};
106109

110+
const handleAvatarSelect = (avatar: OSAAvatar) => {
111+
console.log('[HomeView] Avatar selected:', avatar.name, avatar.modelFileUrl);
112+
setNotification({ message: `Selected: ${avatar.name}`, emoji: '🎭' });
113+
setShowAvatarPicker(false);
114+
// In a full implementation, this would update the pet's avatar/model
115+
setTimeout(() => setNotification(null), 2000);
116+
};
117+
107118
// 3D Kawaii Home Scene
108119
useEffect(() => {
109120
const canvas = canvasRef.current;
@@ -620,6 +631,28 @@ export function HomeView() {
620631
</div>
621632
)}
622633

634+
{/* OSA Avatar Picker */}
635+
<div className="bg-[#1a1a2e] rounded-xl p-4 mb-4 border border-gray-800">
636+
<div className="flex items-center justify-between mb-3">
637+
<h3 className="text-sm font-semibold text-gray-300">🎭 Change Pet Avatar</h3>
638+
<button
639+
onClick={() => setShowAvatarPicker(!showAvatarPicker)}
640+
className="text-xs text-indigo-400 hover:text-indigo-300 underline"
641+
>
642+
{showAvatarPicker ? 'Close' : 'Browse OSA Gallery'}
643+
</button>
644+
</div>
645+
{showAvatarPicker ? (
646+
<AvatarPicker onSelect={handleAvatarSelect} />
647+
) : (
648+
<p className="text-xs text-gray-500">
649+
Browse 4260+ free VRM avatars from the Open Source Avatars Gallery.
650+
<br />
651+
Note: Bitcoin ordinal 3D models take priority when equipped.
652+
</p>
653+
)}
654+
</div>
655+
623656
{/* Nostr Identity */}
624657
{identity && (
625658
<div className="bg-[#1a1a2e] rounded-xl p-4 border border-gray-800">

src/xr/XRInteractions.ts

Lines changed: 96 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -243,3 +243,99 @@ export function playSpatialAudio(
243243
// 3. Set position based on pet location
244244
// 4. Play the sound
245245
}
246+
247+
/**
248+
* Voice Command Recognition
249+
* Uses Web Speech API for voice commands in WebXR mode
250+
*/
251+
export interface VoiceCommandHandler {
252+
onCommand: (command: string, gesture: GestureType) => void;
253+
onError: (error: string) => void;
254+
}
255+
256+
export function startVoiceRecognition(handler: VoiceCommandHandler): () => void {
257+
// Check if SpeechRecognition is available
258+
const SpeechRecognition = (window as any).SpeechRecognition ||
259+
(window as any).webkitSpeechRecognition;
260+
261+
if (!SpeechRecognition) {
262+
handler.onError('Speech recognition not supported in this browser');
263+
return () => {};
264+
}
265+
266+
const recognition = new SpeechRecognition();
267+
recognition.continuous = true;
268+
recognition.interimResults = false;
269+
recognition.lang = 'en-US';
270+
271+
recognition.onresult = (event: any) => {
272+
const last = event.results.length - 1;
273+
const transcript = event.results[last][0].transcript.toLowerCase().trim();
274+
275+
console.log('[Voice] Heard:', transcript);
276+
277+
// Map voice commands to gestures
278+
let gesture: GestureType = 'none';
279+
280+
if (transcript.includes('hello') || transcript.includes('hi') || transcript.includes('hey')) {
281+
gesture = 'wave';
282+
} else if (transcript.includes('sit') || transcript.includes('down')) {
283+
gesture = 'fist'; // Crouch/hide
284+
} else if (transcript.includes('play') || transcript.includes('fetch')) {
285+
gesture = 'pinch'; // Throw ball
286+
} else if (transcript.includes('good') || transcript.includes('happy') || transcript.includes('yes')) {
287+
gesture = 'thumbsUp';
288+
} else if (transcript.includes('look') || transcript.includes('watch')) {
289+
gesture = 'point';
290+
} else if (transcript.includes('jump') || transcript.includes('bounce')) {
291+
gesture = 'open'; // Open hand = excitement
292+
}
293+
294+
if (gesture !== 'none') {
295+
handler.onCommand(transcript, gesture);
296+
}
297+
};
298+
299+
recognition.onerror = (event: any) => {
300+
console.warn('[Voice] Error:', event.error);
301+
if (event.error !== 'no-speech') {
302+
handler.onError(`Voice error: ${event.error}`);
303+
}
304+
};
305+
306+
recognition.onend = () => {
307+
// Restart if not manually stopped
308+
if (recognition) {
309+
try {
310+
recognition.start();
311+
} catch {
312+
// Already started
313+
}
314+
}
315+
};
316+
317+
try {
318+
recognition.start();
319+
console.log('[Voice] Recognition started');
320+
} catch (e) {
321+
handler.onError('Failed to start voice recognition');
322+
}
323+
324+
// Return cleanup function
325+
return () => {
326+
try {
327+
recognition.stop();
328+
console.log('[Voice] Recognition stopped');
329+
} catch {
330+
// Ignore errors on cleanup
331+
}
332+
};
333+
}
334+
335+
/**
336+
* Check if voice commands are supported
337+
*/
338+
export function isVoiceSupported(): boolean {
339+
return typeof window !== 'undefined' &&
340+
(!!((window as any).SpeechRecognition) || !!((window as any).webkitSpeechRecognition));
341+
}

0 commit comments

Comments
 (0)